Upload 45 files
Browse files- .gitignore +158 -0
- README.md +143 -5
- codes/__init__.py +0 -0
- codes/__pycache__/__init__.cpython-39.pyc +0 -0
- codes/__pycache__/calibration.cpython-39.pyc +0 -0
- codes/__pycache__/crt_train_models.cpython-39.pyc +0 -0
- codes/__pycache__/eye_track.cpython-39.pyc +0 -0
- codes/__pycache__/sampling.cpython-39.pyc +0 -0
- codes/__pycache__/see_data.cpython-39.pyc +0 -0
- codes/__pycache__/show.cpython-39.pyc +0 -0
- codes/__pycache__/tune_models_params.cpython-39.pyc +0 -0
- codes/__pycache__/work.cpython-39.pyc +0 -0
- codes/base/__pycache__/core.cpython-39.pyc +0 -0
- codes/base/__pycache__/eyeing.cpython-39.pyc +0 -0
- codes/base/__pycache__/face_geometry.cpython-39.pyc +0 -0
- codes/base/__pycache__/iris_lm_depth.cpython-39.pyc +0 -0
- codes/base/core.py +168 -0
- codes/base/eyeing.py +821 -0
- codes/base/face_geometry.py +2678 -0
- codes/base/iris_lm_depth.py +157 -0
- codes/calibration.py +582 -0
- codes/crt_train_models.py +561 -0
- codes/eye_track.py +701 -0
- codes/jupyter_notebook/check_boi_model_on_et.ipynb +178 -0
- codes/jupyter_notebook/crt_emp_2mdl_et.ipynb +317 -0
- codes/jupyter_notebook/crt_emp_mdl_io.ipynb +334 -0
- codes/jupyter_notebook/detect_blink.ipynb +333 -0
- codes/jupyter_notebook/fixations_in_AOIs.ipynb +0 -0
- codes/jupyter_notebook/rtn_2mdl_et.ipynb +791 -0
- codes/jupyter_notebook/rtn_mdl_boi.ipynb +505 -0
- codes/jupyter_notebook/tr_2mdl_et.ipynb +0 -0
- codes/jupyter_notebook/tr_mdl_io.ipynb +0 -0
- codes/sampling.py +395 -0
- codes/see_data.py +383 -0
- codes/show.py +109 -0
- codes/tune_models_params.py +198 -0
- codes/work.py +143 -0
- crt_exe_gui.spec +46 -0
- docs/USE_APP.md +86 -0
- docs/images/468_landmarks.jpg +0 -0
- docs/images/Owleye structure.png +0 -0
- docs/images/logo.ico +0 -0
- main.py +159 -0
- main_gui.py +514 -0
- requirements.txt +74 -0
.gitignore
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
pip-wheel-metadata/
|
| 24 |
+
share/python-wheels/
|
| 25 |
+
*.egg-info/
|
| 26 |
+
.installed.cfg
|
| 27 |
+
*.egg
|
| 28 |
+
MANIFEST
|
| 29 |
+
|
| 30 |
+
# PyInstaller
|
| 31 |
+
# Usually these files are written by a python script from a template
|
| 32 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 33 |
+
*.manifest
|
| 34 |
+
*.spec
|
| 35 |
+
|
| 36 |
+
# Installer logs
|
| 37 |
+
pip-log.txt
|
| 38 |
+
pip-delete-this-directory.txt
|
| 39 |
+
|
| 40 |
+
# Unit test / coverage reports
|
| 41 |
+
htmlcov/
|
| 42 |
+
.tox/
|
| 43 |
+
.nox/
|
| 44 |
+
.coverage
|
| 45 |
+
.coverage.*
|
| 46 |
+
.cache
|
| 47 |
+
nosetests.xml
|
| 48 |
+
coverage.xml
|
| 49 |
+
*.cover
|
| 50 |
+
*.py,cover
|
| 51 |
+
.hypothesis/
|
| 52 |
+
.pytest_cache/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
target/
|
| 76 |
+
|
| 77 |
+
# Jupyter Notebook
|
| 78 |
+
.ipynb_checkpoints
|
| 79 |
+
|
| 80 |
+
# IPython
|
| 81 |
+
profile_default/
|
| 82 |
+
ipython_config.py
|
| 83 |
+
|
| 84 |
+
# pyenv
|
| 85 |
+
.python-version
|
| 86 |
+
|
| 87 |
+
# pipenv
|
| 88 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 89 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 90 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 91 |
+
# install all needed dependencies.
|
| 92 |
+
#Pipfile.lock
|
| 93 |
+
|
| 94 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
| 95 |
+
__pypackages__/
|
| 96 |
+
|
| 97 |
+
# Celery stuff
|
| 98 |
+
celerybeat-schedule
|
| 99 |
+
celerybeat.pid
|
| 100 |
+
|
| 101 |
+
# SageMath parsed files
|
| 102 |
+
*.sage.py
|
| 103 |
+
|
| 104 |
+
# Environments
|
| 105 |
+
.env
|
| 106 |
+
.venv
|
| 107 |
+
env/
|
| 108 |
+
venv/
|
| 109 |
+
ENV/
|
| 110 |
+
env.bak/
|
| 111 |
+
venv.bak/
|
| 112 |
+
|
| 113 |
+
# Spyder project settings
|
| 114 |
+
.spyderproject
|
| 115 |
+
.spyproject
|
| 116 |
+
|
| 117 |
+
# Rope project settings
|
| 118 |
+
.ropeproject
|
| 119 |
+
|
| 120 |
+
# mkdocs documentation
|
| 121 |
+
/site
|
| 122 |
+
|
| 123 |
+
# mypy
|
| 124 |
+
.mypy_cache/
|
| 125 |
+
.dmypy.json
|
| 126 |
+
dmypy.json
|
| 127 |
+
|
| 128 |
+
# Pyre type checker
|
| 129 |
+
.pyre/
|
| 130 |
+
|
| 131 |
+
# pycharm
|
| 132 |
+
.idea/
|
| 133 |
+
|
| 134 |
+
# Eye-Tracker
|
| 135 |
+
gui/
|
| 136 |
+
media/
|
| 137 |
+
subjects/
|
| 138 |
+
build/
|
| 139 |
+
dist/
|
| 140 |
+
codes/p1.py
|
| 141 |
+
codes/temp1.py
|
| 142 |
+
codes/.ipynb_checkpoints/
|
| 143 |
+
codes/__pycache__/
|
| 144 |
+
codes/backup_codes/
|
| 145 |
+
models/io/raw/
|
| 146 |
+
models/io/trained/*
|
| 147 |
+
!models/io/trained/mdl1.h5
|
| 148 |
+
!models/io/trained/mdl1.pickle
|
| 149 |
+
models/et/raw/
|
| 150 |
+
models/et/trained/*
|
| 151 |
+
!models/et/trained/mdl1-hrz.h5
|
| 152 |
+
!models/et/trained/mdl1-vrt.h5
|
| 153 |
+
!models/et/trained/mdl1.pickle
|
| 154 |
+
!crt_exe_gui.spec
|
| 155 |
+
dataset/
|
| 156 |
+
codes/jupyter_notebook/FinalPlots.ipynb
|
| 157 |
+
codes/jupyter_notebook/temp1.ipynb
|
| 158 |
+
temp1.py
|
README.md
CHANGED
|
@@ -1,5 +1,143 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Owleye
|
| 2 |
+
## Intro
|
| 3 |
+
Owleye gives you the possibility to transform your webcam into an eye tracker. First, you should calibrate Owleye so it gets to know you; then it detects which point on your monitor you are looking at.
|
| 4 |
+
|
| 5 |
+
**Demo: Click on the image below and see the video explanation.**
|
| 6 |
+
|
| 7 |
+
[](https://www.youtube.com/watch?v=E6OmSAgI)
|
| 8 |
+
___
|
| 9 |
+
## Installation
|
| 10 |
+
|
| 11 |
+
### 1.Use source code:
|
| 12 |
+
|
| 13 |
+
Open terminal, download the repo:
|
| 14 |
+
`git clone <repo address>`
|
| 15 |
+
|
| 16 |
+
(or just download the zip file)
|
| 17 |
+
|
| 18 |
+
Go to the project directory:
|
| 19 |
+
`cd Owleye`
|
| 20 |
+
|
| 21 |
+
make a virtual environment:
|
| 22 |
+
`python -m venv env` or `virtualenv env`
|
| 23 |
+
|
| 24 |
+
activate the virtual environment:
|
| 25 |
+
|
| 26 |
+
Windows:
|
| 27 |
+
`.\env\Scripts\activate`
|
| 28 |
+
|
| 29 |
+
Linux:
|
| 30 |
+
`source env/bin/activate`
|
| 31 |
+
|
| 32 |
+
Install required libraries:
|
| 33 |
+
`pip install -r requirements.txt`
|
| 34 |
+
|
| 35 |
+
### 2. Use .exe file
|
| 36 |
+
|
| 37 |
+
Download the release file. It is tested on Windows 10.
|
| 38 |
+
|
| 39 |
+
## Usage
|
| 40 |
+
|
| 41 |
+
If you are using the source code, after activating the virtual environment, run main.py or main_gui.py: `python main.py` or `python main_gui.py`
|
| 42 |
+
|
| 43 |
+
main_gui.py is a simplified version of main.py. There are some methods in main.py that are not in main_gui.py. Also, main_gui.py has GUI. Usage of main_gui.py is much easier.
|
| 44 |
+
|
| 45 |
+
If you have downloaded the Owleye.exe, run it. This file is exactly main_gui.py with the needed libraries.
|
| 46 |
+
|
| 47 |
+
This is the opened window:
|
| 48 |
+
|
| 49 |
+

|
| 50 |
+
|
| 51 |
+
You can learn about the program's usage in [this tutorial](https://github.com/owleye/Owleye/blob/main/docs/USE_APP.md).
|
| 52 |
+
|
| 53 |
+
## Method
|
| 54 |
+
|
| 55 |
+
**Owleye's structure:**
|
| 56 |
+
|
| 57 |
+

|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
While the camera is streaming, Owleye gets the images and extracts head and eyes features (blocks 1-5 in above image). Then it feeds this data to the neural networks (NN) models to calculate the user's eye viewpoint (block 6 in above image).
|
| 61 |
+
|
| 62 |
+
### Calculating the sixth block's inputs
|
| 63 |
+
|
| 64 |
+
As is visible in the first block of the [Owleye's structure](https://github.com/owleye/Owleye/blob/main/README.md#:~:text=Method-,Owleye%27s%20structure%3A,-While%20the%20camera), it receives the user's images over time and, after detecting their face, the second block extracts their [468 landmarks/keypoints](https://github.com/owleye/Owleye/blob/main/docs/images/468_landmarks.jpg). This is done by the canonical face model, which lives in world coordinates. Owleye uses the Mediapipe package to implement these steps. Then, in the third block, Owleye computes the face rotation and position vectors from the extracted landmarks. In the fourth block, Owleye extracts the eye images using the landmarks and passes them to the fifth block to calculate iris positions. The image below shows the landmarks and the rotation vector on a face:
|
| 65 |
+
|
| 66 |
+

|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
Finally, three types of inputs are ready to be fed to the sixth block, which is the eye viewpoint predictive model:
|
| 70 |
+
- **Head rotation and position vectors:** (r1, r2, r3), (x, y, z). Rotation and position, world coordinates.
|
| 71 |
+
- **Left and right eyes iris:** (xl, yl), (xr, yr). These are calculated respect to the eyes (image coordinates).
|
| 72 |
+
- **Eyes images:** Two images are concatenated together in rows.
|
| 73 |
+
|
| 74 |
+
We will consider the first and the second inputs as the **face vector** which has a length of 10.
|
| 75 |
+
|
| 76 |
+

|
| 77 |
+
|
| 78 |
+
### Sixth block's output
|
| 79 |
+
|
| 80 |
+
The output of Owleye is a vector of the user's eye view points on the screen (xp, yp) per sample (an image and a vector). Over time, this output becomes a matrix of shape n by 2, with values normalized between 0 and 1. For example, if the program tracks the user for 10 seconds at 15 FPS, we get a matrix of shape 150 by 2. The first column is the horizontal direction and the second is the vertical direction.
|
| 81 |
+
|
| 82 |
+
### Calibration
|
| 83 |
+
The calibration process consists of looking at a white point in a black screen for a certain time. Then, the point's position changes and the user must look at it again. This process is repeated until the calibration ends. During this procedure, Owleye collects data (input and output). It means each sample data entails one image, one face vector and one location point. This is because we already have the first five blocks, and the models and calculations have been prepared. Just the sixth block should be made.
|
| 84 |
+
|
| 85 |
+
### Dataset
|
| 86 |
+
|
| 87 |
+
We implemented the calibration on 20 male subjects and collected 221000 samples (eye images and face vectors as inputs, and the appearing point's locations as outputs). The dataset was collected in an environment like the image below. The subjects were seated in a driving simulator and were instructed for Owleye's calibration. The camera in front of them is a Microsoft Lifecam VX-6000 (in the red area). The distance between the camera and the participants was nearly 80 cm. There were three monitors (1280 x 720) located 170 cm from the user.
|
| 88 |
+
|
| 89 |
+

|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
### Modeling
|
| 93 |
+
|
| 94 |
+
For the sixth block in [Owleye's structure](https://github.com/owleye/Owleye/blob/main/README.md#:~:text=Method-,Owleye%27s%20structure%3A,-While%20the%20camera), Two Convolutional Neural Network (CNN) models are used to predict the user's eye view point in the [horizonal](https://github.com/owleye/Owleye/blob/main/models/et/trained/mdl1-hrz.h5) and [vertical](https://github.com/owleye/Owleye/blob/main/models/et/trained/mdl1-vrt.h5) directions on the monitor. These models are trained using the aforementioned [dataset](https://github.com/owleye/Owleye/tree/main#:~:text=should%20be%20made.-,Dataset,-We%20implemented%20calibration). We called them "base models" or "et models" (Abbreviation of eye tracking). They are located in the models folder.
|
| 95 |
+
|
| 96 |
+
**Network architecture:**
|
| 97 |
+

|
| 98 |
+
|
| 99 |
+
Right side of the above picture illustrates the CNN model's structure. In this model there is two branches. The left branch is for the eyes image, and the right branch is for a vector with a length of 10. six value for head's rotation and position and 4 value for iris position.
|
| 100 |
+
|
| 101 |
+
The horizontal and vertical base models' accuracy on the test data were 95.9% and 85.4%, respectively.
|
| 102 |
+
|
| 103 |
+
### Fine-tuning
|
| 104 |
+
|
| 105 |
+
Owleye and its base models are built using the dataset we already explained. Features like the light condition, camera quality, camera position, and monitor positions were stationary in the dataset; they are specific to that environment. A rich dataset would be robust to all of the mentioned situations and features: collected in various light conditions, with different cameras and different positions for the camera, user, and monitors. However, due to the cost, that was not practical for us. So we decided to provide a calibration step. In this way, we can collect a small amount of data from the person whose eye movement we want to detect. Then we can retrain the [base models in the sixth block](https://github.com/owleye/Owleye/blob/main/README.md#:~:text=Method-,Owleye%27s%20structure%3A,-While%20the%20camera) using the newly collected data. In effect, we customize Owleye for each person: the last layer's weights of the base models change based on the newly collected data. The network keeps its original shape and is only slightly calibrated per person. Finally, Owleye becomes familiar with the new light condition and device positions. So, even though Owleye was built on a specific dataset with specific features, you can use it in different conditions (e.g. your own computer with different features).
|
| 106 |
+
|
| 107 |
+
### Fixations
|
| 108 |
+
|
| 109 |
+
The [IV-T method](https://tobii.23video.com/the-tobii-pro-fixation-filters-eye-movement) is used to extract user's fixations. A [fixation](https://en.wikipedia.org/wiki/Fixation_(visual)) is a series of eye view points that are close together. So, first of all we removed the outliers using median filter. Then we merged close fixations and removed short ones. below image shows the fixations in a monitor during a certain time. In [this file](https://github.com/owleye/Owleye/blob/main/codes/jupyter_notebook/fixations_in_AOIs.ipynb) you can see the way of calculating the fixations.
|
| 110 |
+
|
| 111 |
+

|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
### Blinking
|
| 115 |
+
|
| 116 |
+
Indeed, while the user is blinking, they aren't seeing anywhere. So, the data in that short time should be removed. We've calculated the blinking using Eye Aspect Ratio (EAR) method. In this way, when the user's EAR goes lower than a certain threshold, it is considered as a blink. So, the output (x, y) will be deleted in the next computations. Also in this periods, we can interpolate the outputs during time.
|
| 117 |
+
|
| 118 |
+

|
| 119 |
+
|
| 120 |
+
### In-out model
|
| 121 |
+
|
| 122 |
+
A model called io is trained to see whether the user is looking into the screen or not. This is because the et model always predict a point, no matter the user is looking inside of the screen or outside of it. So, because the et (base models) are trained using the data of inside of the screen, the et models can't extrapolate when the user is looking outside of the screen. This model is in the "models/io" folder.
|
| 123 |
+
|
| 124 |
+
## Limitations and future works
|
| 125 |
+
**1) Reconstructing the whole code:** The structure of the code is terrible :)) Owleye was made in 2021 and I have not dedicated much time to improving it since then. Therefore, a lot of things have changed. The structure of the code can be totally redesigned to reach better performance, and the code can be more object oriented. The libraries (mediapipe and tensorflow) have changed a lot, so the algorithm can be rewritten considering those changes.
|
| 126 |
+
|
| 127 |
+
**2) Changing the calibration algorithm:** The calibration duration time is really long. Using methods like image morphing makes it unnecessary to collect images in all positions and angles of the head and eyes.
|
| 128 |
+
|
| 129 |
+
**3) Changing the fine-tuning method:** In the current method, to retrain the algorithm, we considered to just change the weights in the last layer of the network. This fine-tuning process can be improved by implementing better solutions.
|
| 130 |
+
|
| 131 |
+
**4) Adding camera calibration:** The computed head angles and positions are meaningless when the camera is not calibrated. By calibrating and having angles and positions of the head, we can calculate the real eyes' angles and positions. So, using these parameters, we can implement better methods to reach to the eyes view point. Maybe just with a simple linear regression model and real parameters of the head and eyes we could get to the target.
|
| 132 |
+
|
| 133 |
+
**5) Creating a python library:** It can be desired to create a package from the code. So, everybody could just install and import the library and use it as they want.
|
| 134 |
+
|
| 135 |
+
**6) Providing real-time usage:** For now, it isn't possible to use the program in real-time. Because the FPS goes down in this way. the program's FPS for a camera that is 30 FPS reaches to 15. So, by optimizing some packages, we can get to a better result.
|
| 136 |
+
|
| 137 |
+
## Contributing
|
| 138 |
+
|
| 139 |
+
Feel free to improve the project. I'll appreciate your pull requests.
|
| 140 |
+
|
| 141 |
+
## About project
|
| 142 |
+
|
| 143 |
+
Owleye is a subsection of my master thesis: "Driver's Hazard Perception Assessment in a Driving Simulator".
|
codes/__init__.py
ADDED
|
File without changes
|
codes/__pycache__/__init__.cpython-39.pyc
ADDED
|
Binary file (127 Bytes). View file
|
|
|
codes/__pycache__/calibration.cpython-39.pyc
ADDED
|
Binary file (10.2 kB). View file
|
|
|
codes/__pycache__/crt_train_models.cpython-39.pyc
ADDED
|
Binary file (12 kB). View file
|
|
|
codes/__pycache__/eye_track.cpython-39.pyc
ADDED
|
Binary file (13.3 kB). View file
|
|
|
codes/__pycache__/sampling.cpython-39.pyc
ADDED
|
Binary file (7.08 kB). View file
|
|
|
codes/__pycache__/see_data.cpython-39.pyc
ADDED
|
Binary file (8.64 kB). View file
|
|
|
codes/__pycache__/show.cpython-39.pyc
ADDED
|
Binary file (2.04 kB). View file
|
|
|
codes/__pycache__/tune_models_params.cpython-39.pyc
ADDED
|
Binary file (4.62 kB). View file
|
|
|
codes/__pycache__/work.cpython-39.pyc
ADDED
|
Binary file (3.01 kB). View file
|
|
|
codes/base/__pycache__/core.cpython-39.pyc
ADDED
|
Binary file (3.57 kB). View file
|
|
|
codes/base/__pycache__/eyeing.cpython-39.pyc
ADDED
|
Binary file (13.3 kB). View file
|
|
|
codes/base/__pycache__/face_geometry.cpython-39.pyc
ADDED
|
Binary file (26 kB). View file
|
|
|
codes/base/__pycache__/iris_lm_depth.cpython-39.pyc
ADDED
|
Binary file (3.44 kB). View file
|
|
|
codes/base/core.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import tensorflow as tf
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def tflite_inference(inputs, model_path, dtype=np.float32):
    """Run a single forward pass of a TFLite model.

    Parameters:
        inputs: one array-like, or a list/tuple of array-likes (one per
            model input); each is fed with a leading batch axis of 1.
        model_path: path to the .tflite model file.
        dtype: numpy dtype the inputs are cast to before feeding.

    Returns:
        A list containing one numpy array per model output.
    """
    # Accept a bare array as a convenience and normalize to a sequence.
    if not isinstance(inputs, (list, tuple)):
        inputs = (inputs,)

    # NOTE(review): the interpreter is rebuilt on every call; callers in a
    # hot loop may want to cache it per model_path.
    interpreter = tf.lite.Interpreter(model_path=model_path)
    interpreter.allocate_tensors()

    in_details = interpreter.get_input_details()
    out_details = interpreter.get_output_details()

    # Feed each input tensor; None-indexing adds the batch dimension.
    for tensor, detail in zip(inputs, in_details):
        interpreter.set_tensor(detail["index"], np.array(tensor[None, ...], dtype=dtype))

    interpreter.invoke()

    # get_tensor() returns copies, so the results outlive the interpreter.
    return [interpreter.get_tensor(detail["index"]) for detail in out_details]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def landmarks_to_detections(landmarks):
    """Convert a (3, N) landmark array into a detection dict.

    Parameters:
        landmarks: (3, N) array; rows are x, y, z coordinates.

    Returns:
        dict with:
            "bboxs": axis-aligned bounding box of the landmarks
                ("x_min", "y_min", "width", "height")
            "keypoints": the (2, N) x/y rows of the input.
    """
    xs = landmarks[0, :]
    ys = landmarks[1, :]
    x_lo, x_hi = np.amin(xs), np.amax(xs)
    y_lo, y_hi = np.amin(ys), np.amax(ys)

    return {
        "bboxs": {
            "x_min": x_lo,
            "y_min": y_lo,
            "width": x_hi - x_lo,
            "height": y_hi - y_lo,
        },
        "keypoints": landmarks[:2, :],
    }
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def detections_to_rect(
    detections,
    image_size,
    rotation_vector_start_end=None,
    rotation_vector_target_angle=0,
):
    """Build a (possibly rotated) rect dict from detection keypoints.

    Parameters:
        detections: dict with a (2, N) "keypoints" array.
        image_size: (width, height) of the image in pixels; only used
            when a rotation is computed.
        rotation_vector_start_end: optional (start_idx, end_idx) keypoint
            pair defining the rotation vector.
        rotation_vector_target_angle: target angle (radians) the vector
            should be rotated to.

    Returns:
        dict with "x_center", "y_center", "width", "height" and
        "rotation" (None when no start/end pair was supplied).
    """
    kps = detections["keypoints"]
    x_lo, x_hi = np.amin(kps[0, :]), np.amax(kps[0, :])
    y_lo, y_hi = np.amin(kps[1, :]), np.amax(kps[1, :])

    rect = {
        "x_center": (x_lo + x_hi) / 2,
        "y_center": (y_lo + y_hi) / 2,
        "width": x_hi - x_lo,
        "height": y_hi - y_lo,
    }

    # Rotation is only defined when a start/end keypoint pair is given.
    if rotation_vector_start_end is None:
        rect["rotation"] = None
    else:
        rect["rotation"] = compute_rotation(
            detections,
            image_size,
            rotation_vector_start_end,
            rotation_vector_target_angle,
        )

    return rect
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def compute_rotation(detections, image_size, rotation_vector_start_end, target_angle):
    """Rotation (radians, wrapped to [-pi, pi)) needed to bring the
    start->end keypoint vector to *target_angle*.

    Parameters:
        detections: dict with a (2, N) "keypoints" array in normalized coords.
        image_size: (width, height) in pixels, used to de-normalize.
        rotation_vector_start_end: (start_idx, end_idx) keypoint indices.
        target_angle: desired angle of the vector, radians.

    Returns:
        The wrapped rotation angle in radians.
    """
    kps = detections["keypoints"]
    start, end = rotation_vector_start_end

    # Scale normalized keypoints into pixel coordinates.
    x0 = kps[0, start] * image_size[0]
    y0 = kps[1, start] * image_size[1]
    x1 = kps[0, end] * image_size[0]
    y1 = kps[1, end] * image_size[1]

    # Image y axis points down, hence the sign flip on dy.
    raw = target_angle - np.arctan2(-(y1 - y0), x1 - x0)

    # Wrap into [-pi, pi) (same formula as normalize_radians).
    return raw - 2 * np.pi * np.floor((raw + np.pi) / (2 * np.pi))
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def normalize_radians(angle):
    """Wrap *angle* (radians) into the half-open interval [-pi, pi)."""
    two_pi = 2 * np.pi
    return angle - two_pi * np.floor((angle + np.pi) / two_pi)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def transform_rect(
    rect,
    image_size,
    scale_x=1,
    scale_y=1,
    shift_x=0,
    shift_y=0,
    square_long=True,
    square_short=False,
    opt_rotation=None,
):
    """Shift, optionally squarify, and scale a rect dict in place.

    Parameters:
        rect: dict with normalized "x_center", "y_center", "width",
            "height" and "rotation" (radians or None).
        image_size: (width, height) of the image in pixels.
        scale_x, scale_y: multipliers applied to the final side lengths.
        shift_x, shift_y: shifts expressed as fractions of the rect size.
        square_long: make the rect square using the longer pixel side.
        square_short: make it square using the shorter pixel side
            (only consulted when square_long is False).
        opt_rotation: extra rotation folded into the shift direction
            (the rect's stored "rotation" entry itself is not updated).

    Returns:
        The same rect dict, mutated in place.
    """
    w = rect["width"]
    h = rect["height"]
    rot = rect["rotation"]
    img_w = image_size[0]
    img_h = image_size[1]

    # Fold the optional extra rotation into the local working angle only.
    if rot is not None and opt_rotation is not None:
        rot = normalize_radians(rot + opt_rotation)

    if rot is None:
        # Axis-aligned: shifts are plain fractions of the rect size.
        rect["x_center"] = rect["x_center"] + w * shift_x
        rect["y_center"] = rect["y_center"] + h * shift_y
    else:
        # Rotated: shift in the rect's local frame; the pixel round-trip
        # keeps the result in normalized image coordinates.
        cos_r = np.cos(rot)
        sin_r = np.sin(rot)
        dx = (img_w * w * shift_x * cos_r - img_h * h * shift_y * sin_r) / img_w
        dy = (img_w * w * shift_x * sin_r + img_h * h * shift_y * cos_r) / img_h
        rect["x_center"] = rect["x_center"] + dx
        rect["y_center"] = rect["y_center"] + dy

    if square_long:
        side = np.max((w * img_w, h * img_h))
        w = side / img_w
        h = side / img_h
    elif square_short:
        side = np.min((w * img_w, h * img_h))
        w = side / img_w
        h = side / img_h

    rect["width"] = w * scale_x
    rect["height"] = h * scale_y

    return rect
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def slice_from_roi(roi, image_size, horizontal_side=True):
    """Pixel index pair (start, end) covering one side of a normalized ROI.

    Parameters:
        roi: rect dict with normalized "x_center"/"width" and
            "y_center"/"height" entries.
        image_size: (width, height) of the image in pixels.
        horizontal_side: True for the x-extent, False for the y-extent.

    Returns:
        (first_id, second_id): int pixel indices of the side's extent.
    """
    if horizontal_side:
        center, extent, pixels = roi["x_center"], roi["width"], image_size[0]
    else:
        center, extent, pixels = roi["y_center"], roi["height"], image_size[1]

    half = extent / 2
    return (int((center - half) * pixels), int((center + half) * pixels))
|
codes/base/eyeing.py
ADDED
|
@@ -0,0 +1,821 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module is like a tool for just eyes and also the alll of the program. There are a lot of functions here"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import numpy as np
|
| 6 |
+
import mediapipe as mp
|
| 7 |
+
import cv2
|
| 8 |
+
import pickle
|
| 9 |
+
from sklearn.utils import shuffle
|
| 10 |
+
from screeninfo import get_monitors
|
| 11 |
+
from codes.base.face_geometry import PCF, procrustes_landmark_basis, get_metric_landmarks
|
| 12 |
+
from codes.base.iris_lm_depth import from_landmarks_to_depth as fl2d
|
| 13 |
+
import time
|
| 14 |
+
import math
|
| 15 |
+
from screeninfo import get_monitors
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# --- MediaPipe FaceMesh configuration ---
STATIC_IMAGE_MODE = False  # treat input as a video stream (track between frames)
MIN_TRACKING_CONFIDENCE = 0.5
MIN_DETECTION_CONFIDENCE = 0.5
# Size (width, height) each cropped eye image is resized to before stacking.
EYE_SIZE = (100, 50)
# Scaling factors — presumably normalisation constants for model training;
# TODO confirm at call sites.
Y_SCALER = 1000.0
X1_SCALER = 255.0
# BGR colour triplets for the OpenCV drawing helpers below.
WHITE = (220, 220, 220)
BLACK = (0, 0, 0)
GRAY = (70, 70, 70)
RED = (0, 0, 220)
BLUE = (220, 0, 0)
GREEN = (0, 220, 0)
# Relative and absolute paths to the repository root (this file is two levels deep).
PATH2ROOT = ""
PATH2ROOT_ABS = os.path.dirname(__file__) + "/../../"
# Short tags used as folder/file names throughout the project
# (meanings inferred from usage elsewhere — verify against callers).
CLB = "clb"
IO = "io"
LTN = "ltn"
ACC = "acc"
SMP = "smp"
MDL = "mdl"
RAW = "raw"
TRAINED = "trained"
# Pickle base names for recorded signals.
T = "t"
X1 = "x1"
X2 = "x2"
Y = "y"
ER = "er"
FV = "fv"
# Velocity threshold on the eye-aspect-ratio signal above which a blink is assumed.
DEFAULT_BLINKING_THRESHOLD = 4.5
# Frame count used when measuring latency.
LATENCY_WAITING_TIME = 50
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def get_mesh():
    """
    Build and return a configured MediaPipe face-mesh model.

    Returns:
        face_mesh: a ``mp.solutions.face_mesh.FaceMesh`` instance configured
            with the module-level confidence constants.
    """
    print("Configuring face detection model...")
    return mp.solutions.face_mesh.FaceMesh(
        static_image_mode=STATIC_IMAGE_MODE,
        min_tracking_confidence=MIN_TRACKING_CONFIDENCE,
        min_detection_confidence=MIN_DETECTION_CONFIDENCE,
    )
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def get_clb_win_prp(clb_win_align=(0, 0)):
    """
    Compute the calibration window size from the screen resolution.

    The original implementation kept whichever monitor ``get_monitors``
    enumerated last; here the primary monitor is preferred when the backend
    reports one, falling back to the last enumerated monitor otherwise.

    Parameters:
        clb_win_align: (width, height) margins subtracted from the screen
            size (the window's top-left alignment offset).

    Returns:
        clb_win_size: (width, height) of the calibration window.

    Raises:
        RuntimeError: if no monitor can be detected (the original crashed
            with a TypeError in that case).
    """
    clb_win_w_align, clb_win_h_align = clb_win_align
    screen_w = None
    screen_h = None
    for m in get_monitors():
        screen_w = m.width
        screen_h = m.height
        # Prefer the primary display when the platform reports one.
        if getattr(m, "is_primary", False):
            break

    if screen_w is None or screen_h is None:
        raise RuntimeError("No monitor detected; cannot size calibration window.")

    clb_win_w = screen_w - clb_win_w_align
    clb_win_h = screen_h - clb_win_h_align
    clb_win_size = (clb_win_w, clb_win_h)

    return clb_win_size
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def get_some_landmarks_ids():
    """
    Return the landmark indices used to compute the face rotation and
    position vectors.

    Combines the three jaw landmarks with the landmarks of the MediaPipe
    procrustes basis, sorted ascending.

    Returns:
        some_landmarks_ids: sorted list of landmark indices.
    """
    ids = [61, 291, 199]  # jaw landmarks
    ids.extend(key for key, _ in procrustes_landmark_basis)
    ids.sort()
    return ids
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def get_camera_properties(camera_id):
    """
    Getting the camera properties.

    Opens the camera once, requests 1280x720, reads back what the driver
    actually delivers, and derives an approximate pinhole intrinsic matrix
    (focal length = frame width, principal point = frame center).

    Parameters:
        camera_id: camera ID

    Returns:
        fr_size: the frame size actually delivered (may differ from 1280x720)
        camera_matrix: the intrinsic matrix of the camera
        dst_cof: distortion coefficients of the camera (assumed zero)
        pcf: an object that is needed for later metric-landmark calculations
    """
    print("Getting camera properties...")
    fr_w, fr_h = 1280, 720
    cap = cv2.VideoCapture(camera_id)  # (tp.CAMERA_ID, cv2.CAP_DSHOW)
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, fr_w)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, fr_h)

    # Read back the resolution the driver actually accepted.
    new_fr_w = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
    new_fr_h = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
    cap.release()
    fr_size = new_fr_w, new_fr_h

    fr_center = (new_fr_w // 2, new_fr_h // 2)
    # Approximation: focal length in pixels taken as the frame width.
    focal_length = new_fr_w
    camera_matrix = np.array([
        [focal_length, 0, fr_center[0]],
        [0, focal_length, fr_center[1]],
        [0, 0, 1]], dtype="double")
    dst_cof = np.zeros((4, 1))

    # NOTE(review): PCF is built from the *requested* size (fr_w/fr_h) while
    # camera_matrix uses the *actual* size — confirm this mismatch is intended.
    pcf = PCF(
        frame_height=fr_h,
        frame_width=fr_w,
        fy=fr_w)
    return fr_size, camera_matrix, dst_cof, pcf
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def get_camera(camera_id, frame_size):
    """
    Open a capture device and request the given frame size.

    Parameters:
        camera_id: Camera ID
        frame_size: (width, height) to request from the driver

    Returns:
        cap: the OpenCV capture object
    """
    requested_w, requested_h = frame_size
    cap = cv2.VideoCapture(camera_id)  # (camera_id, cv2.CAP_DSHOW)
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, requested_w)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, requested_h)
    return cap
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def get_frame(cap):
    """
    Grab one frame and return it in both BGR and RGB.

    The frame is mirrored horizontally so the image behaves like a mirror.

    Parameters:
        cap: the capture object

    Returns:
        success: whether a frame was received
        img: the mirrored frame (BGR), or None on failure
        img_rgb: the same frame in RGB (for the face mesh model), or None
    """
    success, img = cap.read()
    if not success:
        return success, None, None

    img = cv2.flip(img, 1)
    img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    return success, img, img_rgb
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def get_eyes_pixels(eye_pixels):
    """
    Compute a padded bounding box around one eye.

    Parameters:
        eye_pixels: (N, 2) integer array of eye landmark pixel coordinates

    Returns:
        eye_top_left: (x, y) of the padded top-left corner
        eye_bottom_right: (x, y) of the padded bottom-right corner
    """
    xs = eye_pixels[:, 0]
    ys = eye_pixels[:, 1]
    left, right = np.min(xs), np.max(xs)
    top, bottom = np.min(ys), np.max(ys)

    # Pad proportionally to the eye width, with a floor of 25 px so very
    # small detections still get a usable crop.
    width = max(right - left, 25)
    pad_top = int(0.35 * width)
    pad_bottom = int(0.25 * width)
    pad_left = int(0.2 * width)
    pad_right = int(0.1 * width)

    return (left - pad_left, top - pad_top), (right + pad_right, bottom + pad_bottom)
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def get_face(all_landmarks_pixels):
    """
    Return the bounding box of all face landmarks.

    Parameters:
        all_landmarks_pixels: (N, 2) array of landmark pixel coordinates

    Returns:
        (face_left, face_right, face_top, face_bottom)
    """
    xs = all_landmarks_pixels[:, 0]
    ys = all_landmarks_pixels[:, 1]
    return xs.min(), xs.max(), ys.min(), ys.max()
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def get_eyes_ratio(all_landmarks):
    """
    Compute the eye width/height aspect ratio averaged over both eyes.

    Larger values correspond to more closed eyes (width divided by lid gap).

    Parameters:
        all_landmarks: (468, 3) array of normalized face-mesh landmarks

    Returns:
        ear: mean of width/height for the left and right eye
    """
    def _dist(a, b):
        return np.sqrt(((all_landmarks[a, :2] - all_landmarks[b, :2]) ** 2).sum())

    # Left eye: corners 33-133, lid pairs (159,145) and (158,153).
    left_width = _dist(33, 133)
    left_height = (_dist(159, 145) + _dist(158, 153)) / 2

    # Right eye: corners 362-263, lid pairs (385,380) and (386,374).
    right_width = _dist(362, 263)
    right_height = (_dist(385, 380) + _dist(386, 374)) / 2

    return (left_width / left_height + right_width / right_height) / 2
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
def get_model_inputs(
    image,
    image_rgb,
    face_mesh,
    camera_matrix,
    pcf,
    image_size,
    dst_cof,
    some_landmarks_ids,
    show_features=False,
    return_face=False
):
    """
    Prepare the model inputs: eyes image, face rotation/position and iris
    locations.

    Parameters:
        image: the frame (BGR); annotated in place when show_features is True
        image_rgb: the frame (RGB) for the iris-depth helper
        face_mesh: face-mesh result holder — its ``multi_face_landmarks``
            attribute is read, so the frame must already have been processed
        camera_matrix: the camera intrinsic matrix
        pcf: pcf object for computing the metric face landmarks
        image_size: frame size (width, height)
        dst_cof: distortion coefficients of the camera
        some_landmarks_ids: landmark indices used for the head-pose solve
        show_features: whether to draw the extracted features on ``image``
        return_face: whether to also return a cropped, resized face image

    Returns:
        success: whether the eyes extraction was successful
        image: the frame (possibly annotated)
        eyes_gray: grayscale (H, W, 1) image of both eyes stacked vertically,
            or None on failure
        features_vector: 10 values — face rotation (3), face translation (3),
            left iris (2) and right iris (2) relative to the face; an empty
            list on failure
        eye_ratio: the eyes aspect ratio (0.0 when no face is found)
        face_img: cropped face image (non-empty only when return_face is True
            and extraction succeeded)
    """
    left_eye_landmarks_ids = (33, 133)
    right_eye_landmarks_ids = (362, 263)
    jaw_landmarks_ids = (61, 291, 199)
    focal_length = camera_matrix[0, 0]
    success = False
    eyes_gray = None
    features = []
    features_vector = []
    eye_ratio = 0.0
    face_img = []
    face_size = (300, 350)

    mfl = face_mesh.multi_face_landmarks
    # If there is any landmark
    if mfl:
        # Normalized (x, y, z) landmarks of the first detected face.
        all_landmarks = np.array([(lm.x, lm.y, lm.z) for lm in mfl[0].landmark])
        # NOTE(review): np.uint32 wraps negative values — this assumes all
        # landmarks lie inside the frame; confirm for faces at the border.
        all_landmarks_pixels = np.array(all_landmarks[:,:2] * image_size, np.uint32)

        eye_ratio = get_eyes_ratio(all_landmarks)

        head_pose_landmarks = all_landmarks.T
        metric_landmarks, _ = get_metric_landmarks(head_pose_landmarks.copy(), pcf)

        # 3D model points and their 2D projections for the PnP solve.
        some_landmarks_model = metric_landmarks[:, some_landmarks_ids].T
        some_landmarks_image = (all_landmarks[some_landmarks_ids, :2] * image_size)

        # Calculating the face rotation vector and face position
        (
            _,
            rotation_vector,
            translation_vector
        ) = cv2.solvePnP(
            some_landmarks_model,
            some_landmarks_image,
            camera_matrix,
            dst_cof,
            flags=cv2.SOLVEPNP_ITERATIVE
        )

        features.append(rotation_vector.reshape((3,)))
        features.append(translation_vector.reshape((3,)))

        # calculating iris location (left then right eye)
        (
            success_left,
            _,
            _,
            left_iris_landmarks,
            _,
            left_iris_landmarks_respect_face
        ) = fl2d(
            image_rgb,
            all_landmarks[left_eye_landmarks_ids, :].T,
            image_size,
            is_right_eye=False,
            focal_length=focal_length
        )

        (
            success_right,
            _,
            _,
            right_iris_landmarks,
            _,
            right_iris_landmarks_respect_face
        ) = fl2d(
            image_rgb,
            all_landmarks[right_eye_landmarks_ids, :].T,
            image_size,
            is_right_eye=True,
            focal_length=focal_length
        )

        if success_left and success_right:
            # Crop each eye from the RGB frame using padded bounding boxes.
            left_eye_pixels = all_landmarks_pixels[left_eye_landmarks_ids, :2]
            left_eye_tl, left_eye_br = get_eyes_pixels(left_eye_pixels)
            eye_left = image_rgb[left_eye_tl[1]:left_eye_br[1], left_eye_tl[0]:left_eye_br[0]]

            right_eye_pixels = all_landmarks_pixels[right_eye_landmarks_ids, :2]
            right_eye_tl, right_eye_br = get_eyes_pixels(right_eye_pixels)
            eye_right = image_rgb[right_eye_tl[1]:right_eye_br[1], right_eye_tl[0]:right_eye_br[0]]

            # Both crops must be non-empty (and contain non-zero pixels).
            if eye_left.any() and eye_right.any():
                success = True

                features.append(left_iris_landmarks_respect_face[0, :2])
                features.append(right_iris_landmarks_respect_face[0, :2])

                # Flatten rotation + translation + both irises into 10 values.
                for feats in features:
                    for feat in feats:
                        features_vector.append(feat)
                features_vector = np.array(features_vector)

                eye_left_resize = cv2.resize(eye_left, EYE_SIZE, interpolation=cv2.INTER_AREA)
                eye_right_resize = cv2.resize(eye_right, EYE_SIZE, interpolation=cv2.INTER_AREA)

                # Stack the two eye crops vertically, convert to single-channel.
                eyes = np.concatenate([eye_left_resize, eye_right_resize])
                eyes_gray = np.expand_dims(cv2.cvtColor(eyes, cv2.COLOR_RGB2GRAY), 2)

                if return_face:
                    fp = get_face(all_landmarks_pixels)
                    face_img = image[fp[2]:fp[3], fp[0]:fp[1]]
                    face_img = cv2.resize(face_img, face_size, interpolation=cv2.INTER_AREA)

                if show_features:
                    # Eye bounding boxes.
                    cv2.rectangle(image, left_eye_tl, left_eye_br, (190, 100, 40), 2)
                    cv2.rectangle(image, right_eye_tl, right_eye_br, (190, 100, 40), 2)

                    jaw_landmarks_pixels = all_landmarks_pixels[jaw_landmarks_ids, :2]
                    for pix in jaw_landmarks_pixels:
                        cv2.circle(image, pix, 2, (0, 255, 255), cv2.FILLED)

                    left_eye_landmarks_pixels = all_landmarks_pixels[left_eye_landmarks_ids, :2]
                    for pix in left_eye_landmarks_pixels:
                        cv2.circle(image, pix, 2, (255, 0, 255), cv2.FILLED)

                    right_eye_landmarks_pixels = all_landmarks_pixels[right_eye_landmarks_ids, :2]
                    for pix in right_eye_landmarks_pixels:
                        cv2.circle(image, pix, 2, (255, 0, 255), cv2.FILLED)

                    # Iris centers.
                    left_iris_pixel = np.array(
                        left_iris_landmarks[0, :2] * image_size).astype(np.uint32)
                    cv2.circle(image, left_iris_pixel, 4, (255, 255, 0), cv2.FILLED)

                    right_iris_pixel = np.array(
                        right_iris_landmarks[0, :2] * image_size).astype(np.uint32)
                    cv2.circle(image, right_iris_pixel, 4, (255, 255, 0), cv2.FILLED)

                    # Project a point 25 units in front of the face to draw
                    # the head-pose direction line.
                    (nose_end_point2D, _) = cv2.projectPoints(
                        np.array([(0.0, 0.0, 25.0)]),
                        rotation_vector,
                        translation_vector,
                        camera_matrix,
                        dst_cof,
                    )

                    p1 = (int(some_landmarks_image[0][0]), int(some_landmarks_image[0][1]))
                    p2 = (int(nose_end_point2D[0][0][0]), int(nose_end_point2D[0][0][1]))

                    cv2.line(image, p1, p2, (127, 64, 255), 2)

    return success, image, eyes_gray, features_vector, eye_ratio, face_img
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def get_time(i, t, print_time=False):
    """
    Compute the frame rate achieved since a reference time.

    Parameters:
        i: number of frames processed so far
        t: start time (a ``time.perf_counter()`` value)
        print_time: if True, also print the elapsed time as minutes:seconds

    Returns:
        fps: frames per second, rounded to 2 decimals
    """
    elapsed = time.perf_counter() - t
    fps = round(i / elapsed, 2)
    if print_time:
        print(f"Elapsed time: {int(elapsed / 60)}:{int(elapsed % 60)}")
    return fps
|
| 458 |
+
|
| 459 |
+
|
| 460 |
+
def create_dir(folders_list):
    """
    Build a nested directory path from path segments, creating each level.

    Parameters:
        folders_list: list of folder names/paths; a trailing "/" is appended
            to each segment when missing.

    Returns:
        fol_dir: the full directory path, ending with "/".
    """
    fol_dir = ""
    for fol in folders_list:
        if fol[-1] != "/":
            fol += "/"
        fol_dir += fol
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists + os.mkdir pair.
        os.makedirs(fol_dir, exist_ok=True)

    return fol_dir
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
def load(fol_dir, data_name):
    """
    Load pickled objects from a folder.

    Parameters:
        fol_dir: folder directory (with trailing separator)
        data_name: iterable of file base names (without ".pickle")

    Returns:
        data: list of the unpickled objects, in the same order as data_name
    """
    print("Loading data from " + fol_dir)
    loaded = []
    for name in data_name:
        with open(fol_dir + name + ".pickle", 'rb') as fh:
            loaded.append(pickle.load(fh))
    return loaded
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def save(data, fol_dir, data_name):
    """
    Pickle each object in *data* into the folder under its matching name.

    Parameters:
        data: objects to save
        fol_dir: the folder directory (with trailing separator)
        data_name: file base names, paired positionally with data

    Returns:
        None
    """
    print("Saving data in " + fol_dir)
    for obj, name in zip(data, data_name):
        with open(fol_dir + name + ".pickle", 'wb') as fh:
            pickle.dump(obj, fh)
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
def remove(fol_dir, files=None):
    """
    Remove pickle files from a folder, or the whole folder tree.

    Parameters:
        fol_dir: folder directory (with trailing separator)
        files: base names (without ".pickle") to delete; when falsy, the
            whole folder is removed recursively instead.

    Returns:
        None
    """
    if not files:
        print("Removing " + fol_dir)
        shutil.rmtree(fol_dir)
        return

    for name in files:
        target = fol_dir + name + ".pickle"
        print("Removing " + target)
        os.remove(target)
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def file_existing(fol_dir, file_name):
    """
    Check whether a file with the given name exists directly in a folder.

    Parameters:
        fol_dir: folder directory
        file_name: full file name (including extension)

    Returns:
        file_exist: True if the folder contains an entry with that exact name
    """
    # Replaces the original manual listdir scan with a membership test
    # (same result, O(n) either way, far clearer).
    return file_name in os.listdir(fol_dir)
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
def pass_frames(cap, n_frames=5):
    """
    Skip a number of frames from the camera.

    Reading and discarding a few frames lets the camera settle before the
    frames are actually used.

    Parameters:
        cap: camera object
        n_frames: number of frames to read and discard

    Returns:
        None
    """
    for _ in range(n_frames):
        get_frame(cap)
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def show_clb_win(
    win_name,
    pnt=None,
    pnt_prd=None,
    texts=None,
    win_color=BLACK,
    win_size=(640, 480),
    pnt_prd_color=BLUE,
    pnt_color=WHITE
):
    """
    Render and show the calibration window.

    Parameters:
        win_name: the window's name
        pnt: calibration point as normalized (x, y) in [0, 1], or None
        pnt_prd: predicted gaze point, normalized (x, y), or None
        texts: iterable of (string, (x_frac, y_frac), scale, color, thickness)
            tuples to draw with cv2.putText
        win_color: background color (BGR)
        win_size: window size (width, height)
        pnt_color: calibration point color
        pnt_prd_color: predicted point color

    Returns:
        None
    """
    # Point diameter scales with the window width.
    pnt_d = int(win_size[0] / 80.0)
    # Solid background image filled channel by channel with win_color.
    clb_img = np.ones((win_size[1], win_size[0], 3))
    clb_img[:, :, 0] = clb_img[:, :, 0] * win_color[0]
    clb_img[:, :, 1] = clb_img[:, :, 1] * win_color[1]
    clb_img[:, :, 2] = clb_img[:, :, 2] * win_color[2]
    clb_img = clb_img.astype(np.uint8)
    # NOTE(review): .any() treats a point at exactly (0, 0) as "no point" —
    # confirm callers never pass the top-left corner as a real target.
    if np.array(pnt).any():
        pxl = (np.array(pnt) * np.array(win_size)).astype(np.uint32)
        cv2.circle(clb_img, pxl, pnt_d, pnt_color, cv2.FILLED)
    if np.array(pnt_prd).any():
        pxl_prd = (np.array(pnt_prd) * np.array(win_size)).astype(np.uint32)
        # Predicted point drawn at half the diameter of the target point.
        cv2.circle(clb_img, pxl_prd, int(pnt_d / 2), pnt_prd_color, cv2.FILLED)
    if texts:
        for tx in texts:
            cv2.putText(clb_img,
                        tx[0],
                        (int(win_size[0]*tx[1][0]), int(win_size[1]*tx[1][1])),
                        cv2.FONT_HERSHEY_SIMPLEX,
                        tx[2],
                        tx[3],
                        tx[4])
    cv2.imshow(win_name, clb_img)
|
| 623 |
+
|
| 624 |
+
|
| 625 |
+
def big_win(win_name="", x_disp=0, y_disp=0):
|
| 626 |
+
"""
|
| 627 |
+
Make the calibration window full size
|
| 628 |
+
|
| 629 |
+
Paramters:
|
| 630 |
+
win_name: window name
|
| 631 |
+
x_disp: x coordinate
|
| 632 |
+
y_disp: y coordinate
|
| 633 |
+
|
| 634 |
+
Returns:
|
| 635 |
+
None
|
| 636 |
+
"""
|
| 637 |
+
cv2.namedWindow(win_name, cv2.WND_PROP_FULLSCREEN)
|
| 638 |
+
cv2.moveWindow(win_name, x_disp, y_disp)
|
| 639 |
+
cv2.setWindowProperty(win_name, cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)
|
| 640 |
+
|
| 641 |
+
|
| 642 |
+
def get_blink_v(t_vec, eyes_ratio):
    """
    Differentiate the eye-aspect-ratio signal with respect to time.

    Parameters:
        t_vec: time vector
        eyes_ratio: eye-aspect-ratio vector, same length as t_vec

    Returns:
        blink_v: finite-difference velocity; the first sample duplicates
            the second so the output keeps the input's length.
    """
    velocity = np.empty_like(eyes_ratio)
    velocity[1:] = np.diff(eyes_ratio) / np.diff(t_vec)
    velocity[0] = velocity[1]
    return velocity
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
def get_blink_duration(t_vec, blinking_period):
    """
    Convert a blink duration in seconds into sample counts around closure.

    Parameters:
        t_vec: time vector (used to estimate the sampling rate)
        blinking_period: blink duration in seconds

    Returns:
        before_closing: number of samples before the eye-closed sample
        after_closing: number of samples after it
    """
    # Mean instantaneous frame rate over the recording.
    inst_rates = 1 / (t_vec[1:] - t_vec[:-1])
    fps = inst_rates.mean()
    sampling_period = 1 / fps
    n_smp_blink = round(blinking_period / sampling_period)

    # Split the blink window roughly 1/3 before closure, 2/3 after.
    before_closing = math.floor(n_smp_blink / 3)
    after_closing = math.floor(2 * n_smp_blink / 3) - 1
    return before_closing, after_closing
|
| 681 |
+
|
| 682 |
+
|
| 683 |
+
def get_blinking_vec(eyes_ratio_v, bc, ac, threshold):
    """
    Expand eye-closure detections into full blink intervals.

    Parameters:
        eyes_ratio_v: eye-aspect-ratio velocity vector
        bc: samples to mark before each closure
        ac: samples to mark after each closure
        threshold: velocity above which the eyes count as closed

    Returns:
        blinking: boolean vector, True on every sample inside a blink
        eyes_ratio_v_blink: vector equal to threshold on blink samples,
            0 elsewhere
    """
    closed = eyes_ratio_v > threshold
    blinking = closed.copy()
    n_smp = blinking.shape[0]

    for idx in np.flatnonzero(closed):
        # Only expand closures that leave room for the full window.
        if bc < idx < n_smp - ac:
            blinking[idx - bc:idx] = True
            blinking[idx + 1:idx + ac + 1] = True

    marked = np.zeros((n_smp,))
    marked[blinking] = threshold
    return blinking, marked
|
| 712 |
+
|
| 713 |
+
|
| 714 |
+
def get_blinking(t_mat, eyes_ratio_mat, threshold=DEFAULT_BLINKING_THRESHOLD, normal_blinking_period=0.4):
    """
    Detect blinks for several recordings at once.

    Parameters:
        t_mat: list of time vectors
        eyes_ratio_mat: list of eye-aspect-ratio vectors (paired with t_mat)
        threshold: blinking threshold on the velocity signal
        normal_blinking_period: assumed blink duration in seconds

    Returns:
        eyes_ratio_v_mat: list of velocity vectors
        blinking_mat: list of boolean blink vectors
        eyes_ratio_v_blink_mat: list of thresholded blink-marker vectors
    """
    eyes_ratio_v_mat = []
    blinking_mat = []
    eyes_ratio_v_blink_mat = []

    for k, eyes_ratio in enumerate(eyes_ratio_mat):
        t_vec = t_mat[k]

        eyes_ratio_v_mat.append(get_blink_v(t_vec, eyes_ratio).copy())
        bc, ac = get_blink_duration(t_vec, normal_blinking_period)
        blinking, marked = get_blinking_vec(eyes_ratio_v_mat[-1], bc, ac, threshold)

        blinking_mat.append(blinking)
        eyes_ratio_v_blink_mat.append(marked)

    return eyes_ratio_v_mat, blinking_mat, eyes_ratio_v_blink_mat
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
def find_max_mdl(fol_dir, a=3, b=-3):
    """
    Find the highest model number among the ".h5" files in a folder.

    Model files embed their number in the file name at character positions
    [a:b) (the defaults match names like "mdl12.h5").

    Parameters:
        fol_dir: folder directory
        a: first index of the model number inside the file name
        b: last index (exclusive) of the model number

    Returns:
        max_num: the highest model number found, or 0 when the folder holds
            no ".h5" files. (The original raised ValueError on a non-empty
            folder containing no ".h5" files.)
    """
    mdl_numbers = [
        int(name[a:b])
        for name in os.listdir(fol_dir)
        if name[-3:] == ".h5"
    ]
    # default=0 covers both an empty folder and a folder without .h5 files.
    return max(mdl_numbers, default=0)
|
| 772 |
+
|
| 773 |
+
|
| 774 |
+
def get_threshold(er_dir, threshold):
    """
    Resolve the blinking threshold from a code or a literal value.

    Parameters:
        er_dir: directory holding the eye-ratio files
        threshold: "d" (default), "ao" (app-offered, read from
            oth_app.pickle), "uo" (user-offered, read from oth_usr.pickle),
            or a numeric string/value

    Returns:
        threshold: the resolved threshold value
    """
    # Table of the two "offered" sources: code -> (pickle base name, label).
    offered = {"ao": ("oth_app", "App"), "uo": ("oth_usr", "User")}

    if threshold == "d":
        threshold = DEFAULT_BLINKING_THRESHOLD
    elif threshold in offered:
        oth, who = offered[threshold]
        if file_existing(er_dir, oth + ".pickle"):
            threshold = load(er_dir , [oth])[0]
        else:
            print(who + " offered threshold does not exist!! We use default threshold.")
            threshold = DEFAULT_BLINKING_THRESHOLD
    else:
        threshold = float(threshold)

    print(f"blinking threshold: {threshold}")
    return threshold
|
| 808 |
+
|
| 809 |
+
|
| 810 |
+
# Getting some directories
# Import-time side effect: builds the on-disk layout for models, scaler
# files and subject data, relative to the repository root.
models_dir = create_dir([PATH2ROOT_ABS+"models"])
io_dir = create_dir([models_dir+"io"])  # in/out models
et_dir = create_dir([models_dir+"et"])  # eye-tracking models
io_raw_dir = create_dir([io_dir, RAW])
io_trained_dir = create_dir([io_dir, TRAINED])
et_raw_dir = create_dir([et_dir, RAW])
et_trained_dir = create_dir([et_dir, TRAINED])
files_dir = create_dir([PATH2ROOT_ABS+"other_files"])
scalers_dir = create_dir([files_dir, "scalers"])
# NOTE(review): uses the relative PATH2ROOT ("" → current working dir),
# unlike the absolute paths above — confirm this is intentional.
subjects_dir = create_dir([PATH2ROOT+"subjects"])
# Enumerate the attached displays once at import time.
monitors = get_monitors()
|
codes/base/face_geometry.py
ADDED
|
@@ -0,0 +1,2678 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Many parts taken from the cpp implementation from github.com/google/mediapipe
|
| 2 |
+
#
|
| 3 |
+
# Copyright 2020 The MediaPipe Authors.
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
|
| 17 |
+
import numpy as np
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Singleton(type):
    """Metaclass that hands out at most one instance per class.

    The first call to ``Cls()`` constructs the instance; every later
    call returns that same cached object.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        # EAFP: the common case after warm-up is a cache hit.
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class Debugger(metaclass=Singleton):
    """Process-wide debug flag holder.

    Because of the ``Singleton`` metaclass every ``Debugger()`` call
    returns the same object, so the flag is shared by all importers.
    """

    def __init__(self):
        # Fix: start with a defined value so toggle()/get_debug() do not
        # raise AttributeError when called before set_debug().
        self.debug = False

    def set_debug(self, debug):
        """Set the debug flag to *debug*."""
        self.debug = debug

    def toggle(self):
        """Flip the debug flag."""
        self.debug = not self.debug

    def get_debug(self):
        """Return the current debug flag."""
        return self.debug
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# Module-level debug switch. Debugger is a singleton, so every module that
# imports DEBUG shares this one flag; debugging is disabled by default.
DEBUG = Debugger()
DEBUG.set_debug(False)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class PCF:
    """Perspective-camera frustum parameters (pinhole camera model).

    Holds the near/far clip distances, the frame size, the vertical
    focal length ``fy``, and the frustum extents (left/right/bottom/top)
    measured at the near clipping plane. Defaults match a portrait
    1080x1920 frame.
    """

    def __init__(
        self,
        near=1,
        far=10000,
        frame_height=1920,
        frame_width=1080,
        fy=1074.520446598223,
    ):
        self.near = near
        self.far = far
        self.frame_height = frame_height
        self.frame_width = frame_width
        self.fy = fy

        # Vertical field of view implied by the focal length.
        vertical_fov = 2 * np.arctan(frame_height / (2 * fy))
        self.fov_y = vertical_fov

        # Size of the view frustum where it crosses the near plane.
        near_plane_height = 2 * near * np.tan(0.5 * vertical_fov)
        near_plane_width = frame_width * near_plane_height / frame_height

        half_width = 0.5 * near_plane_width
        half_height = 0.5 * near_plane_height
        self.left = -half_width
        self.right = half_width
        self.bottom = -half_height
        self.top = half_height
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
canonical_metric_landmarks = np.array(
|
| 75 |
+
[
|
| 76 |
+
0.000000,
|
| 77 |
+
-3.406404,
|
| 78 |
+
5.979507,
|
| 79 |
+
0.499977,
|
| 80 |
+
0.652534,
|
| 81 |
+
0.000000,
|
| 82 |
+
-1.126865,
|
| 83 |
+
7.475604,
|
| 84 |
+
0.500026,
|
| 85 |
+
0.547487,
|
| 86 |
+
0.000000,
|
| 87 |
+
-2.089024,
|
| 88 |
+
6.058267,
|
| 89 |
+
0.499974,
|
| 90 |
+
0.602372,
|
| 91 |
+
-0.463928,
|
| 92 |
+
0.955357,
|
| 93 |
+
6.633583,
|
| 94 |
+
0.482113,
|
| 95 |
+
0.471979,
|
| 96 |
+
0.000000,
|
| 97 |
+
-0.463170,
|
| 98 |
+
7.586580,
|
| 99 |
+
0.500151,
|
| 100 |
+
0.527156,
|
| 101 |
+
0.000000,
|
| 102 |
+
0.365669,
|
| 103 |
+
7.242870,
|
| 104 |
+
0.499910,
|
| 105 |
+
0.498253,
|
| 106 |
+
0.000000,
|
| 107 |
+
2.473255,
|
| 108 |
+
5.788627,
|
| 109 |
+
0.499523,
|
| 110 |
+
0.401062,
|
| 111 |
+
-4.253081,
|
| 112 |
+
2.577646,
|
| 113 |
+
3.279702,
|
| 114 |
+
0.289712,
|
| 115 |
+
0.380764,
|
| 116 |
+
0.000000,
|
| 117 |
+
4.019042,
|
| 118 |
+
5.284764,
|
| 119 |
+
0.499955,
|
| 120 |
+
0.312398,
|
| 121 |
+
0.000000,
|
| 122 |
+
4.885979,
|
| 123 |
+
5.385258,
|
| 124 |
+
0.499987,
|
| 125 |
+
0.269919,
|
| 126 |
+
0.000000,
|
| 127 |
+
8.261778,
|
| 128 |
+
4.481535,
|
| 129 |
+
0.500023,
|
| 130 |
+
0.107050,
|
| 131 |
+
0.000000,
|
| 132 |
+
-3.706811,
|
| 133 |
+
5.864924,
|
| 134 |
+
0.500023,
|
| 135 |
+
0.666234,
|
| 136 |
+
0.000000,
|
| 137 |
+
-3.918301,
|
| 138 |
+
5.569430,
|
| 139 |
+
0.500016,
|
| 140 |
+
0.679224,
|
| 141 |
+
0.000000,
|
| 142 |
+
-3.994436,
|
| 143 |
+
5.219482,
|
| 144 |
+
0.500023,
|
| 145 |
+
0.692348,
|
| 146 |
+
0.000000,
|
| 147 |
+
-4.542400,
|
| 148 |
+
5.404754,
|
| 149 |
+
0.499977,
|
| 150 |
+
0.695278,
|
| 151 |
+
0.000000,
|
| 152 |
+
-4.745577,
|
| 153 |
+
5.529457,
|
| 154 |
+
0.499977,
|
| 155 |
+
0.705934,
|
| 156 |
+
0.000000,
|
| 157 |
+
-5.019567,
|
| 158 |
+
5.601448,
|
| 159 |
+
0.499977,
|
| 160 |
+
0.719385,
|
| 161 |
+
0.000000,
|
| 162 |
+
-5.365123,
|
| 163 |
+
5.535441,
|
| 164 |
+
0.499977,
|
| 165 |
+
0.737019,
|
| 166 |
+
0.000000,
|
| 167 |
+
-6.149624,
|
| 168 |
+
5.071372,
|
| 169 |
+
0.499968,
|
| 170 |
+
0.781371,
|
| 171 |
+
0.000000,
|
| 172 |
+
-1.501095,
|
| 173 |
+
7.112196,
|
| 174 |
+
0.499816,
|
| 175 |
+
0.562981,
|
| 176 |
+
-0.416106,
|
| 177 |
+
-1.466449,
|
| 178 |
+
6.447657,
|
| 179 |
+
0.473773,
|
| 180 |
+
0.573910,
|
| 181 |
+
-7.087960,
|
| 182 |
+
5.434801,
|
| 183 |
+
0.099620,
|
| 184 |
+
0.104907,
|
| 185 |
+
0.254141,
|
| 186 |
+
-2.628639,
|
| 187 |
+
2.035898,
|
| 188 |
+
3.848121,
|
| 189 |
+
0.365930,
|
| 190 |
+
0.409576,
|
| 191 |
+
-3.198363,
|
| 192 |
+
1.985815,
|
| 193 |
+
3.796952,
|
| 194 |
+
0.338758,
|
| 195 |
+
0.413025,
|
| 196 |
+
-3.775151,
|
| 197 |
+
2.039402,
|
| 198 |
+
3.646194,
|
| 199 |
+
0.311120,
|
| 200 |
+
0.409460,
|
| 201 |
+
-4.465819,
|
| 202 |
+
2.422950,
|
| 203 |
+
3.155168,
|
| 204 |
+
0.274658,
|
| 205 |
+
0.389131,
|
| 206 |
+
-2.164289,
|
| 207 |
+
2.189867,
|
| 208 |
+
3.851822,
|
| 209 |
+
0.393362,
|
| 210 |
+
0.403706,
|
| 211 |
+
-3.208229,
|
| 212 |
+
3.223926,
|
| 213 |
+
4.115822,
|
| 214 |
+
0.345234,
|
| 215 |
+
0.344011,
|
| 216 |
+
-2.673803,
|
| 217 |
+
3.205337,
|
| 218 |
+
4.092203,
|
| 219 |
+
0.370094,
|
| 220 |
+
0.346076,
|
| 221 |
+
-3.745193,
|
| 222 |
+
3.165286,
|
| 223 |
+
3.972409,
|
| 224 |
+
0.319322,
|
| 225 |
+
0.347265,
|
| 226 |
+
-4.161018,
|
| 227 |
+
3.059069,
|
| 228 |
+
3.719554,
|
| 229 |
+
0.297903,
|
| 230 |
+
0.353591,
|
| 231 |
+
-5.062006,
|
| 232 |
+
1.934418,
|
| 233 |
+
2.776093,
|
| 234 |
+
0.247792,
|
| 235 |
+
0.410810,
|
| 236 |
+
-2.266659,
|
| 237 |
+
-7.425768,
|
| 238 |
+
4.389812,
|
| 239 |
+
0.396889,
|
| 240 |
+
0.842755,
|
| 241 |
+
-4.445859,
|
| 242 |
+
2.663991,
|
| 243 |
+
3.173422,
|
| 244 |
+
0.280098,
|
| 245 |
+
0.375600,
|
| 246 |
+
-7.214530,
|
| 247 |
+
2.263009,
|
| 248 |
+
0.073150,
|
| 249 |
+
0.106310,
|
| 250 |
+
0.399956,
|
| 251 |
+
-5.799793,
|
| 252 |
+
2.349546,
|
| 253 |
+
2.204059,
|
| 254 |
+
0.209925,
|
| 255 |
+
0.391353,
|
| 256 |
+
-2.844939,
|
| 257 |
+
-0.720868,
|
| 258 |
+
4.433130,
|
| 259 |
+
0.355808,
|
| 260 |
+
0.534406,
|
| 261 |
+
-0.711452,
|
| 262 |
+
-3.329355,
|
| 263 |
+
5.877044,
|
| 264 |
+
0.471751,
|
| 265 |
+
0.650404,
|
| 266 |
+
-0.606033,
|
| 267 |
+
-3.924562,
|
| 268 |
+
5.444923,
|
| 269 |
+
0.474155,
|
| 270 |
+
0.680192,
|
| 271 |
+
-1.431615,
|
| 272 |
+
-3.500953,
|
| 273 |
+
5.496189,
|
| 274 |
+
0.439785,
|
| 275 |
+
0.657229,
|
| 276 |
+
-1.914910,
|
| 277 |
+
-3.803146,
|
| 278 |
+
5.028930,
|
| 279 |
+
0.414617,
|
| 280 |
+
0.666541,
|
| 281 |
+
-1.131043,
|
| 282 |
+
-3.973937,
|
| 283 |
+
5.189648,
|
| 284 |
+
0.450374,
|
| 285 |
+
0.680861,
|
| 286 |
+
-1.563548,
|
| 287 |
+
-4.082763,
|
| 288 |
+
4.842263,
|
| 289 |
+
0.428771,
|
| 290 |
+
0.682691,
|
| 291 |
+
-2.650112,
|
| 292 |
+
-5.003649,
|
| 293 |
+
4.188483,
|
| 294 |
+
0.374971,
|
| 295 |
+
0.727805,
|
| 296 |
+
-0.427049,
|
| 297 |
+
-1.094134,
|
| 298 |
+
7.360529,
|
| 299 |
+
0.486717,
|
| 300 |
+
0.547629,
|
| 301 |
+
-0.496396,
|
| 302 |
+
-0.475659,
|
| 303 |
+
7.440358,
|
| 304 |
+
0.485301,
|
| 305 |
+
0.527395,
|
| 306 |
+
-5.253307,
|
| 307 |
+
3.881582,
|
| 308 |
+
3.363159,
|
| 309 |
+
0.257765,
|
| 310 |
+
0.314490,
|
| 311 |
+
-1.718698,
|
| 312 |
+
0.974609,
|
| 313 |
+
4.558359,
|
| 314 |
+
0.401223,
|
| 315 |
+
0.455172,
|
| 316 |
+
-1.608635,
|
| 317 |
+
-0.942516,
|
| 318 |
+
5.814193,
|
| 319 |
+
0.429819,
|
| 320 |
+
0.548615,
|
| 321 |
+
-1.651267,
|
| 322 |
+
-0.610868,
|
| 323 |
+
5.581319,
|
| 324 |
+
0.421352,
|
| 325 |
+
0.533741,
|
| 326 |
+
-4.765501,
|
| 327 |
+
-0.701554,
|
| 328 |
+
3.534632,
|
| 329 |
+
0.276896,
|
| 330 |
+
0.532057,
|
| 331 |
+
-0.478306,
|
| 332 |
+
0.295766,
|
| 333 |
+
7.101013,
|
| 334 |
+
0.483370,
|
| 335 |
+
0.499587,
|
| 336 |
+
-3.734964,
|
| 337 |
+
4.508230,
|
| 338 |
+
4.550454,
|
| 339 |
+
0.337212,
|
| 340 |
+
0.282883,
|
| 341 |
+
-4.588603,
|
| 342 |
+
4.302037,
|
| 343 |
+
4.048484,
|
| 344 |
+
0.296392,
|
| 345 |
+
0.293243,
|
| 346 |
+
-6.279331,
|
| 347 |
+
6.615427,
|
| 348 |
+
1.425850,
|
| 349 |
+
0.169295,
|
| 350 |
+
0.193814,
|
| 351 |
+
-1.220941,
|
| 352 |
+
4.142165,
|
| 353 |
+
5.106035,
|
| 354 |
+
0.447580,
|
| 355 |
+
0.302610,
|
| 356 |
+
-2.193489,
|
| 357 |
+
3.100317,
|
| 358 |
+
4.000575,
|
| 359 |
+
0.392390,
|
| 360 |
+
0.353888,
|
| 361 |
+
-3.102642,
|
| 362 |
+
-4.352984,
|
| 363 |
+
4.095905,
|
| 364 |
+
0.354490,
|
| 365 |
+
0.696784,
|
| 366 |
+
-6.719682,
|
| 367 |
+
-4.788645,
|
| 368 |
+
-1.745401,
|
| 369 |
+
0.067305,
|
| 370 |
+
0.730105,
|
| 371 |
+
-1.193824,
|
| 372 |
+
-1.306795,
|
| 373 |
+
5.737747,
|
| 374 |
+
0.442739,
|
| 375 |
+
0.572826,
|
| 376 |
+
-0.729766,
|
| 377 |
+
-1.593712,
|
| 378 |
+
5.833208,
|
| 379 |
+
0.457098,
|
| 380 |
+
0.584792,
|
| 381 |
+
-2.456206,
|
| 382 |
+
-4.342621,
|
| 383 |
+
4.283884,
|
| 384 |
+
0.381974,
|
| 385 |
+
0.694711,
|
| 386 |
+
-2.204823,
|
| 387 |
+
-4.304508,
|
| 388 |
+
4.162499,
|
| 389 |
+
0.392389,
|
| 390 |
+
0.694203,
|
| 391 |
+
-4.985894,
|
| 392 |
+
4.802461,
|
| 393 |
+
3.751977,
|
| 394 |
+
0.277076,
|
| 395 |
+
0.271932,
|
| 396 |
+
-1.592294,
|
| 397 |
+
-1.257709,
|
| 398 |
+
5.456949,
|
| 399 |
+
0.422552,
|
| 400 |
+
0.563233,
|
| 401 |
+
-2.644548,
|
| 402 |
+
4.524654,
|
| 403 |
+
4.921559,
|
| 404 |
+
0.385919,
|
| 405 |
+
0.281364,
|
| 406 |
+
-2.760292,
|
| 407 |
+
5.100971,
|
| 408 |
+
5.015990,
|
| 409 |
+
0.383103,
|
| 410 |
+
0.255840,
|
| 411 |
+
-3.523964,
|
| 412 |
+
8.005976,
|
| 413 |
+
3.729163,
|
| 414 |
+
0.331431,
|
| 415 |
+
0.119714,
|
| 416 |
+
-5.599763,
|
| 417 |
+
5.715470,
|
| 418 |
+
2.724259,
|
| 419 |
+
0.229924,
|
| 420 |
+
0.232003,
|
| 421 |
+
-3.063932,
|
| 422 |
+
6.566144,
|
| 423 |
+
4.529981,
|
| 424 |
+
0.364501,
|
| 425 |
+
0.189114,
|
| 426 |
+
-5.720968,
|
| 427 |
+
4.254584,
|
| 428 |
+
2.830852,
|
| 429 |
+
0.229622,
|
| 430 |
+
0.299541,
|
| 431 |
+
-6.374393,
|
| 432 |
+
4.785590,
|
| 433 |
+
1.591691,
|
| 434 |
+
0.173287,
|
| 435 |
+
0.278748,
|
| 436 |
+
-0.672728,
|
| 437 |
+
-3.688016,
|
| 438 |
+
5.737804,
|
| 439 |
+
0.472879,
|
| 440 |
+
0.666198,
|
| 441 |
+
-1.262560,
|
| 442 |
+
-3.787691,
|
| 443 |
+
5.417779,
|
| 444 |
+
0.446828,
|
| 445 |
+
0.668527,
|
| 446 |
+
-1.732553,
|
| 447 |
+
-3.952767,
|
| 448 |
+
5.000579,
|
| 449 |
+
0.422762,
|
| 450 |
+
0.673890,
|
| 451 |
+
-1.043625,
|
| 452 |
+
-1.464973,
|
| 453 |
+
5.662455,
|
| 454 |
+
0.445308,
|
| 455 |
+
0.580066,
|
| 456 |
+
-2.321234,
|
| 457 |
+
-4.329069,
|
| 458 |
+
4.258156,
|
| 459 |
+
0.388103,
|
| 460 |
+
0.693961,
|
| 461 |
+
-2.056846,
|
| 462 |
+
-4.477671,
|
| 463 |
+
4.520883,
|
| 464 |
+
0.403039,
|
| 465 |
+
0.706540,
|
| 466 |
+
-2.153084,
|
| 467 |
+
-4.276322,
|
| 468 |
+
4.038093,
|
| 469 |
+
0.403629,
|
| 470 |
+
0.693953,
|
| 471 |
+
-0.946874,
|
| 472 |
+
-1.035249,
|
| 473 |
+
6.512274,
|
| 474 |
+
0.460042,
|
| 475 |
+
0.557139,
|
| 476 |
+
-1.469132,
|
| 477 |
+
-4.036351,
|
| 478 |
+
4.604908,
|
| 479 |
+
0.431158,
|
| 480 |
+
0.692366,
|
| 481 |
+
-1.024340,
|
| 482 |
+
-3.989851,
|
| 483 |
+
4.926693,
|
| 484 |
+
0.452182,
|
| 485 |
+
0.692366,
|
| 486 |
+
-0.533422,
|
| 487 |
+
-3.993222,
|
| 488 |
+
5.138202,
|
| 489 |
+
0.475387,
|
| 490 |
+
0.692366,
|
| 491 |
+
-0.769720,
|
| 492 |
+
-6.095394,
|
| 493 |
+
4.985883,
|
| 494 |
+
0.465828,
|
| 495 |
+
0.779190,
|
| 496 |
+
-0.699606,
|
| 497 |
+
-5.291850,
|
| 498 |
+
5.448304,
|
| 499 |
+
0.472329,
|
| 500 |
+
0.736226,
|
| 501 |
+
-0.669687,
|
| 502 |
+
-4.949770,
|
| 503 |
+
5.509612,
|
| 504 |
+
0.473087,
|
| 505 |
+
0.717857,
|
| 506 |
+
-0.630947,
|
| 507 |
+
-4.695101,
|
| 508 |
+
5.449371,
|
| 509 |
+
0.473122,
|
| 510 |
+
0.704626,
|
| 511 |
+
-0.583218,
|
| 512 |
+
-4.517982,
|
| 513 |
+
5.339869,
|
| 514 |
+
0.473033,
|
| 515 |
+
0.695278,
|
| 516 |
+
-1.537170,
|
| 517 |
+
-4.423206,
|
| 518 |
+
4.745470,
|
| 519 |
+
0.427942,
|
| 520 |
+
0.695278,
|
| 521 |
+
-1.615600,
|
| 522 |
+
-4.475942,
|
| 523 |
+
4.813632,
|
| 524 |
+
0.426479,
|
| 525 |
+
0.703540,
|
| 526 |
+
-1.729053,
|
| 527 |
+
-4.618680,
|
| 528 |
+
4.854463,
|
| 529 |
+
0.423162,
|
| 530 |
+
0.711846,
|
| 531 |
+
-1.838624,
|
| 532 |
+
-4.828746,
|
| 533 |
+
4.823737,
|
| 534 |
+
0.418309,
|
| 535 |
+
0.720063,
|
| 536 |
+
-2.368250,
|
| 537 |
+
-3.106237,
|
| 538 |
+
4.868096,
|
| 539 |
+
0.390095,
|
| 540 |
+
0.639573,
|
| 541 |
+
-7.542244,
|
| 542 |
+
-1.049282,
|
| 543 |
+
-2.431321,
|
| 544 |
+
0.013954,
|
| 545 |
+
0.560034,
|
| 546 |
+
0.000000,
|
| 547 |
+
-1.724003,
|
| 548 |
+
6.601390,
|
| 549 |
+
0.499914,
|
| 550 |
+
0.580147,
|
| 551 |
+
-1.826614,
|
| 552 |
+
-4.399531,
|
| 553 |
+
4.399021,
|
| 554 |
+
0.413200,
|
| 555 |
+
0.695400,
|
| 556 |
+
-1.929558,
|
| 557 |
+
-4.411831,
|
| 558 |
+
4.497052,
|
| 559 |
+
0.409626,
|
| 560 |
+
0.701823,
|
| 561 |
+
-0.597442,
|
| 562 |
+
-2.013686,
|
| 563 |
+
5.866456,
|
| 564 |
+
0.468080,
|
| 565 |
+
0.601535,
|
| 566 |
+
-1.405627,
|
| 567 |
+
-1.714196,
|
| 568 |
+
5.241087,
|
| 569 |
+
0.422729,
|
| 570 |
+
0.585985,
|
| 571 |
+
-0.662449,
|
| 572 |
+
-1.819321,
|
| 573 |
+
5.863759,
|
| 574 |
+
0.463080,
|
| 575 |
+
0.593784,
|
| 576 |
+
-2.342340,
|
| 577 |
+
0.572222,
|
| 578 |
+
4.294303,
|
| 579 |
+
0.372120,
|
| 580 |
+
0.473414,
|
| 581 |
+
-3.327324,
|
| 582 |
+
0.104863,
|
| 583 |
+
4.113860,
|
| 584 |
+
0.334562,
|
| 585 |
+
0.496073,
|
| 586 |
+
-1.726175,
|
| 587 |
+
-0.919165,
|
| 588 |
+
5.273355,
|
| 589 |
+
0.411671,
|
| 590 |
+
0.546965,
|
| 591 |
+
-5.133204,
|
| 592 |
+
7.485602,
|
| 593 |
+
2.660442,
|
| 594 |
+
0.242176,
|
| 595 |
+
0.147676,
|
| 596 |
+
-4.538641,
|
| 597 |
+
6.319907,
|
| 598 |
+
3.683424,
|
| 599 |
+
0.290777,
|
| 600 |
+
0.201446,
|
| 601 |
+
-3.986562,
|
| 602 |
+
5.109487,
|
| 603 |
+
4.466315,
|
| 604 |
+
0.327338,
|
| 605 |
+
0.256527,
|
| 606 |
+
-2.169681,
|
| 607 |
+
-5.440433,
|
| 608 |
+
4.455874,
|
| 609 |
+
0.399510,
|
| 610 |
+
0.748921,
|
| 611 |
+
-1.395634,
|
| 612 |
+
5.011963,
|
| 613 |
+
5.316032,
|
| 614 |
+
0.441728,
|
| 615 |
+
0.261676,
|
| 616 |
+
-1.619500,
|
| 617 |
+
6.599217,
|
| 618 |
+
4.921106,
|
| 619 |
+
0.429765,
|
| 620 |
+
0.187834,
|
| 621 |
+
-1.891399,
|
| 622 |
+
8.236377,
|
| 623 |
+
4.274997,
|
| 624 |
+
0.412198,
|
| 625 |
+
0.108901,
|
| 626 |
+
-4.195832,
|
| 627 |
+
2.235205,
|
| 628 |
+
3.375099,
|
| 629 |
+
0.288955,
|
| 630 |
+
0.398952,
|
| 631 |
+
-5.733342,
|
| 632 |
+
1.411738,
|
| 633 |
+
2.431726,
|
| 634 |
+
0.218937,
|
| 635 |
+
0.435411,
|
| 636 |
+
-1.859887,
|
| 637 |
+
2.355757,
|
| 638 |
+
3.843181,
|
| 639 |
+
0.412782,
|
| 640 |
+
0.398970,
|
| 641 |
+
-4.988612,
|
| 642 |
+
3.074654,
|
| 643 |
+
3.083858,
|
| 644 |
+
0.257135,
|
| 645 |
+
0.355440,
|
| 646 |
+
-1.303263,
|
| 647 |
+
1.416453,
|
| 648 |
+
4.831091,
|
| 649 |
+
0.427685,
|
| 650 |
+
0.437961,
|
| 651 |
+
-1.305757,
|
| 652 |
+
-0.672779,
|
| 653 |
+
6.415959,
|
| 654 |
+
0.448340,
|
| 655 |
+
0.536936,
|
| 656 |
+
-6.465170,
|
| 657 |
+
0.937119,
|
| 658 |
+
1.689873,
|
| 659 |
+
0.178560,
|
| 660 |
+
0.457554,
|
| 661 |
+
-5.258659,
|
| 662 |
+
0.945811,
|
| 663 |
+
2.974312,
|
| 664 |
+
0.247308,
|
| 665 |
+
0.457194,
|
| 666 |
+
-4.432338,
|
| 667 |
+
0.722096,
|
| 668 |
+
3.522615,
|
| 669 |
+
0.286267,
|
| 670 |
+
0.467675,
|
| 671 |
+
-3.300681,
|
| 672 |
+
0.861641,
|
| 673 |
+
3.872784,
|
| 674 |
+
0.332828,
|
| 675 |
+
0.460712,
|
| 676 |
+
-2.430178,
|
| 677 |
+
1.131492,
|
| 678 |
+
4.039035,
|
| 679 |
+
0.368756,
|
| 680 |
+
0.447207,
|
| 681 |
+
-1.820731,
|
| 682 |
+
1.467954,
|
| 683 |
+
4.224124,
|
| 684 |
+
0.398964,
|
| 685 |
+
0.432655,
|
| 686 |
+
-0.563221,
|
| 687 |
+
2.307693,
|
| 688 |
+
5.566789,
|
| 689 |
+
0.476410,
|
| 690 |
+
0.405806,
|
| 691 |
+
-6.338145,
|
| 692 |
+
-0.529279,
|
| 693 |
+
1.881175,
|
| 694 |
+
0.189241,
|
| 695 |
+
0.523924,
|
| 696 |
+
-5.587698,
|
| 697 |
+
3.208071,
|
| 698 |
+
2.687839,
|
| 699 |
+
0.228962,
|
| 700 |
+
0.348951,
|
| 701 |
+
-0.242624,
|
| 702 |
+
-1.462857,
|
| 703 |
+
7.071491,
|
| 704 |
+
0.490726,
|
| 705 |
+
0.562401,
|
| 706 |
+
-1.611251,
|
| 707 |
+
0.339326,
|
| 708 |
+
4.895421,
|
| 709 |
+
0.404670,
|
| 710 |
+
0.485133,
|
| 711 |
+
-7.743095,
|
| 712 |
+
2.364999,
|
| 713 |
+
-2.005167,
|
| 714 |
+
0.019469,
|
| 715 |
+
0.401564,
|
| 716 |
+
-1.391142,
|
| 717 |
+
1.851048,
|
| 718 |
+
4.448999,
|
| 719 |
+
0.426243,
|
| 720 |
+
0.420431,
|
| 721 |
+
-1.785794,
|
| 722 |
+
-0.978284,
|
| 723 |
+
4.850470,
|
| 724 |
+
0.396993,
|
| 725 |
+
0.548797,
|
| 726 |
+
-4.670959,
|
| 727 |
+
2.664461,
|
| 728 |
+
3.084075,
|
| 729 |
+
0.266470,
|
| 730 |
+
0.376977,
|
| 731 |
+
-1.333970,
|
| 732 |
+
-0.283761,
|
| 733 |
+
6.097047,
|
| 734 |
+
0.439121,
|
| 735 |
+
0.518958,
|
| 736 |
+
-7.270895,
|
| 737 |
+
-2.890917,
|
| 738 |
+
-2.252455,
|
| 739 |
+
0.032314,
|
| 740 |
+
0.644357,
|
| 741 |
+
-1.856432,
|
| 742 |
+
2.585245,
|
| 743 |
+
3.757904,
|
| 744 |
+
0.419054,
|
| 745 |
+
0.387155,
|
| 746 |
+
-0.923388,
|
| 747 |
+
0.073076,
|
| 748 |
+
6.671944,
|
| 749 |
+
0.462783,
|
| 750 |
+
0.505747,
|
| 751 |
+
-5.000589,
|
| 752 |
+
-6.135128,
|
| 753 |
+
1.892523,
|
| 754 |
+
0.238979,
|
| 755 |
+
0.779745,
|
| 756 |
+
-5.085276,
|
| 757 |
+
-7.178590,
|
| 758 |
+
0.714711,
|
| 759 |
+
0.198221,
|
| 760 |
+
0.831938,
|
| 761 |
+
-7.159291,
|
| 762 |
+
-0.811820,
|
| 763 |
+
-0.072044,
|
| 764 |
+
0.107550,
|
| 765 |
+
0.540755,
|
| 766 |
+
-5.843051,
|
| 767 |
+
-5.248023,
|
| 768 |
+
0.924091,
|
| 769 |
+
0.183610,
|
| 770 |
+
0.740257,
|
| 771 |
+
-6.847258,
|
| 772 |
+
3.662916,
|
| 773 |
+
0.724695,
|
| 774 |
+
0.134410,
|
| 775 |
+
0.333683,
|
| 776 |
+
-2.412942,
|
| 777 |
+
-8.258853,
|
| 778 |
+
4.119213,
|
| 779 |
+
0.385764,
|
| 780 |
+
0.883154,
|
| 781 |
+
-0.179909,
|
| 782 |
+
-1.689864,
|
| 783 |
+
6.573301,
|
| 784 |
+
0.490967,
|
| 785 |
+
0.579378,
|
| 786 |
+
-2.103655,
|
| 787 |
+
-0.163946,
|
| 788 |
+
4.566119,
|
| 789 |
+
0.382385,
|
| 790 |
+
0.508573,
|
| 791 |
+
-6.407571,
|
| 792 |
+
2.236021,
|
| 793 |
+
1.560843,
|
| 794 |
+
0.174399,
|
| 795 |
+
0.397671,
|
| 796 |
+
-3.670075,
|
| 797 |
+
2.360153,
|
| 798 |
+
3.635230,
|
| 799 |
+
0.318785,
|
| 800 |
+
0.396235,
|
| 801 |
+
-3.177186,
|
| 802 |
+
2.294265,
|
| 803 |
+
3.775704,
|
| 804 |
+
0.343364,
|
| 805 |
+
0.400597,
|
| 806 |
+
-2.196121,
|
| 807 |
+
-4.598322,
|
| 808 |
+
4.479786,
|
| 809 |
+
0.396100,
|
| 810 |
+
0.710217,
|
| 811 |
+
-6.234883,
|
| 812 |
+
-1.944430,
|
| 813 |
+
1.663542,
|
| 814 |
+
0.187885,
|
| 815 |
+
0.588538,
|
| 816 |
+
-1.292924,
|
| 817 |
+
-9.295920,
|
| 818 |
+
4.094063,
|
| 819 |
+
0.430987,
|
| 820 |
+
0.944065,
|
| 821 |
+
-3.210651,
|
| 822 |
+
-8.533278,
|
| 823 |
+
2.802001,
|
| 824 |
+
0.318993,
|
| 825 |
+
0.898285,
|
| 826 |
+
-4.068926,
|
| 827 |
+
-7.993109,
|
| 828 |
+
1.925119,
|
| 829 |
+
0.266248,
|
| 830 |
+
0.869701,
|
| 831 |
+
0.000000,
|
| 832 |
+
6.545390,
|
| 833 |
+
5.027311,
|
| 834 |
+
0.500023,
|
| 835 |
+
0.190576,
|
| 836 |
+
0.000000,
|
| 837 |
+
-9.403378,
|
| 838 |
+
4.264492,
|
| 839 |
+
0.499977,
|
| 840 |
+
0.954453,
|
| 841 |
+
-2.724032,
|
| 842 |
+
2.315802,
|
| 843 |
+
3.777151,
|
| 844 |
+
0.366170,
|
| 845 |
+
0.398822,
|
| 846 |
+
-2.288460,
|
| 847 |
+
2.398891,
|
| 848 |
+
3.697603,
|
| 849 |
+
0.393207,
|
| 850 |
+
0.395537,
|
| 851 |
+
-1.998311,
|
| 852 |
+
2.496547,
|
| 853 |
+
3.689148,
|
| 854 |
+
0.410373,
|
| 855 |
+
0.391080,
|
| 856 |
+
-6.130040,
|
| 857 |
+
3.399261,
|
| 858 |
+
2.038516,
|
| 859 |
+
0.194993,
|
| 860 |
+
0.342102,
|
| 861 |
+
-2.288460,
|
| 862 |
+
2.886504,
|
| 863 |
+
3.775031,
|
| 864 |
+
0.388665,
|
| 865 |
+
0.362284,
|
| 866 |
+
-2.724032,
|
| 867 |
+
2.961810,
|
| 868 |
+
3.871767,
|
| 869 |
+
0.365962,
|
| 870 |
+
0.355971,
|
| 871 |
+
-3.177186,
|
| 872 |
+
2.964136,
|
| 873 |
+
3.876973,
|
| 874 |
+
0.343364,
|
| 875 |
+
0.355357,
|
| 876 |
+
-3.670075,
|
| 877 |
+
2.927714,
|
| 878 |
+
3.724325,
|
| 879 |
+
0.318785,
|
| 880 |
+
0.358340,
|
| 881 |
+
-4.018389,
|
| 882 |
+
2.857357,
|
| 883 |
+
3.482983,
|
| 884 |
+
0.301415,
|
| 885 |
+
0.363156,
|
| 886 |
+
-7.555811,
|
| 887 |
+
4.106811,
|
| 888 |
+
-0.991917,
|
| 889 |
+
0.058133,
|
| 890 |
+
0.319076,
|
| 891 |
+
-4.018389,
|
| 892 |
+
2.483695,
|
| 893 |
+
3.440898,
|
| 894 |
+
0.301415,
|
| 895 |
+
0.387449,
|
| 896 |
+
0.000000,
|
| 897 |
+
-2.521945,
|
| 898 |
+
5.932265,
|
| 899 |
+
0.499988,
|
| 900 |
+
0.618434,
|
| 901 |
+
-1.776217,
|
| 902 |
+
-2.683946,
|
| 903 |
+
5.213116,
|
| 904 |
+
0.415838,
|
| 905 |
+
0.624196,
|
| 906 |
+
-1.222237,
|
| 907 |
+
-1.182444,
|
| 908 |
+
5.952465,
|
| 909 |
+
0.445682,
|
| 910 |
+
0.566077,
|
| 911 |
+
-0.731493,
|
| 912 |
+
-2.536683,
|
| 913 |
+
5.815343,
|
| 914 |
+
0.465844,
|
| 915 |
+
0.620641,
|
| 916 |
+
0.000000,
|
| 917 |
+
3.271027,
|
| 918 |
+
5.236015,
|
| 919 |
+
0.499923,
|
| 920 |
+
0.351524,
|
| 921 |
+
-4.135272,
|
| 922 |
+
-6.996638,
|
| 923 |
+
2.671970,
|
| 924 |
+
0.288719,
|
| 925 |
+
0.819946,
|
| 926 |
+
-3.311811,
|
| 927 |
+
-7.660815,
|
| 928 |
+
3.382963,
|
| 929 |
+
0.335279,
|
| 930 |
+
0.852820,
|
| 931 |
+
-1.313701,
|
| 932 |
+
-8.639995,
|
| 933 |
+
4.702456,
|
| 934 |
+
0.440512,
|
| 935 |
+
0.902419,
|
| 936 |
+
-5.940524,
|
| 937 |
+
-6.223629,
|
| 938 |
+
-0.631468,
|
| 939 |
+
0.128294,
|
| 940 |
+
0.791941,
|
| 941 |
+
-1.998311,
|
| 942 |
+
2.743838,
|
| 943 |
+
3.744030,
|
| 944 |
+
0.408772,
|
| 945 |
+
0.373894,
|
| 946 |
+
-0.901447,
|
| 947 |
+
1.236992,
|
| 948 |
+
5.754256,
|
| 949 |
+
0.455607,
|
| 950 |
+
0.451801,
|
| 951 |
+
0.000000,
|
| 952 |
+
-8.765243,
|
| 953 |
+
4.891441,
|
| 954 |
+
0.499877,
|
| 955 |
+
0.908990,
|
| 956 |
+
-2.308977,
|
| 957 |
+
-8.974196,
|
| 958 |
+
3.609070,
|
| 959 |
+
0.375437,
|
| 960 |
+
0.924192,
|
| 961 |
+
-6.954154,
|
| 962 |
+
-2.439843,
|
| 963 |
+
-0.131163,
|
| 964 |
+
0.114210,
|
| 965 |
+
0.615022,
|
| 966 |
+
-1.098819,
|
| 967 |
+
-4.458788,
|
| 968 |
+
5.120727,
|
| 969 |
+
0.448662,
|
| 970 |
+
0.695278,
|
| 971 |
+
-1.181124,
|
| 972 |
+
-4.579996,
|
| 973 |
+
5.189564,
|
| 974 |
+
0.448020,
|
| 975 |
+
0.704632,
|
| 976 |
+
-1.255818,
|
| 977 |
+
-4.787901,
|
| 978 |
+
5.237051,
|
| 979 |
+
0.447112,
|
| 980 |
+
0.715808,
|
| 981 |
+
-1.325085,
|
| 982 |
+
-5.106507,
|
| 983 |
+
5.205010,
|
| 984 |
+
0.444832,
|
| 985 |
+
0.730794,
|
| 986 |
+
-1.546388,
|
| 987 |
+
-5.819392,
|
| 988 |
+
4.757893,
|
| 989 |
+
0.430012,
|
| 990 |
+
0.766809,
|
| 991 |
+
-1.953754,
|
| 992 |
+
-4.183892,
|
| 993 |
+
4.431713,
|
| 994 |
+
0.406787,
|
| 995 |
+
0.685673,
|
| 996 |
+
-2.117802,
|
| 997 |
+
-4.137093,
|
| 998 |
+
4.555096,
|
| 999 |
+
0.400738,
|
| 1000 |
+
0.681069,
|
| 1001 |
+
-2.285339,
|
| 1002 |
+
-4.051196,
|
| 1003 |
+
4.582438,
|
| 1004 |
+
0.392400,
|
| 1005 |
+
0.677703,
|
| 1006 |
+
-2.850160,
|
| 1007 |
+
-3.665720,
|
| 1008 |
+
4.484994,
|
| 1009 |
+
0.367856,
|
| 1010 |
+
0.663919,
|
| 1011 |
+
-5.278538,
|
| 1012 |
+
-2.238942,
|
| 1013 |
+
2.861224,
|
| 1014 |
+
0.247923,
|
| 1015 |
+
0.601333,
|
| 1016 |
+
-0.946709,
|
| 1017 |
+
1.907628,
|
| 1018 |
+
5.196779,
|
| 1019 |
+
0.452770,
|
| 1020 |
+
0.420850,
|
| 1021 |
+
-1.314173,
|
| 1022 |
+
3.104912,
|
| 1023 |
+
4.231404,
|
| 1024 |
+
0.436392,
|
| 1025 |
+
0.359887,
|
| 1026 |
+
-1.780000,
|
| 1027 |
+
2.860000,
|
| 1028 |
+
3.881555,
|
| 1029 |
+
0.416164,
|
| 1030 |
+
0.368714,
|
| 1031 |
+
-1.845110,
|
| 1032 |
+
-4.098880,
|
| 1033 |
+
4.247264,
|
| 1034 |
+
0.413386,
|
| 1035 |
+
0.692366,
|
| 1036 |
+
-5.436187,
|
| 1037 |
+
-4.030482,
|
| 1038 |
+
2.109852,
|
| 1039 |
+
0.228018,
|
| 1040 |
+
0.683572,
|
| 1041 |
+
-0.766444,
|
| 1042 |
+
3.182131,
|
| 1043 |
+
4.861453,
|
| 1044 |
+
0.468268,
|
| 1045 |
+
0.352671,
|
| 1046 |
+
-1.938616,
|
| 1047 |
+
-6.614410,
|
| 1048 |
+
4.521085,
|
| 1049 |
+
0.411362,
|
| 1050 |
+
0.804327,
|
| 1051 |
+
0.000000,
|
| 1052 |
+
1.059413,
|
| 1053 |
+
6.774605,
|
| 1054 |
+
0.499989,
|
| 1055 |
+
0.469825,
|
| 1056 |
+
-0.516573,
|
| 1057 |
+
1.583572,
|
| 1058 |
+
6.148363,
|
| 1059 |
+
0.479154,
|
| 1060 |
+
0.442654,
|
| 1061 |
+
0.000000,
|
| 1062 |
+
1.728369,
|
| 1063 |
+
6.316750,
|
| 1064 |
+
0.499974,
|
| 1065 |
+
0.439637,
|
| 1066 |
+
-1.246815,
|
| 1067 |
+
0.230297,
|
| 1068 |
+
5.681036,
|
| 1069 |
+
0.432112,
|
| 1070 |
+
0.493589,
|
| 1071 |
+
0.000000,
|
| 1072 |
+
-7.942194,
|
| 1073 |
+
5.181173,
|
| 1074 |
+
0.499886,
|
| 1075 |
+
0.866917,
|
| 1076 |
+
0.000000,
|
| 1077 |
+
-6.991499,
|
| 1078 |
+
5.153478,
|
| 1079 |
+
0.499913,
|
| 1080 |
+
0.821729,
|
| 1081 |
+
-0.997827,
|
| 1082 |
+
-6.930921,
|
| 1083 |
+
4.979576,
|
| 1084 |
+
0.456549,
|
| 1085 |
+
0.819201,
|
| 1086 |
+
-3.288807,
|
| 1087 |
+
-5.382514,
|
| 1088 |
+
3.795752,
|
| 1089 |
+
0.344549,
|
| 1090 |
+
0.745439,
|
| 1091 |
+
-2.311631,
|
| 1092 |
+
-1.566237,
|
| 1093 |
+
4.590085,
|
| 1094 |
+
0.378909,
|
| 1095 |
+
0.574010,
|
| 1096 |
+
-2.680250,
|
| 1097 |
+
-6.111567,
|
| 1098 |
+
4.096152,
|
| 1099 |
+
0.374293,
|
| 1100 |
+
0.780185,
|
| 1101 |
+
-3.832928,
|
| 1102 |
+
-1.537326,
|
| 1103 |
+
4.137731,
|
| 1104 |
+
0.319688,
|
| 1105 |
+
0.570738,
|
| 1106 |
+
-2.961860,
|
| 1107 |
+
-2.274215,
|
| 1108 |
+
4.440943,
|
| 1109 |
+
0.357155,
|
| 1110 |
+
0.604270,
|
| 1111 |
+
-4.386901,
|
| 1112 |
+
-2.683286,
|
| 1113 |
+
3.643886,
|
| 1114 |
+
0.295284,
|
| 1115 |
+
0.621581,
|
| 1116 |
+
-1.217295,
|
| 1117 |
+
-7.834465,
|
| 1118 |
+
4.969286,
|
| 1119 |
+
0.447750,
|
| 1120 |
+
0.862477,
|
| 1121 |
+
-1.542374,
|
| 1122 |
+
-0.136843,
|
| 1123 |
+
5.201008,
|
| 1124 |
+
0.410986,
|
| 1125 |
+
0.508723,
|
| 1126 |
+
-3.878377,
|
| 1127 |
+
-6.041764,
|
| 1128 |
+
3.311079,
|
| 1129 |
+
0.313951,
|
| 1130 |
+
0.775308,
|
| 1131 |
+
-3.084037,
|
| 1132 |
+
-6.809842,
|
| 1133 |
+
3.814195,
|
| 1134 |
+
0.354128,
|
| 1135 |
+
0.812553,
|
| 1136 |
+
-3.747321,
|
| 1137 |
+
-4.503545,
|
| 1138 |
+
3.726453,
|
| 1139 |
+
0.324548,
|
| 1140 |
+
0.703993,
|
| 1141 |
+
-6.094129,
|
| 1142 |
+
-3.205991,
|
| 1143 |
+
1.473482,
|
| 1144 |
+
0.189096,
|
| 1145 |
+
0.646300,
|
| 1146 |
+
-4.588995,
|
| 1147 |
+
-4.728726,
|
| 1148 |
+
2.983221,
|
| 1149 |
+
0.279777,
|
| 1150 |
+
0.714658,
|
| 1151 |
+
-6.583231,
|
| 1152 |
+
-3.941269,
|
| 1153 |
+
0.070268,
|
| 1154 |
+
0.133823,
|
| 1155 |
+
0.682701,
|
| 1156 |
+
-3.492580,
|
| 1157 |
+
-3.195820,
|
| 1158 |
+
4.130198,
|
| 1159 |
+
0.336768,
|
| 1160 |
+
0.644733,
|
| 1161 |
+
-1.255543,
|
| 1162 |
+
0.802341,
|
| 1163 |
+
5.307551,
|
| 1164 |
+
0.429884,
|
| 1165 |
+
0.466522,
|
| 1166 |
+
-1.126122,
|
| 1167 |
+
-0.933602,
|
| 1168 |
+
6.538785,
|
| 1169 |
+
0.455528,
|
| 1170 |
+
0.548623,
|
| 1171 |
+
-1.443109,
|
| 1172 |
+
-1.142774,
|
| 1173 |
+
5.905127,
|
| 1174 |
+
0.437114,
|
| 1175 |
+
0.558896,
|
| 1176 |
+
-0.923043,
|
| 1177 |
+
-0.529042,
|
| 1178 |
+
7.003423,
|
| 1179 |
+
0.467288,
|
| 1180 |
+
0.529925,
|
| 1181 |
+
-1.755386,
|
| 1182 |
+
3.529117,
|
| 1183 |
+
4.327696,
|
| 1184 |
+
0.414712,
|
| 1185 |
+
0.335220,
|
| 1186 |
+
-2.632589,
|
| 1187 |
+
3.713828,
|
| 1188 |
+
4.364629,
|
| 1189 |
+
0.377046,
|
| 1190 |
+
0.322778,
|
| 1191 |
+
-3.388062,
|
| 1192 |
+
3.721976,
|
| 1193 |
+
4.309028,
|
| 1194 |
+
0.344108,
|
| 1195 |
+
0.320151,
|
| 1196 |
+
-4.075766,
|
| 1197 |
+
3.675413,
|
| 1198 |
+
4.076063,
|
| 1199 |
+
0.312876,
|
| 1200 |
+
0.322332,
|
| 1201 |
+
-4.622910,
|
| 1202 |
+
3.474691,
|
| 1203 |
+
3.646321,
|
| 1204 |
+
0.283526,
|
| 1205 |
+
0.333190,
|
| 1206 |
+
-5.171755,
|
| 1207 |
+
2.535753,
|
| 1208 |
+
2.670867,
|
| 1209 |
+
0.241246,
|
| 1210 |
+
0.382786,
|
| 1211 |
+
-7.297331,
|
| 1212 |
+
0.763172,
|
| 1213 |
+
-0.048769,
|
| 1214 |
+
0.102986,
|
| 1215 |
+
0.468763,
|
| 1216 |
+
-4.706828,
|
| 1217 |
+
1.651000,
|
| 1218 |
+
3.109532,
|
| 1219 |
+
0.267612,
|
| 1220 |
+
0.424560,
|
| 1221 |
+
-4.071712,
|
| 1222 |
+
1.476821,
|
| 1223 |
+
3.476944,
|
| 1224 |
+
0.297879,
|
| 1225 |
+
0.433176,
|
| 1226 |
+
-3.269817,
|
| 1227 |
+
1.470659,
|
| 1228 |
+
3.731945,
|
| 1229 |
+
0.333434,
|
| 1230 |
+
0.433878,
|
| 1231 |
+
-2.527572,
|
| 1232 |
+
1.617311,
|
| 1233 |
+
3.865444,
|
| 1234 |
+
0.366427,
|
| 1235 |
+
0.426116,
|
| 1236 |
+
-1.970894,
|
| 1237 |
+
1.858505,
|
| 1238 |
+
3.961782,
|
| 1239 |
+
0.396012,
|
| 1240 |
+
0.416696,
|
| 1241 |
+
-1.579543,
|
| 1242 |
+
2.097941,
|
| 1243 |
+
4.084996,
|
| 1244 |
+
0.420121,
|
| 1245 |
+
0.410228,
|
| 1246 |
+
-7.664182,
|
| 1247 |
+
0.673132,
|
| 1248 |
+
-2.435867,
|
| 1249 |
+
0.007561,
|
| 1250 |
+
0.480777,
|
| 1251 |
+
-1.397041,
|
| 1252 |
+
-1.340139,
|
| 1253 |
+
5.630378,
|
| 1254 |
+
0.432949,
|
| 1255 |
+
0.569518,
|
| 1256 |
+
-0.884838,
|
| 1257 |
+
0.658740,
|
| 1258 |
+
6.233232,
|
| 1259 |
+
0.458639,
|
| 1260 |
+
0.479089,
|
| 1261 |
+
-0.767097,
|
| 1262 |
+
-0.968035,
|
| 1263 |
+
7.077932,
|
| 1264 |
+
0.473466,
|
| 1265 |
+
0.545744,
|
| 1266 |
+
-0.460213,
|
| 1267 |
+
-1.334106,
|
| 1268 |
+
6.787447,
|
| 1269 |
+
0.476088,
|
| 1270 |
+
0.563830,
|
| 1271 |
+
-0.748618,
|
| 1272 |
+
-1.067994,
|
| 1273 |
+
6.798303,
|
| 1274 |
+
0.468472,
|
| 1275 |
+
0.555057,
|
| 1276 |
+
-1.236408,
|
| 1277 |
+
-1.585568,
|
| 1278 |
+
5.480490,
|
| 1279 |
+
0.433991,
|
| 1280 |
+
0.582362,
|
| 1281 |
+
-0.387306,
|
| 1282 |
+
-1.409990,
|
| 1283 |
+
6.957705,
|
| 1284 |
+
0.483518,
|
| 1285 |
+
0.562984,
|
| 1286 |
+
-0.319925,
|
| 1287 |
+
-1.607931,
|
| 1288 |
+
6.508676,
|
| 1289 |
+
0.482483,
|
| 1290 |
+
0.577849,
|
| 1291 |
+
-1.639633,
|
| 1292 |
+
2.556298,
|
| 1293 |
+
3.863736,
|
| 1294 |
+
0.426450,
|
| 1295 |
+
0.389799,
|
| 1296 |
+
-1.255645,
|
| 1297 |
+
2.467144,
|
| 1298 |
+
4.203800,
|
| 1299 |
+
0.438999,
|
| 1300 |
+
0.396495,
|
| 1301 |
+
-1.031362,
|
| 1302 |
+
2.382663,
|
| 1303 |
+
4.615849,
|
| 1304 |
+
0.450067,
|
| 1305 |
+
0.400434,
|
| 1306 |
+
-4.253081,
|
| 1307 |
+
2.772296,
|
| 1308 |
+
3.315305,
|
| 1309 |
+
0.289712,
|
| 1310 |
+
0.368253,
|
| 1311 |
+
-4.530000,
|
| 1312 |
+
2.910000,
|
| 1313 |
+
3.339685,
|
| 1314 |
+
0.276670,
|
| 1315 |
+
0.363373,
|
| 1316 |
+
0.463928,
|
| 1317 |
+
0.955357,
|
| 1318 |
+
6.633583,
|
| 1319 |
+
0.517862,
|
| 1320 |
+
0.471948,
|
| 1321 |
+
4.253081,
|
| 1322 |
+
2.577646,
|
| 1323 |
+
3.279702,
|
| 1324 |
+
0.710288,
|
| 1325 |
+
0.380764,
|
| 1326 |
+
0.416106,
|
| 1327 |
+
-1.466449,
|
| 1328 |
+
6.447657,
|
| 1329 |
+
0.526227,
|
| 1330 |
+
0.573910,
|
| 1331 |
+
7.087960,
|
| 1332 |
+
5.434801,
|
| 1333 |
+
0.099620,
|
| 1334 |
+
0.895093,
|
| 1335 |
+
0.254141,
|
| 1336 |
+
2.628639,
|
| 1337 |
+
2.035898,
|
| 1338 |
+
3.848121,
|
| 1339 |
+
0.634070,
|
| 1340 |
+
0.409576,
|
| 1341 |
+
3.198363,
|
| 1342 |
+
1.985815,
|
| 1343 |
+
3.796952,
|
| 1344 |
+
0.661242,
|
| 1345 |
+
0.413025,
|
| 1346 |
+
3.775151,
|
| 1347 |
+
2.039402,
|
| 1348 |
+
3.646194,
|
| 1349 |
+
0.688880,
|
| 1350 |
+
0.409460,
|
| 1351 |
+
4.465819,
|
| 1352 |
+
2.422950,
|
| 1353 |
+
3.155168,
|
| 1354 |
+
0.725342,
|
| 1355 |
+
0.389131,
|
| 1356 |
+
2.164289,
|
| 1357 |
+
2.189867,
|
| 1358 |
+
3.851822,
|
| 1359 |
+
0.606630,
|
| 1360 |
+
0.403705,
|
| 1361 |
+
3.208229,
|
| 1362 |
+
3.223926,
|
| 1363 |
+
4.115822,
|
| 1364 |
+
0.654766,
|
| 1365 |
+
0.344011,
|
| 1366 |
+
2.673803,
|
| 1367 |
+
3.205337,
|
| 1368 |
+
4.092203,
|
| 1369 |
+
0.629906,
|
| 1370 |
+
0.346076,
|
| 1371 |
+
3.745193,
|
| 1372 |
+
3.165286,
|
| 1373 |
+
3.972409,
|
| 1374 |
+
0.680678,
|
| 1375 |
+
0.347265,
|
| 1376 |
+
4.161018,
|
| 1377 |
+
3.059069,
|
| 1378 |
+
3.719554,
|
| 1379 |
+
0.702097,
|
| 1380 |
+
0.353591,
|
| 1381 |
+
5.062006,
|
| 1382 |
+
1.934418,
|
| 1383 |
+
2.776093,
|
| 1384 |
+
0.752212,
|
| 1385 |
+
0.410805,
|
| 1386 |
+
2.266659,
|
| 1387 |
+
-7.425768,
|
| 1388 |
+
4.389812,
|
| 1389 |
+
0.602918,
|
| 1390 |
+
0.842863,
|
| 1391 |
+
4.445859,
|
| 1392 |
+
2.663991,
|
| 1393 |
+
3.173422,
|
| 1394 |
+
0.719902,
|
| 1395 |
+
0.375600,
|
| 1396 |
+
7.214530,
|
| 1397 |
+
2.263009,
|
| 1398 |
+
0.073150,
|
| 1399 |
+
0.893693,
|
| 1400 |
+
0.399960,
|
| 1401 |
+
5.799793,
|
| 1402 |
+
2.349546,
|
| 1403 |
+
2.204059,
|
| 1404 |
+
0.790082,
|
| 1405 |
+
0.391354,
|
| 1406 |
+
2.844939,
|
| 1407 |
+
-0.720868,
|
| 1408 |
+
4.433130,
|
| 1409 |
+
0.643998,
|
| 1410 |
+
0.534488,
|
| 1411 |
+
0.711452,
|
| 1412 |
+
-3.329355,
|
| 1413 |
+
5.877044,
|
| 1414 |
+
0.528249,
|
| 1415 |
+
0.650404,
|
| 1416 |
+
0.606033,
|
| 1417 |
+
-3.924562,
|
| 1418 |
+
5.444923,
|
| 1419 |
+
0.525850,
|
| 1420 |
+
0.680191,
|
| 1421 |
+
1.431615,
|
| 1422 |
+
-3.500953,
|
| 1423 |
+
5.496189,
|
| 1424 |
+
0.560215,
|
| 1425 |
+
0.657229,
|
| 1426 |
+
1.914910,
|
| 1427 |
+
-3.803146,
|
| 1428 |
+
5.028930,
|
| 1429 |
+
0.585384,
|
| 1430 |
+
0.666541,
|
| 1431 |
+
1.131043,
|
| 1432 |
+
-3.973937,
|
| 1433 |
+
5.189648,
|
| 1434 |
+
0.549626,
|
| 1435 |
+
0.680861,
|
| 1436 |
+
1.563548,
|
| 1437 |
+
-4.082763,
|
| 1438 |
+
4.842263,
|
| 1439 |
+
0.571228,
|
| 1440 |
+
0.682692,
|
| 1441 |
+
2.650112,
|
| 1442 |
+
-5.003649,
|
| 1443 |
+
4.188483,
|
| 1444 |
+
0.624852,
|
| 1445 |
+
0.728099,
|
| 1446 |
+
0.427049,
|
| 1447 |
+
-1.094134,
|
| 1448 |
+
7.360529,
|
| 1449 |
+
0.513050,
|
| 1450 |
+
0.547282,
|
| 1451 |
+
0.496396,
|
| 1452 |
+
-0.475659,
|
| 1453 |
+
7.440358,
|
| 1454 |
+
0.515097,
|
| 1455 |
+
0.527252,
|
| 1456 |
+
5.253307,
|
| 1457 |
+
3.881582,
|
| 1458 |
+
3.363159,
|
| 1459 |
+
0.742247,
|
| 1460 |
+
0.314507,
|
| 1461 |
+
1.718698,
|
| 1462 |
+
0.974609,
|
| 1463 |
+
4.558359,
|
| 1464 |
+
0.598631,
|
| 1465 |
+
0.454979,
|
| 1466 |
+
1.608635,
|
| 1467 |
+
-0.942516,
|
| 1468 |
+
5.814193,
|
| 1469 |
+
0.570338,
|
| 1470 |
+
0.548575,
|
| 1471 |
+
1.651267,
|
| 1472 |
+
-0.610868,
|
| 1473 |
+
5.581319,
|
| 1474 |
+
0.578632,
|
| 1475 |
+
0.533623,
|
| 1476 |
+
4.765501,
|
| 1477 |
+
-0.701554,
|
| 1478 |
+
3.534632,
|
| 1479 |
+
0.723087,
|
| 1480 |
+
0.532054,
|
| 1481 |
+
0.478306,
|
| 1482 |
+
0.295766,
|
| 1483 |
+
7.101013,
|
| 1484 |
+
0.516446,
|
| 1485 |
+
0.499639,
|
| 1486 |
+
3.734964,
|
| 1487 |
+
4.508230,
|
| 1488 |
+
4.550454,
|
| 1489 |
+
0.662801,
|
| 1490 |
+
0.282918,
|
| 1491 |
+
4.588603,
|
| 1492 |
+
4.302037,
|
| 1493 |
+
4.048484,
|
| 1494 |
+
0.703624,
|
| 1495 |
+
0.293271,
|
| 1496 |
+
6.279331,
|
| 1497 |
+
6.615427,
|
| 1498 |
+
1.425850,
|
| 1499 |
+
0.830705,
|
| 1500 |
+
0.193814,
|
| 1501 |
+
1.220941,
|
| 1502 |
+
4.142165,
|
| 1503 |
+
5.106035,
|
| 1504 |
+
0.552386,
|
| 1505 |
+
0.302568,
|
| 1506 |
+
2.193489,
|
| 1507 |
+
3.100317,
|
| 1508 |
+
4.000575,
|
| 1509 |
+
0.607610,
|
| 1510 |
+
0.353888,
|
| 1511 |
+
3.102642,
|
| 1512 |
+
-4.352984,
|
| 1513 |
+
4.095905,
|
| 1514 |
+
0.645429,
|
| 1515 |
+
0.696707,
|
| 1516 |
+
6.719682,
|
| 1517 |
+
-4.788645,
|
| 1518 |
+
-1.745401,
|
| 1519 |
+
0.932695,
|
| 1520 |
+
0.730105,
|
| 1521 |
+
1.193824,
|
| 1522 |
+
-1.306795,
|
| 1523 |
+
5.737747,
|
| 1524 |
+
0.557261,
|
| 1525 |
+
0.572826,
|
| 1526 |
+
0.729766,
|
| 1527 |
+
-1.593712,
|
| 1528 |
+
5.833208,
|
| 1529 |
+
0.542902,
|
| 1530 |
+
0.584792,
|
| 1531 |
+
2.456206,
|
| 1532 |
+
-4.342621,
|
| 1533 |
+
4.283884,
|
| 1534 |
+
0.618026,
|
| 1535 |
+
0.694711,
|
| 1536 |
+
2.204823,
|
| 1537 |
+
-4.304508,
|
| 1538 |
+
4.162499,
|
| 1539 |
+
0.607591,
|
| 1540 |
+
0.694203,
|
| 1541 |
+
4.985894,
|
| 1542 |
+
4.802461,
|
| 1543 |
+
3.751977,
|
| 1544 |
+
0.722943,
|
| 1545 |
+
0.271963,
|
| 1546 |
+
1.592294,
|
| 1547 |
+
-1.257709,
|
| 1548 |
+
5.456949,
|
| 1549 |
+
0.577414,
|
| 1550 |
+
0.563167,
|
| 1551 |
+
2.644548,
|
| 1552 |
+
4.524654,
|
| 1553 |
+
4.921559,
|
| 1554 |
+
0.614083,
|
| 1555 |
+
0.281387,
|
| 1556 |
+
2.760292,
|
| 1557 |
+
5.100971,
|
| 1558 |
+
5.015990,
|
| 1559 |
+
0.616907,
|
| 1560 |
+
0.255886,
|
| 1561 |
+
3.523964,
|
| 1562 |
+
8.005976,
|
| 1563 |
+
3.729163,
|
| 1564 |
+
0.668509,
|
| 1565 |
+
0.119914,
|
| 1566 |
+
5.599763,
|
| 1567 |
+
5.715470,
|
| 1568 |
+
2.724259,
|
| 1569 |
+
0.770092,
|
| 1570 |
+
0.232021,
|
| 1571 |
+
3.063932,
|
| 1572 |
+
6.566144,
|
| 1573 |
+
4.529981,
|
| 1574 |
+
0.635536,
|
| 1575 |
+
0.189249,
|
| 1576 |
+
5.720968,
|
| 1577 |
+
4.254584,
|
| 1578 |
+
2.830852,
|
| 1579 |
+
0.770391,
|
| 1580 |
+
0.299556,
|
| 1581 |
+
6.374393,
|
| 1582 |
+
4.785590,
|
| 1583 |
+
1.591691,
|
| 1584 |
+
0.826722,
|
| 1585 |
+
0.278755,
|
| 1586 |
+
0.672728,
|
| 1587 |
+
-3.688016,
|
| 1588 |
+
5.737804,
|
| 1589 |
+
0.527121,
|
| 1590 |
+
0.666198,
|
| 1591 |
+
1.262560,
|
| 1592 |
+
-3.787691,
|
| 1593 |
+
5.417779,
|
| 1594 |
+
0.553172,
|
| 1595 |
+
0.668527,
|
| 1596 |
+
1.732553,
|
| 1597 |
+
-3.952767,
|
| 1598 |
+
5.000579,
|
| 1599 |
+
0.577238,
|
| 1600 |
+
0.673890,
|
| 1601 |
+
1.043625,
|
| 1602 |
+
-1.464973,
|
| 1603 |
+
5.662455,
|
| 1604 |
+
0.554692,
|
| 1605 |
+
0.580066,
|
| 1606 |
+
2.321234,
|
| 1607 |
+
-4.329069,
|
| 1608 |
+
4.258156,
|
| 1609 |
+
0.611897,
|
| 1610 |
+
0.693961,
|
| 1611 |
+
2.056846,
|
| 1612 |
+
-4.477671,
|
| 1613 |
+
4.520883,
|
| 1614 |
+
0.596961,
|
| 1615 |
+
0.706540,
|
| 1616 |
+
2.153084,
|
| 1617 |
+
-4.276322,
|
| 1618 |
+
4.038093,
|
| 1619 |
+
0.596371,
|
| 1620 |
+
0.693953,
|
| 1621 |
+
0.946874,
|
| 1622 |
+
-1.035249,
|
| 1623 |
+
6.512274,
|
| 1624 |
+
0.539958,
|
| 1625 |
+
0.557139,
|
| 1626 |
+
1.469132,
|
| 1627 |
+
-4.036351,
|
| 1628 |
+
4.604908,
|
| 1629 |
+
0.568842,
|
| 1630 |
+
0.692366,
|
| 1631 |
+
1.024340,
|
| 1632 |
+
-3.989851,
|
| 1633 |
+
4.926693,
|
| 1634 |
+
0.547818,
|
| 1635 |
+
0.692366,
|
| 1636 |
+
0.533422,
|
| 1637 |
+
-3.993222,
|
| 1638 |
+
5.138202,
|
| 1639 |
+
0.524613,
|
| 1640 |
+
0.692366,
|
| 1641 |
+
0.769720,
|
| 1642 |
+
-6.095394,
|
| 1643 |
+
4.985883,
|
| 1644 |
+
0.534090,
|
| 1645 |
+
0.779141,
|
| 1646 |
+
0.699606,
|
| 1647 |
+
-5.291850,
|
| 1648 |
+
5.448304,
|
| 1649 |
+
0.527671,
|
| 1650 |
+
0.736226,
|
| 1651 |
+
0.669687,
|
| 1652 |
+
-4.949770,
|
| 1653 |
+
5.509612,
|
| 1654 |
+
0.526913,
|
| 1655 |
+
0.717857,
|
| 1656 |
+
0.630947,
|
| 1657 |
+
-4.695101,
|
| 1658 |
+
5.449371,
|
| 1659 |
+
0.526878,
|
| 1660 |
+
0.704626,
|
| 1661 |
+
0.583218,
|
| 1662 |
+
-4.517982,
|
| 1663 |
+
5.339869,
|
| 1664 |
+
0.526967,
|
| 1665 |
+
0.695278,
|
| 1666 |
+
1.537170,
|
| 1667 |
+
-4.423206,
|
| 1668 |
+
4.745470,
|
| 1669 |
+
0.572058,
|
| 1670 |
+
0.695278,
|
| 1671 |
+
1.615600,
|
| 1672 |
+
-4.475942,
|
| 1673 |
+
4.813632,
|
| 1674 |
+
0.573521,
|
| 1675 |
+
0.703540,
|
| 1676 |
+
1.729053,
|
| 1677 |
+
-4.618680,
|
| 1678 |
+
4.854463,
|
| 1679 |
+
0.576838,
|
| 1680 |
+
0.711846,
|
| 1681 |
+
1.838624,
|
| 1682 |
+
-4.828746,
|
| 1683 |
+
4.823737,
|
| 1684 |
+
0.581691,
|
| 1685 |
+
0.720063,
|
| 1686 |
+
2.368250,
|
| 1687 |
+
-3.106237,
|
| 1688 |
+
4.868096,
|
| 1689 |
+
0.609945,
|
| 1690 |
+
0.639910,
|
| 1691 |
+
7.542244,
|
| 1692 |
+
-1.049282,
|
| 1693 |
+
-2.431321,
|
| 1694 |
+
0.986046,
|
| 1695 |
+
0.560034,
|
| 1696 |
+
1.826614,
|
| 1697 |
+
-4.399531,
|
| 1698 |
+
4.399021,
|
| 1699 |
+
0.586800,
|
| 1700 |
+
0.695400,
|
| 1701 |
+
1.929558,
|
| 1702 |
+
-4.411831,
|
| 1703 |
+
4.497052,
|
| 1704 |
+
0.590372,
|
| 1705 |
+
0.701823,
|
| 1706 |
+
0.597442,
|
| 1707 |
+
-2.013686,
|
| 1708 |
+
5.866456,
|
| 1709 |
+
0.531915,
|
| 1710 |
+
0.601537,
|
| 1711 |
+
1.405627,
|
| 1712 |
+
-1.714196,
|
| 1713 |
+
5.241087,
|
| 1714 |
+
0.577268,
|
| 1715 |
+
0.585935,
|
| 1716 |
+
0.662449,
|
| 1717 |
+
-1.819321,
|
| 1718 |
+
5.863759,
|
| 1719 |
+
0.536915,
|
| 1720 |
+
0.593786,
|
| 1721 |
+
2.342340,
|
| 1722 |
+
0.572222,
|
| 1723 |
+
4.294303,
|
| 1724 |
+
0.627543,
|
| 1725 |
+
0.473352,
|
| 1726 |
+
3.327324,
|
| 1727 |
+
0.104863,
|
| 1728 |
+
4.113860,
|
| 1729 |
+
0.665586,
|
| 1730 |
+
0.495951,
|
| 1731 |
+
1.726175,
|
| 1732 |
+
-0.919165,
|
| 1733 |
+
5.273355,
|
| 1734 |
+
0.588354,
|
| 1735 |
+
0.546862,
|
| 1736 |
+
5.133204,
|
| 1737 |
+
7.485602,
|
| 1738 |
+
2.660442,
|
| 1739 |
+
0.757824,
|
| 1740 |
+
0.147676,
|
| 1741 |
+
4.538641,
|
| 1742 |
+
6.319907,
|
| 1743 |
+
3.683424,
|
| 1744 |
+
0.709250,
|
| 1745 |
+
0.201508,
|
| 1746 |
+
3.986562,
|
| 1747 |
+
5.109487,
|
| 1748 |
+
4.466315,
|
| 1749 |
+
0.672684,
|
| 1750 |
+
0.256581,
|
| 1751 |
+
2.169681,
|
| 1752 |
+
-5.440433,
|
| 1753 |
+
4.455874,
|
| 1754 |
+
0.600409,
|
| 1755 |
+
0.749005,
|
| 1756 |
+
1.395634,
|
| 1757 |
+
5.011963,
|
| 1758 |
+
5.316032,
|
| 1759 |
+
0.558266,
|
| 1760 |
+
0.261672,
|
| 1761 |
+
1.619500,
|
| 1762 |
+
6.599217,
|
| 1763 |
+
4.921106,
|
| 1764 |
+
0.570304,
|
| 1765 |
+
0.187871,
|
| 1766 |
+
1.891399,
|
| 1767 |
+
8.236377,
|
| 1768 |
+
4.274997,
|
| 1769 |
+
0.588166,
|
| 1770 |
+
0.109044,
|
| 1771 |
+
4.195832,
|
| 1772 |
+
2.235205,
|
| 1773 |
+
3.375099,
|
| 1774 |
+
0.711045,
|
| 1775 |
+
0.398952,
|
| 1776 |
+
5.733342,
|
| 1777 |
+
1.411738,
|
| 1778 |
+
2.431726,
|
| 1779 |
+
0.781070,
|
| 1780 |
+
0.435405,
|
| 1781 |
+
1.859887,
|
| 1782 |
+
2.355757,
|
| 1783 |
+
3.843181,
|
| 1784 |
+
0.587247,
|
| 1785 |
+
0.398932,
|
| 1786 |
+
4.988612,
|
| 1787 |
+
3.074654,
|
| 1788 |
+
3.083858,
|
| 1789 |
+
0.742870,
|
| 1790 |
+
0.355446,
|
| 1791 |
+
1.303263,
|
| 1792 |
+
1.416453,
|
| 1793 |
+
4.831091,
|
| 1794 |
+
0.572156,
|
| 1795 |
+
0.437652,
|
| 1796 |
+
1.305757,
|
| 1797 |
+
-0.672779,
|
| 1798 |
+
6.415959,
|
| 1799 |
+
0.551868,
|
| 1800 |
+
0.536570,
|
| 1801 |
+
6.465170,
|
| 1802 |
+
0.937119,
|
| 1803 |
+
1.689873,
|
| 1804 |
+
0.821442,
|
| 1805 |
+
0.457556,
|
| 1806 |
+
5.258659,
|
| 1807 |
+
0.945811,
|
| 1808 |
+
2.974312,
|
| 1809 |
+
0.752702,
|
| 1810 |
+
0.457182,
|
| 1811 |
+
4.432338,
|
| 1812 |
+
0.722096,
|
| 1813 |
+
3.522615,
|
| 1814 |
+
0.713757,
|
| 1815 |
+
0.467627,
|
| 1816 |
+
3.300681,
|
| 1817 |
+
0.861641,
|
| 1818 |
+
3.872784,
|
| 1819 |
+
0.667113,
|
| 1820 |
+
0.460673,
|
| 1821 |
+
2.430178,
|
| 1822 |
+
1.131492,
|
| 1823 |
+
4.039035,
|
| 1824 |
+
0.631101,
|
| 1825 |
+
0.447154,
|
| 1826 |
+
1.820731,
|
| 1827 |
+
1.467954,
|
| 1828 |
+
4.224124,
|
| 1829 |
+
0.600862,
|
| 1830 |
+
0.432473,
|
| 1831 |
+
0.563221,
|
| 1832 |
+
2.307693,
|
| 1833 |
+
5.566789,
|
| 1834 |
+
0.523481,
|
| 1835 |
+
0.405627,
|
| 1836 |
+
6.338145,
|
| 1837 |
+
-0.529279,
|
| 1838 |
+
1.881175,
|
| 1839 |
+
0.810748,
|
| 1840 |
+
0.523926,
|
| 1841 |
+
5.587698,
|
| 1842 |
+
3.208071,
|
| 1843 |
+
2.687839,
|
| 1844 |
+
0.771046,
|
| 1845 |
+
0.348959,
|
| 1846 |
+
0.242624,
|
| 1847 |
+
-1.462857,
|
| 1848 |
+
7.071491,
|
| 1849 |
+
0.509127,
|
| 1850 |
+
0.562718,
|
| 1851 |
+
1.611251,
|
| 1852 |
+
0.339326,
|
| 1853 |
+
4.895421,
|
| 1854 |
+
0.595293,
|
| 1855 |
+
0.485024,
|
| 1856 |
+
7.743095,
|
| 1857 |
+
2.364999,
|
| 1858 |
+
-2.005167,
|
| 1859 |
+
0.980531,
|
| 1860 |
+
0.401564,
|
| 1861 |
+
1.391142,
|
| 1862 |
+
1.851048,
|
| 1863 |
+
4.448999,
|
| 1864 |
+
0.573500,
|
| 1865 |
+
0.420000,
|
| 1866 |
+
1.785794,
|
| 1867 |
+
-0.978284,
|
| 1868 |
+
4.850470,
|
| 1869 |
+
0.602995,
|
| 1870 |
+
0.548688,
|
| 1871 |
+
4.670959,
|
| 1872 |
+
2.664461,
|
| 1873 |
+
3.084075,
|
| 1874 |
+
0.733530,
|
| 1875 |
+
0.376977,
|
| 1876 |
+
1.333970,
|
| 1877 |
+
-0.283761,
|
| 1878 |
+
6.097047,
|
| 1879 |
+
0.560611,
|
| 1880 |
+
0.519017,
|
| 1881 |
+
7.270895,
|
| 1882 |
+
-2.890917,
|
| 1883 |
+
-2.252455,
|
| 1884 |
+
0.967686,
|
| 1885 |
+
0.644357,
|
| 1886 |
+
1.856432,
|
| 1887 |
+
2.585245,
|
| 1888 |
+
3.757904,
|
| 1889 |
+
0.580985,
|
| 1890 |
+
0.387160,
|
| 1891 |
+
0.923388,
|
| 1892 |
+
0.073076,
|
| 1893 |
+
6.671944,
|
| 1894 |
+
0.537728,
|
| 1895 |
+
0.505385,
|
| 1896 |
+
5.000589,
|
| 1897 |
+
-6.135128,
|
| 1898 |
+
1.892523,
|
| 1899 |
+
0.760966,
|
| 1900 |
+
0.779753,
|
| 1901 |
+
5.085276,
|
| 1902 |
+
-7.178590,
|
| 1903 |
+
0.714711,
|
| 1904 |
+
0.801779,
|
| 1905 |
+
0.831938,
|
| 1906 |
+
7.159291,
|
| 1907 |
+
-0.811820,
|
| 1908 |
+
-0.072044,
|
| 1909 |
+
0.892441,
|
| 1910 |
+
0.540761,
|
| 1911 |
+
5.843051,
|
| 1912 |
+
-5.248023,
|
| 1913 |
+
0.924091,
|
| 1914 |
+
0.816351,
|
| 1915 |
+
0.740260,
|
| 1916 |
+
6.847258,
|
| 1917 |
+
3.662916,
|
| 1918 |
+
0.724695,
|
| 1919 |
+
0.865595,
|
| 1920 |
+
0.333687,
|
| 1921 |
+
2.412942,
|
| 1922 |
+
-8.258853,
|
| 1923 |
+
4.119213,
|
| 1924 |
+
0.614074,
|
| 1925 |
+
0.883246,
|
| 1926 |
+
0.179909,
|
| 1927 |
+
-1.689864,
|
| 1928 |
+
6.573301,
|
| 1929 |
+
0.508953,
|
| 1930 |
+
0.579438,
|
| 1931 |
+
2.103655,
|
| 1932 |
+
-0.163946,
|
| 1933 |
+
4.566119,
|
| 1934 |
+
0.617942,
|
| 1935 |
+
0.508316,
|
| 1936 |
+
6.407571,
|
| 1937 |
+
2.236021,
|
| 1938 |
+
1.560843,
|
| 1939 |
+
0.825608,
|
| 1940 |
+
0.397675,
|
| 1941 |
+
3.670075,
|
| 1942 |
+
2.360153,
|
| 1943 |
+
3.635230,
|
| 1944 |
+
0.681215,
|
| 1945 |
+
0.396235,
|
| 1946 |
+
3.177186,
|
| 1947 |
+
2.294265,
|
| 1948 |
+
3.775704,
|
| 1949 |
+
0.656636,
|
| 1950 |
+
0.400597,
|
| 1951 |
+
2.196121,
|
| 1952 |
+
-4.598322,
|
| 1953 |
+
4.479786,
|
| 1954 |
+
0.603900,
|
| 1955 |
+
0.710217,
|
| 1956 |
+
6.234883,
|
| 1957 |
+
-1.944430,
|
| 1958 |
+
1.663542,
|
| 1959 |
+
0.812086,
|
| 1960 |
+
0.588539,
|
| 1961 |
+
1.292924,
|
| 1962 |
+
-9.295920,
|
| 1963 |
+
4.094063,
|
| 1964 |
+
0.568013,
|
| 1965 |
+
0.944565,
|
| 1966 |
+
3.210651,
|
| 1967 |
+
-8.533278,
|
| 1968 |
+
2.802001,
|
| 1969 |
+
0.681008,
|
| 1970 |
+
0.898285,
|
| 1971 |
+
4.068926,
|
| 1972 |
+
-7.993109,
|
| 1973 |
+
1.925119,
|
| 1974 |
+
0.733752,
|
| 1975 |
+
0.869701,
|
| 1976 |
+
2.724032,
|
| 1977 |
+
2.315802,
|
| 1978 |
+
3.777151,
|
| 1979 |
+
0.633830,
|
| 1980 |
+
0.398822,
|
| 1981 |
+
2.288460,
|
| 1982 |
+
2.398891,
|
| 1983 |
+
3.697603,
|
| 1984 |
+
0.606793,
|
| 1985 |
+
0.395537,
|
| 1986 |
+
1.998311,
|
| 1987 |
+
2.496547,
|
| 1988 |
+
3.689148,
|
| 1989 |
+
0.589660,
|
| 1990 |
+
0.391062,
|
| 1991 |
+
6.130040,
|
| 1992 |
+
3.399261,
|
| 1993 |
+
2.038516,
|
| 1994 |
+
0.805016,
|
| 1995 |
+
0.342108,
|
| 1996 |
+
2.288460,
|
| 1997 |
+
2.886504,
|
| 1998 |
+
3.775031,
|
| 1999 |
+
0.611335,
|
| 2000 |
+
0.362284,
|
| 2001 |
+
2.724032,
|
| 2002 |
+
2.961810,
|
| 2003 |
+
3.871767,
|
| 2004 |
+
0.634038,
|
| 2005 |
+
0.355971,
|
| 2006 |
+
3.177186,
|
| 2007 |
+
2.964136,
|
| 2008 |
+
3.876973,
|
| 2009 |
+
0.656636,
|
| 2010 |
+
0.355357,
|
| 2011 |
+
3.670075,
|
| 2012 |
+
2.927714,
|
| 2013 |
+
3.724325,
|
| 2014 |
+
0.681215,
|
| 2015 |
+
0.358340,
|
| 2016 |
+
4.018389,
|
| 2017 |
+
2.857357,
|
| 2018 |
+
3.482983,
|
| 2019 |
+
0.698585,
|
| 2020 |
+
0.363156,
|
| 2021 |
+
7.555811,
|
| 2022 |
+
4.106811,
|
| 2023 |
+
-0.991917,
|
| 2024 |
+
0.941867,
|
| 2025 |
+
0.319076,
|
| 2026 |
+
4.018389,
|
| 2027 |
+
2.483695,
|
| 2028 |
+
3.440898,
|
| 2029 |
+
0.698585,
|
| 2030 |
+
0.387449,
|
| 2031 |
+
1.776217,
|
| 2032 |
+
-2.683946,
|
| 2033 |
+
5.213116,
|
| 2034 |
+
0.584177,
|
| 2035 |
+
0.624107,
|
| 2036 |
+
1.222237,
|
| 2037 |
+
-1.182444,
|
| 2038 |
+
5.952465,
|
| 2039 |
+
0.554318,
|
| 2040 |
+
0.566077,
|
| 2041 |
+
0.731493,
|
| 2042 |
+
-2.536683,
|
| 2043 |
+
5.815343,
|
| 2044 |
+
0.534154,
|
| 2045 |
+
0.620640,
|
| 2046 |
+
4.135272,
|
| 2047 |
+
-6.996638,
|
| 2048 |
+
2.671970,
|
| 2049 |
+
0.711218,
|
| 2050 |
+
0.819975,
|
| 2051 |
+
3.311811,
|
| 2052 |
+
-7.660815,
|
| 2053 |
+
3.382963,
|
| 2054 |
+
0.664630,
|
| 2055 |
+
0.852871,
|
| 2056 |
+
1.313701,
|
| 2057 |
+
-8.639995,
|
| 2058 |
+
4.702456,
|
| 2059 |
+
0.559100,
|
| 2060 |
+
0.902632,
|
| 2061 |
+
5.940524,
|
| 2062 |
+
-6.223629,
|
| 2063 |
+
-0.631468,
|
| 2064 |
+
0.871706,
|
| 2065 |
+
0.791941,
|
| 2066 |
+
1.998311,
|
| 2067 |
+
2.743838,
|
| 2068 |
+
3.744030,
|
| 2069 |
+
0.591234,
|
| 2070 |
+
0.373894,
|
| 2071 |
+
0.901447,
|
| 2072 |
+
1.236992,
|
| 2073 |
+
5.754256,
|
| 2074 |
+
0.544341,
|
| 2075 |
+
0.451584,
|
| 2076 |
+
2.308977,
|
| 2077 |
+
-8.974196,
|
| 2078 |
+
3.609070,
|
| 2079 |
+
0.624563,
|
| 2080 |
+
0.924192,
|
| 2081 |
+
6.954154,
|
| 2082 |
+
-2.439843,
|
| 2083 |
+
-0.131163,
|
| 2084 |
+
0.885770,
|
| 2085 |
+
0.615029,
|
| 2086 |
+
1.098819,
|
| 2087 |
+
-4.458788,
|
| 2088 |
+
5.120727,
|
| 2089 |
+
0.551338,
|
| 2090 |
+
0.695278,
|
| 2091 |
+
1.181124,
|
| 2092 |
+
-4.579996,
|
| 2093 |
+
5.189564,
|
| 2094 |
+
0.551980,
|
| 2095 |
+
0.704632,
|
| 2096 |
+
1.255818,
|
| 2097 |
+
-4.787901,
|
| 2098 |
+
5.237051,
|
| 2099 |
+
0.552888,
|
| 2100 |
+
0.715808,
|
| 2101 |
+
1.325085,
|
| 2102 |
+
-5.106507,
|
| 2103 |
+
5.205010,
|
| 2104 |
+
0.555168,
|
| 2105 |
+
0.730794,
|
| 2106 |
+
1.546388,
|
| 2107 |
+
-5.819392,
|
| 2108 |
+
4.757893,
|
| 2109 |
+
0.569944,
|
| 2110 |
+
0.767035,
|
| 2111 |
+
1.953754,
|
| 2112 |
+
-4.183892,
|
| 2113 |
+
4.431713,
|
| 2114 |
+
0.593203,
|
| 2115 |
+
0.685676,
|
| 2116 |
+
2.117802,
|
| 2117 |
+
-4.137093,
|
| 2118 |
+
4.555096,
|
| 2119 |
+
0.599262,
|
| 2120 |
+
0.681069,
|
| 2121 |
+
2.285339,
|
| 2122 |
+
-4.051196,
|
| 2123 |
+
4.582438,
|
| 2124 |
+
0.607600,
|
| 2125 |
+
0.677703,
|
| 2126 |
+
2.850160,
|
| 2127 |
+
-3.665720,
|
| 2128 |
+
4.484994,
|
| 2129 |
+
0.631938,
|
| 2130 |
+
0.663500,
|
| 2131 |
+
5.278538,
|
| 2132 |
+
-2.238942,
|
| 2133 |
+
2.861224,
|
| 2134 |
+
0.752033,
|
| 2135 |
+
0.601315,
|
| 2136 |
+
0.946709,
|
| 2137 |
+
1.907628,
|
| 2138 |
+
5.196779,
|
| 2139 |
+
0.547226,
|
| 2140 |
+
0.420395,
|
| 2141 |
+
1.314173,
|
| 2142 |
+
3.104912,
|
| 2143 |
+
4.231404,
|
| 2144 |
+
0.563544,
|
| 2145 |
+
0.359828,
|
| 2146 |
+
1.780000,
|
| 2147 |
+
2.860000,
|
| 2148 |
+
3.881555,
|
| 2149 |
+
0.583841,
|
| 2150 |
+
0.368714,
|
| 2151 |
+
1.845110,
|
| 2152 |
+
-4.098880,
|
| 2153 |
+
4.247264,
|
| 2154 |
+
0.586614,
|
| 2155 |
+
0.692366,
|
| 2156 |
+
5.436187,
|
| 2157 |
+
-4.030482,
|
| 2158 |
+
2.109852,
|
| 2159 |
+
0.771915,
|
| 2160 |
+
0.683578,
|
| 2161 |
+
0.766444,
|
| 2162 |
+
3.182131,
|
| 2163 |
+
4.861453,
|
| 2164 |
+
0.531597,
|
| 2165 |
+
0.352483,
|
| 2166 |
+
1.938616,
|
| 2167 |
+
-6.614410,
|
| 2168 |
+
4.521085,
|
| 2169 |
+
0.588371,
|
| 2170 |
+
0.804441,
|
| 2171 |
+
0.516573,
|
| 2172 |
+
1.583572,
|
| 2173 |
+
6.148363,
|
| 2174 |
+
0.520797,
|
| 2175 |
+
0.442565,
|
| 2176 |
+
1.246815,
|
| 2177 |
+
0.230297,
|
| 2178 |
+
5.681036,
|
| 2179 |
+
0.567985,
|
| 2180 |
+
0.493479,
|
| 2181 |
+
0.997827,
|
| 2182 |
+
-6.930921,
|
| 2183 |
+
4.979576,
|
| 2184 |
+
0.543283,
|
| 2185 |
+
0.819255,
|
| 2186 |
+
3.288807,
|
| 2187 |
+
-5.382514,
|
| 2188 |
+
3.795752,
|
| 2189 |
+
0.655317,
|
| 2190 |
+
0.745515,
|
| 2191 |
+
2.311631,
|
| 2192 |
+
-1.566237,
|
| 2193 |
+
4.590085,
|
| 2194 |
+
0.621009,
|
| 2195 |
+
0.574018,
|
| 2196 |
+
2.680250,
|
| 2197 |
+
-6.111567,
|
| 2198 |
+
4.096152,
|
| 2199 |
+
0.625560,
|
| 2200 |
+
0.780312,
|
| 2201 |
+
3.832928,
|
| 2202 |
+
-1.537326,
|
| 2203 |
+
4.137731,
|
| 2204 |
+
0.680198,
|
| 2205 |
+
0.570719,
|
| 2206 |
+
2.961860,
|
| 2207 |
+
-2.274215,
|
| 2208 |
+
4.440943,
|
| 2209 |
+
0.642764,
|
| 2210 |
+
0.604338,
|
| 2211 |
+
4.386901,
|
| 2212 |
+
-2.683286,
|
| 2213 |
+
3.643886,
|
| 2214 |
+
0.704663,
|
| 2215 |
+
0.621530,
|
| 2216 |
+
1.217295,
|
| 2217 |
+
-7.834465,
|
| 2218 |
+
4.969286,
|
| 2219 |
+
0.552012,
|
| 2220 |
+
0.862592,
|
| 2221 |
+
1.542374,
|
| 2222 |
+
-0.136843,
|
| 2223 |
+
5.201008,
|
| 2224 |
+
0.589072,
|
| 2225 |
+
0.508637,
|
| 2226 |
+
3.878377,
|
| 2227 |
+
-6.041764,
|
| 2228 |
+
3.311079,
|
| 2229 |
+
0.685945,
|
| 2230 |
+
0.775357,
|
| 2231 |
+
3.084037,
|
| 2232 |
+
-6.809842,
|
| 2233 |
+
3.814195,
|
| 2234 |
+
0.645735,
|
| 2235 |
+
0.812640,
|
| 2236 |
+
3.747321,
|
| 2237 |
+
-4.503545,
|
| 2238 |
+
3.726453,
|
| 2239 |
+
0.675343,
|
| 2240 |
+
0.703978,
|
| 2241 |
+
6.094129,
|
| 2242 |
+
-3.205991,
|
| 2243 |
+
1.473482,
|
| 2244 |
+
0.810858,
|
| 2245 |
+
0.646305,
|
| 2246 |
+
4.588995,
|
| 2247 |
+
-4.728726,
|
| 2248 |
+
2.983221,
|
| 2249 |
+
0.720122,
|
| 2250 |
+
0.714667,
|
| 2251 |
+
6.583231,
|
| 2252 |
+
-3.941269,
|
| 2253 |
+
0.070268,
|
| 2254 |
+
0.866152,
|
| 2255 |
+
0.682705,
|
| 2256 |
+
3.492580,
|
| 2257 |
+
-3.195820,
|
| 2258 |
+
4.130198,
|
| 2259 |
+
0.663187,
|
| 2260 |
+
0.644597,
|
| 2261 |
+
1.255543,
|
| 2262 |
+
0.802341,
|
| 2263 |
+
5.307551,
|
| 2264 |
+
0.570082,
|
| 2265 |
+
0.466326,
|
| 2266 |
+
1.126122,
|
| 2267 |
+
-0.933602,
|
| 2268 |
+
6.538785,
|
| 2269 |
+
0.544562,
|
| 2270 |
+
0.548376,
|
| 2271 |
+
1.443109,
|
| 2272 |
+
-1.142774,
|
| 2273 |
+
5.905127,
|
| 2274 |
+
0.562759,
|
| 2275 |
+
0.558785,
|
| 2276 |
+
0.923043,
|
| 2277 |
+
-0.529042,
|
| 2278 |
+
7.003423,
|
| 2279 |
+
0.531987,
|
| 2280 |
+
0.530140,
|
| 2281 |
+
1.755386,
|
| 2282 |
+
3.529117,
|
| 2283 |
+
4.327696,
|
| 2284 |
+
0.585271,
|
| 2285 |
+
0.335177,
|
| 2286 |
+
2.632589,
|
| 2287 |
+
3.713828,
|
| 2288 |
+
4.364629,
|
| 2289 |
+
0.622953,
|
| 2290 |
+
0.322779,
|
| 2291 |
+
3.388062,
|
| 2292 |
+
3.721976,
|
| 2293 |
+
4.309028,
|
| 2294 |
+
0.655896,
|
| 2295 |
+
0.320163,
|
| 2296 |
+
4.075766,
|
| 2297 |
+
3.675413,
|
| 2298 |
+
4.076063,
|
| 2299 |
+
0.687132,
|
| 2300 |
+
0.322346,
|
| 2301 |
+
4.622910,
|
| 2302 |
+
3.474691,
|
| 2303 |
+
3.646321,
|
| 2304 |
+
0.716482,
|
| 2305 |
+
0.333201,
|
| 2306 |
+
5.171755,
|
| 2307 |
+
2.535753,
|
| 2308 |
+
2.670867,
|
| 2309 |
+
0.758757,
|
| 2310 |
+
0.382787,
|
| 2311 |
+
7.297331,
|
| 2312 |
+
0.763172,
|
| 2313 |
+
-0.048769,
|
| 2314 |
+
0.897013,
|
| 2315 |
+
0.468769,
|
| 2316 |
+
4.706828,
|
| 2317 |
+
1.651000,
|
| 2318 |
+
3.109532,
|
| 2319 |
+
0.732392,
|
| 2320 |
+
0.424547,
|
| 2321 |
+
4.071712,
|
| 2322 |
+
1.476821,
|
| 2323 |
+
3.476944,
|
| 2324 |
+
0.702114,
|
| 2325 |
+
0.433163,
|
| 2326 |
+
3.269817,
|
| 2327 |
+
1.470659,
|
| 2328 |
+
3.731945,
|
| 2329 |
+
0.666525,
|
| 2330 |
+
0.433866,
|
| 2331 |
+
2.527572,
|
| 2332 |
+
1.617311,
|
| 2333 |
+
3.865444,
|
| 2334 |
+
0.633505,
|
| 2335 |
+
0.426088,
|
| 2336 |
+
1.970894,
|
| 2337 |
+
1.858505,
|
| 2338 |
+
3.961782,
|
| 2339 |
+
0.603876,
|
| 2340 |
+
0.416587,
|
| 2341 |
+
1.579543,
|
| 2342 |
+
2.097941,
|
| 2343 |
+
4.084996,
|
| 2344 |
+
0.579658,
|
| 2345 |
+
0.409945,
|
| 2346 |
+
7.664182,
|
| 2347 |
+
0.673132,
|
| 2348 |
+
-2.435867,
|
| 2349 |
+
0.992440,
|
| 2350 |
+
0.480777,
|
| 2351 |
+
1.397041,
|
| 2352 |
+
-1.340139,
|
| 2353 |
+
5.630378,
|
| 2354 |
+
0.567192,
|
| 2355 |
+
0.569420,
|
| 2356 |
+
0.884838,
|
| 2357 |
+
0.658740,
|
| 2358 |
+
6.233232,
|
| 2359 |
+
0.541366,
|
| 2360 |
+
0.478899,
|
| 2361 |
+
0.767097,
|
| 2362 |
+
-0.968035,
|
| 2363 |
+
7.077932,
|
| 2364 |
+
0.526564,
|
| 2365 |
+
0.546118,
|
| 2366 |
+
0.460213,
|
| 2367 |
+
-1.334106,
|
| 2368 |
+
6.787447,
|
| 2369 |
+
0.523913,
|
| 2370 |
+
0.563830,
|
| 2371 |
+
0.748618,
|
| 2372 |
+
-1.067994,
|
| 2373 |
+
6.798303,
|
| 2374 |
+
0.531529,
|
| 2375 |
+
0.555057,
|
| 2376 |
+
1.236408,
|
| 2377 |
+
-1.585568,
|
| 2378 |
+
5.480490,
|
| 2379 |
+
0.566036,
|
| 2380 |
+
0.582329,
|
| 2381 |
+
0.387306,
|
| 2382 |
+
-1.409990,
|
| 2383 |
+
6.957705,
|
| 2384 |
+
0.516311,
|
| 2385 |
+
0.563054,
|
| 2386 |
+
0.319925,
|
| 2387 |
+
-1.607931,
|
| 2388 |
+
6.508676,
|
| 2389 |
+
0.517472,
|
| 2390 |
+
0.577877,
|
| 2391 |
+
1.639633,
|
| 2392 |
+
2.556298,
|
| 2393 |
+
3.863736,
|
| 2394 |
+
0.573595,
|
| 2395 |
+
0.389807,
|
| 2396 |
+
1.255645,
|
| 2397 |
+
2.467144,
|
| 2398 |
+
4.203800,
|
| 2399 |
+
0.560698,
|
| 2400 |
+
0.395332,
|
| 2401 |
+
1.031362,
|
| 2402 |
+
2.382663,
|
| 2403 |
+
4.615849,
|
| 2404 |
+
0.549756,
|
| 2405 |
+
0.399751,
|
| 2406 |
+
4.253081,
|
| 2407 |
+
2.772296,
|
| 2408 |
+
3.315305,
|
| 2409 |
+
0.710288,
|
| 2410 |
+
0.368253,
|
| 2411 |
+
4.530000,
|
| 2412 |
+
2.910000,
|
| 2413 |
+
3.339685,
|
| 2414 |
+
0.723330,
|
| 2415 |
+
0.363373,
|
| 2416 |
+
]
|
| 2417 |
+
)
|
| 2418 |
+
# The canonical face model is stored above as a flat list of 5-tuples
# (x, y, z, u, v): metric 3D position plus texture coordinates.
# Reshape to (n_landmarks, 5), transpose to (5, n_landmarks), then keep
# only the 3D coordinates as a (3, n_landmarks) matrix.
canonical_metric_landmarks = np.reshape(
    canonical_metric_landmarks, (canonical_metric_landmarks.shape[0] // 5, 5)
).T
canonical_metric_landmarks = canonical_metric_landmarks[:3, :]

# (landmark index, weight) pairs used by the weighted Procrustes solve.
# The values mirror MediaPipe's face-geometry module; only these stable
# landmarks contribute to the pose estimation.
procrustes_landmark_basis = [
    (4, 0.070909939706326),
    (6, 0.032100144773722),
    (10, 0.008446550928056),
    (33, 0.058724168688059),
    (54, 0.007667080033571),
    (67, 0.009078059345484),
    (117, 0.009791937656701),
    (119, 0.014565368182957),
    (121, 0.018591361120343),
    (127, 0.005197994410992),
    (129, 0.120625205338001),
    (132, 0.005560018587857),
    (133, 0.05328618362546),
    (136, 0.066890455782413),
    (143, 0.014816547743976),
    (147, 0.014262833632529),
    (198, 0.025462191551924),
    (205, 0.047252278774977),
    (263, 0.058724168688059),
    (284, 0.007667080033571),
    (297, 0.009078059345484),
    (346, 0.009791937656701),
    (348, 0.014565368182957),
    (350, 0.018591361120343),
    (356, 0.005197994410992),
    (358, 0.120625205338001),
    (361, 0.005560018587857),
    (362, 0.05328618362546),
    (365, 0.066890455782413),
    (372, 0.014816547743976),
    (376, 0.014262833632529),
    (420, 0.025462191551924),
    (425, 0.047252278774977),
]
# Dense weight vector over all landmarks; landmarks outside the basis get 0.
landmark_weights = np.zeros((canonical_metric_landmarks.shape[1],))
for idx, weight in procrustes_landmark_basis:
    landmark_weights[idx] = weight
|
| 2461 |
+
|
| 2462 |
+
|
| 2463 |
+
def log(name, f):
    """Print *f* labelled with *name* when debug mode is enabled; no-op otherwise."""
    if not DEBUG.get_debug():
        return
    print(f"{name} logged:", f)
    print()
|
| 2467 |
+
|
| 2468 |
+
|
| 2469 |
+
def cpp_compare(name, np_matrix):
    """Debug helper: compare *np_matrix* against a dumped C++ reference matrix.

    Loads ``{name}_cpp.npy``, re-orders its elements (the C++ dump is laid
    out column-wise relative to numpy's row-major view), and prints the
    summed squared difference. No-op unless debug mode is enabled.
    """
    if not DEBUG.get_debug():
        return

    # Re-order the C++ matrix: its memory alignment differs from numpy's.
    cpp_matrix = np.load(f"{name}_cpp.npy")
    rows, cols = cpp_matrix.shape
    columns = np.split(np.reshape(cpp_matrix, -1), cols)
    cpp_matrix = np.stack(columns, 1)

    print(f"{name}:", np.sum(np.abs(cpp_matrix - np_matrix[:rows, :cols]) ** 2))
    print()
|
| 2479 |
+
|
| 2480 |
+
|
| 2481 |
+
def get_metric_landmarks(screen_landmarks, pcf):
    """Convert normalized screen landmarks into metric 3D landmarks.

    Follows MediaPipe's face-geometry pipeline: two scale-estimation
    passes, then removal of the estimated head pose so the returned
    landmarks sit in the canonical (face-centered) frame.

    Parameters:
        screen_landmarks: (3, n) array of normalized landmark coordinates.
        pcf: perspective-camera frustum with left/right/top/bottom/near.

    Returns:
        (metric_landmarks, pose_transform_mat): pose-free metric landmarks
        and the 4x4 pose matrix mapping the canonical model onto the face.
    """
    # Map normalized coordinates onto the frustum's near-plane rectangle.
    screen_landmarks = project_xy(screen_landmarks, pcf)
    depth_offset = np.mean(screen_landmarks[2, :])

    # First pass: rough scale from the raw (handedness-flipped) landmarks.
    intermediate_landmarks = screen_landmarks.copy()
    intermediate_landmarks = change_handedness(intermediate_landmarks)
    first_iteration_scale = estimate_scale(intermediate_landmarks)

    # Second pass: refine the scale after unprojecting with the first estimate.
    intermediate_landmarks = screen_landmarks.copy()
    intermediate_landmarks = move_and_rescale_z(
        pcf, depth_offset, first_iteration_scale, intermediate_landmarks
    )
    intermediate_landmarks = unproject_xy(pcf, intermediate_landmarks)
    intermediate_landmarks = change_handedness(intermediate_landmarks)
    second_iteration_scale = estimate_scale(intermediate_landmarks)

    # Final metric landmarks using the combined scale of both passes.
    metric_landmarks = screen_landmarks.copy()
    total_scale = first_iteration_scale * second_iteration_scale
    metric_landmarks = move_and_rescale_z(
        pcf, depth_offset, total_scale, metric_landmarks
    )
    metric_landmarks = unproject_xy(pcf, metric_landmarks)
    metric_landmarks = change_handedness(metric_landmarks)

    # Pose of the canonical model that best matches the metric landmarks.
    pose_transform_mat = solve_weighted_orthogonal_problem(
        canonical_metric_landmarks, metric_landmarks, landmark_weights
    )
    cpp_compare("pose_transform_mat", pose_transform_mat)

    # Remove the pose so the landmarks end up in the canonical frame.
    inv_pose_transform_mat = np.linalg.inv(pose_transform_mat)
    inv_pose_rotation = inv_pose_transform_mat[:3, :3]
    inv_pose_translation = inv_pose_transform_mat[:3, 3]

    metric_landmarks = (
        inv_pose_rotation @ metric_landmarks + inv_pose_translation[:, None]
    )

    return metric_landmarks, pose_transform_mat
|
| 2519 |
+
|
| 2520 |
+
|
| 2521 |
+
def project_xy(landmarks, pcf):
    """Map normalized landmarks into the frustum's near-plane rectangle.

    Flips the y axis (image coordinates grow downward), then scales and
    translates x/y into [left, right] x [bottom, top]; z is scaled by the
    horizontal extent. Note: row 1 of *landmarks* is modified in place.
    """
    width = pcf.right - pcf.left
    height = pcf.top - pcf.bottom

    # Image y grows downward; frustum y grows upward.
    landmarks[1, :] = 1.0 - landmarks[1, :]

    scale = np.array([[width, height, width]]).T
    offset = np.array([[pcf.left, pcf.bottom, 0]]).T
    landmarks = landmarks * scale
    landmarks = landmarks + offset

    return landmarks
|
| 2533 |
+
|
| 2534 |
+
|
| 2535 |
+
def change_handedness(landmarks):
    """Flip the z axis in place (switch between left/right-handed frames)."""
    landmarks[2, :] = -landmarks[2, :]
    return landmarks
|
| 2539 |
+
|
| 2540 |
+
|
| 2541 |
+
def move_and_rescale_z(pcf, depth_offset, scale, landmarks):
    """Re-center depth at the near plane and divide it by *scale*.

    Row 2 of *landmarks* is rewritten in place; the array is returned for
    chaining.
    """
    shifted_z = landmarks[2, :] - depth_offset + pcf.near
    landmarks[2, :] = shifted_z / scale
    return landmarks
|
| 2545 |
+
|
| 2546 |
+
|
| 2547 |
+
def unproject_xy(pcf, landmarks):
    """Undo the perspective divide: scale x and y by z / near, in place."""
    for axis in (0, 1):
        landmarks[axis, :] = landmarks[axis, :] * landmarks[2, :] / pcf.near
    return landmarks
|
| 2552 |
+
|
| 2553 |
+
|
| 2554 |
+
def estimate_scale(landmarks):
    """Estimate the uniform scale between the canonical model and *landmarks*.

    Solves the weighted orthogonal (Procrustes) problem against the
    canonical model and returns the norm of the first column of the
    transform, i.e. the scale factor of its rotation-and-scale part.
    """
    transform_mat = solve_weighted_orthogonal_problem(
        canonical_metric_landmarks, landmarks, landmark_weights
    )

    return np.linalg.norm(transform_mat[:, 0])
|
| 2560 |
+
|
| 2561 |
+
|
| 2562 |
+
def extract_square_root(point_weights):
    """Element-wise square root of the per-point weights (w -> sqrt(w))."""
    return np.sqrt(point_weights)
|
| 2564 |
+
|
| 2565 |
+
|
| 2566 |
+
def solve_weighted_orthogonal_problem(source_points, target_points, point_weights):
    """Solve the weighted orthogonal (Procrustes) problem.

    Returns the 4x4 similarity transform mapping *source_points* onto
    *target_points* under per-point *point_weights*.
    """
    return internal_solve_weighted_orthogonal_problem(
        source_points, target_points, extract_square_root(point_weights)
    )
|
| 2572 |
+
|
| 2573 |
+
|
| 2574 |
+
def internal_solve_weighted_orthogonal_problem(sources, targets, sqrt_weights):
    """Core weighted-Procrustes solve (as in MediaPipe's face geometry).

    Parameters:
        sources: (3, n) canonical model points.
        targets: (3, n) observed metric points.
        sqrt_weights: (n,) square roots of the per-point weights.

    Returns:
        A 4x4 transform (rotation * scale, plus translation) mapping
        *sources* onto *targets* in the weighted least-squares sense.
    """
    cpp_compare("sources", sources)
    cpp_compare("targets", targets)

    # transposed(A_w).
    weighted_sources = sources * sqrt_weights[None, :]
    # transposed(B_w).
    weighted_targets = targets * sqrt_weights[None, :]

    cpp_compare("weighted_sources", weighted_sources)
    cpp_compare("weighted_targets", weighted_targets)

    # w = transposed(j_w) j_w.
    total_weight = np.sum(sqrt_weights * sqrt_weights)
    log("total_weight", total_weight)

    # Let C = (j_w transposed(j_w)) / (transposed(j_w) j_w).
    # Note that C = transposed(C), hence (I - C) = transposed(I - C).
    #
    # transposed(A_w) C = transposed(A_w) j_w transposed(j_w) / w =
    # (transposed(A_w) j_w) transposed(j_w) / w = c_w transposed(j_w),
    #
    # where c_w = transposed(A_w) j_w / w is a k x 1 vector calculated here:
    twice_weighted_sources = weighted_sources * sqrt_weights[None, :]
    source_center_of_mass = np.sum(twice_weighted_sources, axis=1) / total_weight
    log("source_center_of_mass", source_center_of_mass)

    # transposed((I - C) A_w) = transposed(A_w) (I - C) =
    # transposed(A_w) - transposed(A_w) C = transposed(A_w) - c_w transposed(j_w).
    centered_weighted_sources = weighted_sources - np.matmul(
        source_center_of_mass[:, None], sqrt_weights[None, :]
    )
    cpp_compare("centered_weighted_sources", centered_weighted_sources)

    # Cross-covariance between the targets and the mean-centered sources.
    design_matrix = np.matmul(weighted_targets, centered_weighted_sources.T)
    cpp_compare("design_matrix", design_matrix)
    log("design_matrix_norm", np.linalg.norm(design_matrix))

    rotation = compute_optimal_rotation(design_matrix)

    scale = compute_optimal_scale(
        centered_weighted_sources, weighted_sources, weighted_targets, rotation
    )
    log("scale", scale)

    rotation_and_scale = scale * rotation

    # The optimal translation is the weighted mean of the residuals.
    pointwise_diffs = weighted_targets - np.matmul(rotation_and_scale, weighted_sources)
    cpp_compare("pointwise_diffs", pointwise_diffs)

    weighted_pointwise_diffs = pointwise_diffs * sqrt_weights[None, :]
    cpp_compare("weighted_pointwise_diffs", weighted_pointwise_diffs)

    translation = np.sum(weighted_pointwise_diffs, axis=1) / total_weight
    log("translation", translation)

    transform_mat = combine_transform_matrix(rotation_and_scale, translation)
    cpp_compare("transform_mat", transform_mat)

    return transform_mat
|
| 2634 |
+
|
| 2635 |
+
|
| 2636 |
+
def compute_optimal_rotation(design_matrix):
    """Optimal rotation for the Procrustes fit via SVD (Kabsch algorithm).

    If det(U) * det(Vh) is negative the last column of U is flipped so the
    product is a proper rotation (det = +1) rather than a reflection.
    A near-zero design matrix is only reported, not raised, matching the
    rest of this pipeline's error style.
    """
    if np.linalg.norm(design_matrix) < 1e-9:
        print("Design matrix norm is too small!")

    u, _, vh = np.linalg.svd(design_matrix, full_matrices=True)

    postrotation = u
    prerotation = vh

    # Guarantee a proper rotation (avoid mirror solutions).
    if np.linalg.det(postrotation) * np.linalg.det(prerotation) < 0:
        postrotation[:, 2] = -1 * postrotation[:, 2]

    cpp_compare("postrotation", postrotation)
    cpp_compare("prerotation", prerotation)

    rotation = np.matmul(postrotation, prerotation)

    cpp_compare("rotation", rotation)

    return rotation
|
| 2656 |
+
|
| 2657 |
+
|
| 2658 |
+
def compute_optimal_scale(
    centered_weighted_sources, weighted_sources, weighted_targets, rotation
):
    """Closed-form optimal uniform scale for the weighted Procrustes fit.

    scale = <R @ centered_sources, targets> / <centered_sources, sources>
    using Frobenius inner products. Degenerate values are only reported
    via print, not raised, matching the pipeline's behavior.
    """
    rotated_sources = np.matmul(rotation, centered_weighted_sources)

    numerator = np.sum(rotated_sources * weighted_targets)
    denominator = np.sum(centered_weighted_sources * weighted_sources)

    if denominator < 1e-9:
        print("Scale expression denominator is too small!")
    if numerator / denominator < 1e-9:
        print("Scale is too small!")

    return numerator / denominator
|
| 2672 |
+
|
| 2673 |
+
|
| 2674 |
+
def combine_transform_matrix(r_and_s, t):
    """Assemble a 4x4 homogeneous transform from a 3x3 rotation-scale
    block and a length-3 translation vector."""
    transform = np.eye(4)
    transform[:3, :3] = r_and_s
    transform[:3, 3:] = np.reshape(t, (3, 1))
    return transform
|
codes/base/iris_lm_depth.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import cv2
|
| 3 |
+
import numpy as np
|
| 4 |
+
from codes.base.core import (
|
| 5 |
+
detections_to_rect,
|
| 6 |
+
landmarks_to_detections,
|
| 7 |
+
slice_from_roi,
|
| 8 |
+
tflite_inference,
|
| 9 |
+
transform_rect,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
PATH2ROOT_ABS = os.path.dirname(__file__) + "/../../"
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def from_landmarks_to_depth(
    frame_rgb, eye_landmarks, image_size, is_right_eye=False, focal_length=None
):
    """
    Predict the iris position and eye-to-camera distance from eye landmarks.

    Parameters:
        frame_rgb: the full frame (RGB, H x W x 3)
        eye_landmarks: the landmarks of the eye, used to locate the eye ROI
        image_size: the image size
        is_right_eye: whether this is the right eye (the crop is mirrored
            before inference — see detect_iris)
        focal_length: camera focal length in pixels; defaults to the frame
            width when not provided

    Returns:
        success: whether the eye crop was non-empty and the iris detected
        depth: distance to the camera (mm)
        iris_size: the size of the iris (pixels)
        iris_landmarks: iris landmarks normalized to the full frame
        eye_contours: eye contours normalized to the full frame
        iris_landmarks_respect: iris landmarks relative to the eye crop
        (all result values are None when success is False)
    """
    if focal_length is None:
        # Rough fallback: assume the focal length equals the frame width.
        focal_length = frame_rgb.shape[1]

    # Build an enlarged ROI around the eye from its landmarks.
    detections = landmarks_to_detections(eye_landmarks)
    rect = detections_to_rect(detections, image_size, rotation_vector_start_end=(0, 1))
    roi = transform_rect(rect, image_size, scale_x=2.3, scale_y=2.3)

    slice_y = slice_from_roi(roi, image_size, False)
    slice_x = slice_from_roi(roi, image_size, True)
    eye_image = frame_rgb[slice(*slice_y), slice(*slice_x), :]
    # Crop origin, used to shift crop-pixel coordinates back into the frame.
    position_in_frame = np.array((slice_x[0], slice_y[0], 0))

    if eye_image.any():
        success = True
        eye_contours, iris_landmarks, eye_frame_low = detect_iris(
            eye_image.copy(), is_right_eye=is_right_eye
        )

        # Keep a copy normalized to the eye crop before re-normalizing.
        iris_landmarks_respect = iris_landmarks.copy()

        # Convert crop-normalized landmarks to frame-normalized ones:
        # scale to crop pixels, shift by the crop origin, divide by frame size.
        # NOTE(review): column 0 is scaled by eye_image.shape[0] (rows) but
        # later divided by frame_rgb.shape[1] (width); this is only exact if
        # the crop/frame aspect ratios agree or axes are swapped upstream —
        # confirm against the landmark coordinate convention.
        eye_contours[:, 0] = eye_contours[:, 0] * eye_image.shape[0]
        eye_contours[:, 1] = eye_contours[:, 1] * eye_image.shape[1]
        eye_contours = eye_contours + position_in_frame

        eye_contours[:, 0] = eye_contours[:, 0] / frame_rgb.shape[1]
        eye_contours[:, 1] = eye_contours[:, 1] / frame_rgb.shape[0]

        iris_landmarks[:, 0] = iris_landmarks[:, 0] * eye_image.shape[0]
        iris_landmarks[:, 1] = iris_landmarks[:, 1] * eye_image.shape[1]
        iris_landmarks = iris_landmarks + position_in_frame

        iris_landmarks[:, 0] = iris_landmarks[:, 0] / frame_rgb.shape[1]
        iris_landmarks[:, 1] = iris_landmarks[:, 1] / frame_rgb.shape[0]

        depth, iris_size = calculate_iris_depth(iris_landmarks, image_size, focal_length)

    else:
        # Empty (all-zero) crop: the eye is outside the frame or not visible.
        success = False
        depth = None
        iris_size = None
        iris_landmarks = None
        eye_contours = None
        iris_landmarks_respect = None

    return success, depth, iris_size, iris_landmarks, eye_contours, iris_landmarks_respect
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def detect_iris(eye_frame, is_right_eye=False):
    """
    Run the iris-landmark TFLite model on an eye crop.

    Parameters:
        eye_frame: the RGB eye crop (any size; resized to 64 x 64)
        is_right_eye: mirror the crop before inference and un-mirror the
            resulting landmarks — presumably the model expects left eyes;
            consistent with the flip-back below

    Returns:
        eye_contours: (71, 3) eye-contour landmarks normalized to the crop
        iris_landmarks: (5, 3) iris landmarks normalized to the crop
        eye_frame_low: the 64 x 64 (possibly mirrored) model input frame
    """
    # The TFLite model takes a fixed 64 x 64 input.
    side_low = 64
    eye_frame_low = cv2.resize(
        eye_frame, (side_low, side_low), interpolation=cv2.INTER_AREA
    )

    model_path = PATH2ROOT_ABS + "models/iris/iris_landmark.tflite"

    if is_right_eye:
        eye_frame_low = np.fliplr(eye_frame_low)

    # Scale pixel values from [0, 255] to [-1, 1] for the model.
    outputs = tflite_inference(eye_frame_low / 127.5 - 1.0, model_path)
    eye_contours_low = np.reshape(outputs[0], (71, 3))
    iris_landmarks_low = np.reshape(outputs[1], (5, 3))

    # Normalize from model-input pixels to [0, 1].
    eye_contours = eye_contours_low / side_low
    iris_landmarks = iris_landmarks_low / side_low

    if is_right_eye:
        # Undo the horizontal mirroring on the x coordinates.
        eye_contours[:, 0] = 1 - eye_contours[:, 0]
        iris_landmarks[:, 0] = 1 - iris_landmarks[:, 0]

    return eye_contours, iris_landmarks, eye_frame_low
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def calculate_iris_depth(iris_landmarks, image_size, focal_length_pixel):
    """
    Estimate eye-to-camera distance (mm) and iris diameter (pixels).

    *iris_landmarks* must be normalized to the complete image frame;
    landmark 0 is taken as the iris center.
    """
    iris_size = calculate_iris_diameter(iris_landmarks, image_size)
    center = iris_landmarks[0, :]
    depth = calculate_depth(center, focal_length_pixel, iris_size, image_size)
    return depth, iris_size
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def get_depth(x0, y0, x1, y1):
    """Euclidean distance between (x0, y0) and (x1, y1).

    (Named "depth" for historical reasons; it is a plain 2D distance.)
    """
    dx = x0 - x1
    dy = y0 - y1
    return np.sqrt(dx ** 2 + dy ** 2)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def get_landmark_depth(ld0, ld1, image_size):
    """Pixel-space Euclidean distance between two normalized landmarks.

    Each landmark's x is scaled by image_size[0] and y by image_size[1]
    before the distance is taken.
    """
    x0 = ld0[0] * image_size[0]
    y0 = ld0[1] * image_size[1]
    x1 = ld1[0] * image_size[0]
    y1 = ld1[1] * image_size[1]
    return np.sqrt((x0 - x1) ** 2 + (y0 - y1) ** 2)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def calculate_iris_diameter(iris_landmarks, image_size):
    """Mean of the vertical and horizontal iris diameters, in pixels.

    Uses landmark pairs (1, 3) for the vertical and (2, 4) for the
    horizontal diameter.
    """
    vertical = get_landmark_depth(
        iris_landmarks[1, :], iris_landmarks[3, :], image_size
    )
    horizontal = get_landmark_depth(
        iris_landmarks[2, :], iris_landmarks[4, :], image_size
    )
    return (horizontal + vertical) / 2.0
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def calculate_depth(center_landmark, focal_length_pixel, iris_size, image_size):
    """Distance from camera to the iris, in mm, via similar triangles.

    Uses the fixed average human iris diameter and the iris' apparent
    pixel size; the off-axis offset from the image center is folded into
    the effective focal distance.
    """
    # Average fixed iris size across human beings.
    human_iris_size_in_mm = 11.8

    origin = np.array(image_size) / 2.0
    center_px = center_landmark[:2] * np.array(image_size)

    # Distance of the iris center from the optical axis, in pixels.
    y = np.sqrt((origin[0] - center_px[0]) ** 2 + (origin[1] - center_px[1]) ** 2)
    # Effective ray length from the pinhole to the projected iris center.
    x = np.sqrt(focal_length_pixel ** 2 + y ** 2)

    return human_iris_size_in_mm * x / iris_size
|
codes/calibration.py
ADDED
|
@@ -0,0 +1,582 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module is for calibration of the Owleye. The module includes the code to collect data from the user,
|
| 2 |
+
while they are looking at the white point. The molude contains one class called Clb. To understand this module, you should know
|
| 3 |
+
about Mediapipe landmark detection."""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import cv2
|
| 8 |
+
import time
|
| 9 |
+
from codes.base import eyeing as ey
|
| 10 |
+
import os
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
if os.name == "nt":
|
| 13 |
+
import winsound
|
| 14 |
+
elif os.name == "posix":
|
| 15 |
+
pass
|
| 16 |
+
from sklearn.utils import shuffle
|
| 17 |
+
import math
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
INFO = ("", "M", 25, "Email: ") # The information that goes to information.txt
|
| 21 |
+
CALIBRATION_GRID = (4, 200, 6, 100) # Calibration grid
|
| 22 |
+
|
| 23 |
+
# Class for calibration
|
| 24 |
+
class Clb(object):
|
| 25 |
+
running = True
|
| 26 |
+
|
| 27 |
+
    @staticmethod
    def create_grid(clb_grid):
        """
        Create the grid of calibration points the white dot will visit.

        Parameters:
            clb_grid: a sequence of length 2-4 selecting the grid mode:
                (rows, points_in_row): sweep the dot along rows only;
                (rows, cols, samples_per_point): stationary dot on a
                    rows x cols grid, each point repeated samples_per_point
                    times;
                (rows, points_in_row, cols, points_in_col): sweep along
                    rows, then along columns (the suggested mode).

        Returns:
            points: a list of lists of [x, y] positions, normalized to
            [0, 1]. For an invalid grid length a message is printed and
            the program is terminated via quit().
        """
        # Dot size as a fraction of the screen; edge points are nudged by
        # fractions of this so the dot stays fully visible on screen.
        point_ratio = 0.012
        if len(clb_grid) == 2:
            # For going through just rows: the dot sweeps horizontally
            # along each row, alternating direction between rows.
            rows = clb_grid[0]
            points_in_row = clb_grid[1]
            points = []
            dy_rows = (1 - rows * point_ratio) / (rows - 1)
            dx = (1 - points_in_row * point_ratio) / (points_in_row - 1)

            for j in range(rows):
                # Vertical position; first/last rows are shifted inward.
                if j == 0:
                    p_y = j * (point_ratio + dy_rows) + 4.0 * point_ratio / 3.0
                elif j == rows-1:
                    p_y = j * (point_ratio + dy_rows) - point_ratio / 3.0
                else:
                    p_y = j * (point_ratio + dy_rows) + point_ratio / 2
                smp_in_p = []
                for i in range(points_in_row):
                    # Horizontal position; first/last columns shifted inward.
                    if i == 0:
                        p_x = i * (point_ratio + dx) + point_ratio
                    elif i == points_in_row - 1:
                        p_x = i * (point_ratio + dx)
                    else:
                        p_x = i * (point_ratio + dx) + point_ratio / 2
                    smp_in_p.append([p_x, p_y])
                # Reverse every other row so consecutive sweeps connect.
                if j % 2 == 0:
                    points.append(smp_in_p)
                else:
                    smp_in_p.reverse()
                    points.append(smp_in_p)

        elif len(clb_grid) == 3:
            # For appearing stationary (not moving): each grid point is
            # repeated smp_in_pnt times so several samples are collected.
            rows = clb_grid[0]
            cols = clb_grid[1]
            smp_in_pnt = clb_grid[2]
            points = []
            dy = (1 - rows * point_ratio) / (rows - 1)
            dx = (1 - cols * point_ratio) / (cols - 1)

            for j in range(rows):
                if j == 0:
                    p_y = j * (point_ratio + dy) + 4.0 * point_ratio / 3.0
                elif j == rows - 1:
                    p_y = j * (point_ratio + dy) - point_ratio / 3.0
                else:
                    p_y = j * (point_ratio + dy) + point_ratio / 2
                for i in range(cols):
                    if i == 0:
                        p_x = i * (point_ratio + dx) + point_ratio
                    elif i == cols - 1:
                        p_x = i * (point_ratio + dx)
                    else:
                        p_x = i * (point_ratio + dx) + point_ratio / 2
                    smp_in_p = []
                    for k in range(smp_in_pnt):
                        smp_in_p.append([p_x, p_y])
                    points.append(smp_in_p)

        elif len(clb_grid) == 4:
            # For going through rows and columns. It is suggested:
            # horizontal sweeps first, then vertical sweeps.
            rows = clb_grid[0]
            points_in_row = clb_grid[1]
            cols = clb_grid[2]
            points_in_col = clb_grid[3]
            points = []

            d_rows = (1 - rows * point_ratio) / (rows - 1)
            dx = (1 - points_in_row * point_ratio) / (points_in_row - 1)
            d_cols = (1 - cols * point_ratio) / (cols - 1)
            dy = (1 - points_in_col * point_ratio) / (points_in_col - 1)

            # Horizontal sweeps, one per row, alternating direction.
            for j in range(rows):
                if j == 0:
                    p_y = j * (point_ratio + d_rows) + 4.0 * point_ratio / 3.0
                elif j == rows - 1:
                    p_y = j * (point_ratio + d_rows) - point_ratio / 3.0
                else:
                    p_y = j * (point_ratio + d_rows) + point_ratio / 2
                smp_in_p = []
                for i in range(points_in_row):
                    if i == 0:
                        p_x = i * (point_ratio + dx) + point_ratio
                    elif i == points_in_row - 1:
                        p_x = i * (point_ratio + dx)
                    else:
                        p_x = i * (point_ratio + dx) + point_ratio / 2
                    smp_in_p.append([p_x, p_y])
                if j % 2 == 0:
                    points.append(smp_in_p)
                else:
                    smp_in_p.reverse()
                    points.append(smp_in_p)
            # Vertical sweeps, one per column, alternating direction.
            for i in range(cols):
                if i == 0:
                    p_x = i * (point_ratio + d_cols) + point_ratio
                elif i == cols - 1:
                    p_x = i * (point_ratio + d_cols)
                else:
                    p_x = i * (point_ratio + d_cols) + point_ratio / 2
                smp_in_p = []
                for j in range(points_in_col):
                    if j == 0:
                        p_y = j * (point_ratio + dy) + 4.0 * point_ratio / 3.0
                    elif j == points_in_col - 1:
                        p_y = j * (point_ratio + dy) - point_ratio / 3.0
                    else:
                        p_y = j * (point_ratio + dy) + point_ratio / 2
                    smp_in_p.append([p_x, p_y])
                if i % 2 == 0:
                    points.append(smp_in_p)
                else:
                    smp_in_p.reverse()
                    points.append(smp_in_p)

        else:
            # Invalid grid specification: report and terminate the program.
            print("\nPlease Enter a vector with length of 2-4!!")
            points = None
            quit()

        return points
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
    def et(self, num, camera_id=0, info=INFO, clb_grid=CALIBRATION_GRID):
        """
        Collecting the data (inputs and outputs of the models).

        Shows a moving calibration point on every connected monitor, captures
        webcam frames while the user follows it, and saves the extracted model
        inputs (eye images, scalar face features) together with the on-screen
        point locations as labels.

        Parameters:
            num: Subject's number (used as the subject folder name)
            camera_id: Camera ID
            info: Subject's information as a (name, descriptions) pair
            clb_grid: Calibration grid specification passed to create_grid

        Returns:
            None
        """

        print("\nCalibration started!")

        # Some interactions with user
        name, descriptions = info
        tx0 = [["Follow WHITE point", (0.05, 0.25), 1.5, ey.RED, 3],
               ["SPACE --> start", (0.05, 0.5), 1.5, ey.RED, 3],
               ["ESC --> Stop", (0.05, 0.75), 1.5, ey.RED, 3]]
        run_app = True

        sbj_dir = ey.subjects_dir + f"{num}/"
        if os.path.exists(sbj_dir):
            # Subject folder already exists: ask the user whether to overwrite it.
            tx1 = [["There is a subject in", (0.05, 0.2), 1.3, ey.RED, 2],
                   [f"{sbj_dir}.", (0.05, 0.4), 1.3, ey.RED, 2],
                   ["Do you want to", (0.05, 0.6), 1.3, ey.RED, 2],
                   ["remove it (y/n)?", (0.05, 0.8), 1.3, ey.RED, 2]]

            win_name = "Subject exists"
            ey.big_win(win_name, 0)
            ey.show_clb_win(win_name, texts=tx1, win_color=ey.WHITE)
            button = cv2.waitKey(0)
            if button == 27 or (button == ord("q")) or (button == ord("Q")) or (button == ord("n")) or (button == ord("N")):
                run_app = False
            cv2.destroyWindow(win_name)

        if run_app:
            sbj_dir = ey.create_dir([sbj_dir])
            clb_points = self.create_grid(clb_grid)

            # Some landmarks needed for calculation of face vectors
            some_landmarks_ids = ey.get_some_landmarks_ids()

            (
                frame_size,
                camera_matrix,
                dst_cof,
                pcf
            ) = ey.get_camera_properties(camera_id)

            face_mesh = ey.get_mesh()

            # Per-series accumulators; each entry corresponds to one series of
            # calibration points (e.g. one row of the grid) on one monitor.
            fps_vec = []
            t_mat = []
            eyes_mat = []
            inp_scalars_mat = []
            points_loc_mat = []
            eyes_ratio_mat = []

            t0 = time.perf_counter()
            cap = ey.get_camera(camera_id, frame_size)
            # Skip initial frames so auto-exposure/white-balance settles.
            ey.pass_frames(cap, 100)

            win_name = "Information"
            ey.big_win(win_name, math.floor(len(ey.monitors) / 2)*ey.monitors[0].width)
            ey.show_clb_win(win_name, texts=tx0, win_color=ey.WHITE)
            cv2.waitKey(10000)
            cv2.destroyWindow(win_name)

            # NOTE(review): if clb_points is empty or self.running is False on
            # the first iteration, `button` below may be referenced before
            # assignment — confirm callers guarantee a non-empty grid.
            # Going through monitors
            for (i_m, m) in enumerate(ey.monitors):
                win_name = f"Calibration-{i_m}"
                ey.big_win(win_name, i_m * m.width)
                # Going to each series of points (for example, one row of points)
                for item in clb_points:
                    if not self.running and (i_m != 0):
                        break
                    t_vec = []
                    eyes_vec = []
                    inp_scalars_vec = []
                    points_loc_vec = []
                    eyes_ratio_vec = []

                    # Show the first point of the series and wait for SPACE/ESC.
                    pnt = item[0]
                    ey.show_clb_win(win_name, pnt, win_color=ey.GRAY)
                    button = cv2.waitKey(0)
                    if button == 27 or (button == ord("q")) or (button == ord("Q")):
                        break
                    elif button == ord(' '):
                        ey.pass_frames(cap)
                        t1 = time.perf_counter()
                        s = len(item)

                        # Going through each point in each series
                        for pnt in item:
                            ey.show_clb_win(win_name, pnt)
                            button = cv2.waitKey(1)
                            if button == 27:
                                break
                            # Retry grabbing frames until one yields usable features.
                            while True:
                                frame_success, frame, frame_rgb = ey.get_frame(cap)  # Get image
                                if frame_success:
                                    results = face_mesh.process(frame_rgb)  # Predicting the landmarks using image

                                    # Getting the inputs of the models
                                    (
                                        features_success,
                                        _,
                                        eyes_frame_gray,
                                        features_vector,
                                        eyes_ratio,
                                        _
                                    ) = ey.get_model_inputs(
                                        frame,
                                        frame_rgb,
                                        results,
                                        camera_matrix,
                                        pcf,
                                        frame_size,
                                        dst_cof,
                                        some_landmarks_ids
                                    )
                                    if features_success:
                                        # Putting the inputs of the models into lists
                                        t_vec.append(round(time.perf_counter() - t1, 3))
                                        eyes_vec.append(eyes_frame_gray)
                                        inp_scalars_vec.append(features_vector)
                                        # x-label is normalized across the multi-monitor span.
                                        points_loc_vec.append([(pnt[0] + i_m)/len(ey.monitors), pnt[1]])
                                        eyes_ratio_vec.append(eyes_ratio)
                                        break
                            if not self.running:
                                break
                        fps_vec.append(ey.get_time(s, t1))
                        t_mat.append(np.array(t_vec))
                        eyes_mat.append(np.array(eyes_vec))
                        inp_scalars_mat.append(np.array(inp_scalars_vec))
                        points_loc_mat.append(np.array(points_loc_vec))
                        eyes_ratio_mat.append(np.array(eyes_ratio_vec))

                    if not self.running:
                        break
                    if button == 27 or (button == ord("q")) or (button == ord("Q")):
                        break
                if button == 27 or (button == ord("q")) or (button == ord("Q")):
                    break
                cv2.destroyWindow(win_name)
            cap.release()
            cv2.destroyAllWindows()

            # Only persist the session if the user did not abort with ESC/q.
            if button != 27 and (button != ord("q")) and (button != ord("Q")):
                ey.get_time(0, t0, True)
                print(f"Mean FPS : {np.array(fps_vec).mean()}")

                f = open(sbj_dir + "Information.txt", "w+")
                f.write(name + "\n" + descriptions + "\n" + str(datetime.now())[:16])
                f.close()

                et_dir = ey.create_dir([sbj_dir, ey.CLB])
                ey.save([t_mat, eyes_mat, inp_scalars_mat, points_loc_mat, eyes_ratio_mat], et_dir, [ey.T, ey.X1, ey.X2, ey.Y, ey.ER])

        else:
            self.running = False
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
@staticmethod
|
| 329 |
+
def make_io(num, data_out):
|
| 330 |
+
"""
|
| 331 |
+
Mixing the data of calibration and out looking, to create a dataset of in-out
|
| 332 |
+
|
| 333 |
+
Parameters:
|
| 334 |
+
data_out: data of user's looking at outside of the screen
|
| 335 |
+
|
| 336 |
+
Returns:
|
| 337 |
+
None
|
| 338 |
+
"""
|
| 339 |
+
sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])
|
| 340 |
+
et_dir = ey.create_dir([sbj_dir, ey.CLB])
|
| 341 |
+
|
| 342 |
+
x1_et0, x2_et0 = ey.load(et_dir, [ey.X1, ey.X2])
|
| 343 |
+
x1_et = []
|
| 344 |
+
x2_et = []
|
| 345 |
+
for (x1_vec, x2_vec) in zip(x1_et0, x2_et0):
|
| 346 |
+
for (x10, x20) in zip(x1_vec, x2_vec):
|
| 347 |
+
x1_et.append(x10)
|
| 348 |
+
x2_et.append(x20)
|
| 349 |
+
x1_et = np.array(x1_et)
|
| 350 |
+
x2_et = np.array(x2_et)
|
| 351 |
+
|
| 352 |
+
x1_o, x2_o, y_o = data_out
|
| 353 |
+
smp_in_cls = int(x1_o.shape[0])
|
| 354 |
+
|
| 355 |
+
x1_et_shf, x2_et_shf = shuffle(x1_et, x2_et)
|
| 356 |
+
|
| 357 |
+
x1_i, x2_i = x1_et_shf[:smp_in_cls], x2_et_shf[:smp_in_cls]
|
| 358 |
+
y_i = np.zeros((smp_in_cls,))
|
| 359 |
+
|
| 360 |
+
x1_io = [np.concatenate((x1_i, x1_o))]
|
| 361 |
+
x2_io = [np.concatenate((x2_i, x2_o))]
|
| 362 |
+
y_io = [np.concatenate((y_i, y_o))]
|
| 363 |
+
|
| 364 |
+
io_dir = ey.create_dir([sbj_dir, ey.IO])
|
| 365 |
+
ey.save([x1_io, x2_io, y_io], io_dir, [ey.X1, ey.X2, ey.Y])
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
    def out(self, num, camera_id=0, n_smp_in_cls=300):
        """
        Collecting data while the user is looking out of the screen.

        Captures frames until n_smp_in_cls usable samples are collected, labels
        them all with the "out" class, and merges them with the calibration
        data via make_io.

        Parameters:
            num: Subject number
            camera_id: Camera ID
            n_smp_in_cls: The number of samples for each class

        Returns:
            None
        """
        print("Getting out data...")
        # Class label for "looking outside the screen" samples.
        out_class_num = 1

        some_landmarks_ids = ey.get_some_landmarks_ids()

        (
            frame_size,
            camera_matrix,
            dst_cof,
            pcf
        ) = ey.get_camera_properties(camera_id)

        face_mesh = ey.get_mesh()

        t0 = time.perf_counter()
        eyes_data_gray = []
        vector_inputs = []
        output_class = []
        fps_vec = []
        cap = ey.get_camera(camera_id, frame_size)
        # Skip initial frames so auto-exposure/white-balance settles.
        ey.pass_frames(cap, 100)
        tx0 = [["Look everywhere ", (0.05, 0.25), 1.3, ey.RED, 3],
               ["'out' of screen", (0.05, 0.5), 1.3, ey.RED, 3],
               ["SPACE --> start sampling", (0.05, 0.75), 1.3, ey.RED, 3]]

        win_name = "out of screen"
        ey.big_win(win_name, 0)
        ey.show_clb_win(win_name, texts=tx0, win_color=ey.WHITE)
        button = cv2.waitKey(0)
        if button == 27 or (button == ord("q")) or (button == ord("Q")):
            # User aborted before sampling started; terminate the app.
            quit()
        cv2.destroyWindow(win_name)
        i = 0
        ey.pass_frames(cap)
        t1 = time.perf_counter()

        # Going through frames
        while True:
            frame_success, frame, frame_rgb = ey.get_frame(cap)
            if frame_success:
                # Predicting the face landmarks
                results = face_mesh.process(frame_rgb)

                # Calculating the face features
                (
                    features_success,
                    _,
                    eyes_frame_gray,
                    features_vector,
                    _,
                    _
                ) = ey.get_model_inputs(
                    frame,
                    frame_rgb,
                    results,
                    camera_matrix,
                    pcf,
                    frame_size,
                    dst_cof,
                    some_landmarks_ids
                )
                if features_success:
                    eyes_data_gray.append(eyes_frame_gray)
                    vector_inputs.append(features_vector)
                    output_class.append(out_class_num)

                    # Only count frames that produced usable features.
                    i += 1
                    if i == n_smp_in_cls:
                        break
        fps_vec.append(ey.get_time(i, t1))
        print("Data collected")
        if os.name == "nt":
            # Audible cue on Windows that sampling has finished.
            winsound.PlaySound("SystemExit", winsound.SND_ALIAS)

        cap.release()
        cv2.destroyAllWindows()
        ey.get_time(0, t0, True)
        print(f"Mean FPS : {np.array(fps_vec).mean()}")

        x1 = np.array(eyes_data_gray)
        x2 = np.array(vector_inputs)
        y = np.array(output_class)

        print("Data collection finished!")

        self.make_io(num, [x1, x2, y])
|
| 466 |
+
|
| 467 |
+
    def calculate_threshold(self, num, camera_id=0):
        """
        Calculating the blinking threshold automatically. Here we collect data and tell the user to blink during a certain time.
        Then we gain the maximum value for their eye movement velocity and it's considered as a blink. So, we tune the threshold
        based on that.

        Parameters:
            num: Subject number
            camera_id: Camera ID

        Returns:
            None
        """

        print("\nGetting eyes ratio...")
        tx0 = [["Look somewhere", (0.02, 0.3), 1.1, ey.RED, 2],
               ["SPACE --> start/pause", (0.02, 0.6), 1.1, ey.RED, 2]]
        tx1 = [["Blink", (0.39, 0.5), 1.6, ey.RED, 3]]
        some_landmarks_ids = ey.get_some_landmarks_ids()

        (
            frame_size,
            camera_matrix,
            dst_cof,
            pcf
        ) = ey.get_camera_properties(camera_id)

        face_mesh = ey.get_mesh()

        eyes_ratio_mat = []
        t_mat = []
        t0 = time.perf_counter()
        cap = ey.get_camera(camera_id, frame_size)
        # Skip initial frames so auto-exposure/white-balance settles.
        ey.pass_frames(cap, 100)

        # Going through frames, if the user pressed 'SPACE', the program will be paused, if they press 'q', the program will be stopped.
        # NOTE(review): `i` is never incremented, so the window name is always
        # "Calibration-0" — confirm whether per-session numbering was intended.
        i = 0
        while self.running:
            win_name = f"Calibration-{i}"
            ey.big_win(win_name, math.floor(len(ey.monitors) / 2)*ey.monitors[0].width)

            eyes_ratio_vec = []
            t_vec = []
            ey.show_clb_win(win_name, win_color=ey.WHITE, texts=tx0)

            button = cv2.waitKey(0)
            if (button == ord('q')) or (button == ord('Q')) or (button == 27):
                break
            elif button == ord(' '):
                ey.pass_frames(cap)
                t1 = time.perf_counter()
                # Sampling loop: record eyes-ratio values while the user blinks.
                while self.running:
                    ey.show_clb_win(win_name, texts=tx1, win_color=ey.GRAY)
                    button = cv2.waitKey(1)
                    if (button == ord('q')) or (button == ord('Q')) or (button == 27) or (button == ord(' ')):
                        break
                    frame_success, frame, frame_rgb = ey.get_frame(cap)
                    if frame_success:
                        # Predicting the face landmarks
                        results = face_mesh.process(frame_rgb)

                        # Calculating the face features
                        (
                            features_success,
                            _,
                            _,
                            _,
                            eyes_ratio,
                            _
                        ) = ey.get_model_inputs(
                            frame,
                            frame_rgb,
                            results,
                            camera_matrix,
                            pcf,
                            frame_size,
                            dst_cof,
                            some_landmarks_ids,
                            False,

                        )
                        if features_success:
                            t_vec.append(round(time.perf_counter() - t1, 3))
                            eyes_ratio_vec.append(eyes_ratio)

                    if not self.running:
                        break
                t_mat.append(np.array(t_vec))
                eyes_ratio_mat.append(np.array(eyes_ratio_vec))
            if (button == ord('q')) or (button == ord('Q')) or (button == 27):
                break
            if not self.running:
                break
            cv2.destroyWindow(win_name)
        cap.release()
        cv2.destroyAllWindows()
        ey.get_time(0, t0, True)

        # Eye-ratio velocities per recording session; blinks show up as peaks.
        eyes_ratio_v_mat = ey.get_blinking(t_mat, eyes_ratio_mat)[0]

        offered_threshold = ey.DEFAULT_BLINKING_THRESHOLD
        if len(eyes_ratio_v_mat) > 1:
            # Several sessions: take the smallest per-session peak (slightly
            # reduced) so every recorded blink exceeds the threshold.
            max_values = []
            for eyes_ratio_v_vec in eyes_ratio_v_mat:
                max_values.append(eyes_ratio_v_vec.max())
            offered_threshold = min(max_values) * 0.99
        else:
            if eyes_ratio_v_mat:
                # Single session: use a larger safety margin below its peak.
                offered_threshold = eyes_ratio_v_mat[0].max() * 0.6
        print(f"Offered Threshold: {offered_threshold}")

        er_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.ER])

        ey.save([t_mat, eyes_ratio_mat, offered_threshold], er_dir, [ey.T, ey.ER, "oth_app"])
|
| 581 |
+
|
| 582 |
+
|
codes/crt_train_models.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This file is for creating and training the neural network models for eye movement prediction. Also, this is for creating and training
|
| 2 |
+
The in-out model which is for predicting whether the subject is looking inside of the screen or outside of the screen. To understand this
|
| 3 |
+
module, you should know about how to build neural network models with keras and tensorflow"""
|
| 4 |
+
|
| 5 |
+
from tensorflow.keras.layers import (Input, Conv2D, Flatten, MaxPooling2D,
|
| 6 |
+
Dense, Concatenate)
|
| 7 |
+
from tensorflow.keras.models import Model
|
| 8 |
+
import numpy as np
|
| 9 |
+
import os
|
| 10 |
+
from tensorflow.keras.callbacks import EarlyStopping
|
| 11 |
+
from tensorflow.keras.models import load_model
|
| 12 |
+
from sklearn.preprocessing import StandardScaler
|
| 13 |
+
from sklearn.utils import shuffle
|
| 14 |
+
from joblib import dump as j_dump
|
| 15 |
+
from joblib import load as j_load
|
| 16 |
+
import random
|
| 17 |
+
from codes.base import eyeing as ey
|
| 18 |
+
from openpyxl import Workbook
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class Modeling():
|
| 22 |
+
@staticmethod
|
| 23 |
+
def create_io():
|
| 24 |
+
"""
|
| 25 |
+
creating in-out model (a CNN model) using tensorflow and keras
|
| 26 |
+
|
| 27 |
+
Parameters:
|
| 28 |
+
None
|
| 29 |
+
|
| 30 |
+
Returns:
|
| 31 |
+
None
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
print("Starting to create an empty in_out model...")
|
| 35 |
+
inp1_shape = (ey.EYE_SIZE[0], ey.EYE_SIZE[1]*2, 1)
|
| 36 |
+
x2_chosen_features = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
|
| 37 |
+
inp2_shape = (len(x2_chosen_features),)
|
| 38 |
+
|
| 39 |
+
inp1 = Input(inp1_shape)
|
| 40 |
+
layer = Conv2D(16, (11, 11), (1, 1), 'same', activation='relu')(inp1)
|
| 41 |
+
layer = MaxPooling2D((2, 2), (2, 2))(layer)
|
| 42 |
+
layer = Conv2D(32, (7, 7), (1, 1), 'same', activation='relu')(layer)
|
| 43 |
+
layer = MaxPooling2D((2, 2), (2, 2))(layer)
|
| 44 |
+
layer = Conv2D(64, (5, 5), (1, 1), 'same', activation='relu')(layer)
|
| 45 |
+
layer = MaxPooling2D((2, 2), (2, 2))(layer)
|
| 46 |
+
layer = Conv2D(128, (3, 3), (1, 1), activation='relu')(layer)
|
| 47 |
+
layer = MaxPooling2D((2, 2), (2, 2))(layer)
|
| 48 |
+
layer = Flatten()(layer)
|
| 49 |
+
inp2 = Input(inp2_shape)
|
| 50 |
+
layer = Concatenate()([layer, inp2])
|
| 51 |
+
layer = Dense(256, 'relu')(layer)
|
| 52 |
+
layer = Dense(128, 'relu')(layer)
|
| 53 |
+
layer = Dense(32, 'relu')(layer)
|
| 54 |
+
layer = Dense(8, 'relu')(layer)
|
| 55 |
+
output_layer = Dense(1, "sigmoid")(layer)
|
| 56 |
+
input_layers = [inp1, inp2]
|
| 57 |
+
model = Model(inputs=input_layers, outputs=output_layer)
|
| 58 |
+
model.compile(optimizer="adam", loss="binary_crossentropy", metrics="acc")
|
| 59 |
+
print(model.summary())
|
| 60 |
+
n_weights = np.sum([np.prod(v.get_shape()) for v in model.trainable_weights])
|
| 61 |
+
|
| 62 |
+
mdl_num = ey.find_max_mdl(ey.io_raw_dir) + 1
|
| 63 |
+
info = {"n_weights": n_weights,
|
| 64 |
+
"input1_shape": inp1_shape,
|
| 65 |
+
"input2_shape": inp2_shape,
|
| 66 |
+
"x2_chosen_features": x2_chosen_features}
|
| 67 |
+
mdl_name = ey.MDL + f"{mdl_num}"
|
| 68 |
+
mdl_dir = ey.io_raw_dir + mdl_name + ".h5"
|
| 69 |
+
model.save(mdl_dir)
|
| 70 |
+
ey.save([info], ey.io_raw_dir, [mdl_name])
|
| 71 |
+
print("\nEmpty in_out model created and saved to " + mdl_dir)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
    @staticmethod
    def create_et():
        """
        Creating eye tracking model (CNN model) using tensorflow and keras. You can change the structure in following, as you want.

        Builds an untrained two-input regression network (eye-image patch +
        scalar face features) with a single linear output, then saves it with
        its metadata under the raw eye-tracking model directory.

        Parameters:
            None

        Returns:
            None
        """


        # NOTE(review): this architecture duplicates create_io except for the
        # output activation/loss — consider extracting a shared builder.
        print("Starting to create empty eye_tracking models...")
        inp1_shape = (ey.EYE_SIZE[0], ey.EYE_SIZE[1]*2, 1)
        x2_chosen_features = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
        inp2_shape = (len(x2_chosen_features),)

        # Convolutional branch over the eye image.
        inp1 = Input(inp1_shape)
        layer = Conv2D(16, (11, 11), (1, 1), 'same', activation='relu')(inp1)
        layer = MaxPooling2D((2, 2), (2, 2))(layer)
        layer = Conv2D(32, (7, 7), (1, 1), 'same', activation='relu')(layer)
        layer = MaxPooling2D((2, 2), (2, 2))(layer)
        layer = Conv2D(64, (5, 5), (1, 1), 'same', activation='relu')(layer)
        layer = MaxPooling2D((2, 2), (2, 2))(layer)
        layer = Conv2D(128, (3, 3), (1, 1), activation='relu')(layer)
        layer = MaxPooling2D((2, 2), (2, 2))(layer)
        layer = Flatten()(layer)
        # Scalar face-feature branch merged into the dense head.
        inp2 = Input(inp2_shape)
        layer = Concatenate()([layer, inp2])
        layer = Dense(256, 'relu')(layer)
        layer = Dense(128, 'relu')(layer)
        layer = Dense(32, 'relu')(layer)
        layer = Dense(8, 'relu')(layer)
        # Linear output: regression of a single gaze coordinate.
        out = Dense(1, 'linear')(layer)
        input_layers = [inp1, inp2]
        model = Model(inputs=input_layers, outputs=out)
        model.compile(optimizer='adam', loss='mse')
        print(model.summary())
        n_weights = np.sum([np.prod(v.get_shape()) for v in model.trainable_weights])

        # Persist the model next to a metadata record under the next free number.
        mdl_num = ey.find_max_mdl(ey.et_raw_dir) + 1
        info = {"n_weights": n_weights,
                "input1_shape": inp1_shape,
                "input2_shape": inp2_shape,
                "x2_chosen_features": x2_chosen_features}

        mdl_name = ey.MDL + f"{mdl_num}"
        mdl_dir = ey.et_raw_dir + mdl_name + ".h5"
        model.save(mdl_dir)
        ey.save([info], ey.et_raw_dir, [mdl_name])
        print("\nEmpty eye_tracking model created and saved to " + mdl_dir)
|
| 126 |
+
|
| 127 |
+
    @staticmethod
    def train_io(
            subjects,
            models_list,
            min_max_brightness_ratio=[[0.65, 1.45]],
            r_train_list=[0.85],
            n_epochs_patience=[[160, 10]],
            save_scaler=False,
            show_model=False
    ):
        """
        Training the io models. This method uses the dataset in the io folder of subject's number folder. The parameters should be lists.
        So, you can train each model with several parameters and hyper parameters to see which one works better.

        Parameters:
            subjects: a list of subject numbers that you want to train the model with them.
            models_list: You can train several models at a same time. So, you can enter a list of model numbers
            min_max_brightness_ratio: To make the models robust to the brightness, the eyes images are multiplied by a number between two considered numbers
            r_train_list: The ratio for train dataset
            n_epochs_patience: The number of epochs and patience to interrupt training
            save_scaler: To save the scaler
            show_model: To show the model

        Returns:
            None
        """
        # NOTE(review): the list defaults are mutable default arguments; they
        # are only read here, but replacing them with None-sentinels would be
        # safer if a caller ever mutates them.
        print("Starting to train in_out model...")
        # Loading all subjects
        x1_load = []
        x2_load = []
        y_load = []
        for sbj in subjects:
            data_io_dir = ey.create_dir([ey.subjects_dir, f"{sbj}", ey.IO])
            x1_load0, x2_load0, y_load0 = ey.load(data_io_dir, [ey.X1, ey.X2, ey.Y])
            for (x10, x20, y10) in zip(x1_load0[0], x2_load0[0], y_load0[0]):
                x1_load.append(x10)
                x2_load.append(x20)
                y_load.append(y10)

        x1_load = np.array(x1_load)
        x2_load = np.array(x2_load)
        y_load = np.array(y_load)

        n_smp = x1_load.shape[0]
        print(f"\nNumber of samples : {n_smp}")

        # Going through each brightness in min_max_brightness_ratio list
        j = 1
        for mbr in min_max_brightness_ratio:
            # Brightness augmentation: scale each eye image by a random factor.
            x1_new = x1_load.copy()
            for (i, _) in enumerate(x1_load):
                r = random.uniform(mbr[0], mbr[1])
                x1_new[i] = (x1_new[i] * r).astype(np.uint8)

            # Going through each model
            for raw_mdl_num in models_list:
                info = ey.load(ey.io_raw_dir, [ey.MDL + f"{raw_mdl_num}"])[0]
                # Keep only the scalar features this model was built for.
                x2_chosen_features = info["x2_chosen_features"]
                x2_new = x2_load[:, x2_chosen_features]

                x1_shf, x2_shf, y_shf = shuffle(x1_new, x2_new, y_load)

                # Images use a fixed divisor scaler; scalars are standardized.
                x1_scaler = ey.X1_SCALER
                x1 = x1_shf / x1_scaler

                x2_scaler = StandardScaler()
                x2 = x2_scaler.fit_transform(x2_shf)

                scalers = [x1_scaler, x2_scaler]
                if save_scaler:
                    j_dump(scalers, ey.scalers_dir + f"scl_io_{len(x2_chosen_features)}.bin")

                # Going through each training ratio in r_train_list
                for rt in r_train_list:
                    n_train = int(rt * n_smp)
                    x1_train, x2_train = x1[:n_train], x2[:n_train]
                    x1_val, x2_val = x1[n_train:], x2[n_train:]

                    y_train = y_shf[:n_train]
                    y_val = y_shf[n_train:]
                    print("\nTrain and val data shape:")
                    print(x1_train.shape, x1_val.shape, x2_train.shape, x2_val.shape,
                          y_train.shape, y_val.shape)

                    x_train = [x1_train, x2_train]
                    x_val = [x1_val, x2_val]

                    # Going through each epoch and patience in n_epochs_patience
                    for nep in n_epochs_patience:
                        # Training the models
                        info["min_max_brightness_ratio"] = mbr
                        info["r_train"] = rt
                        info["n_epochs_patience"] = nep
                        cb = EarlyStopping(patience=nep[1], verbose=1, restore_best_weights=True)

                        # Reload the untrained model so each hyperparameter
                        # combination starts from the same initial weights.
                        raw_model_dir = ey.io_raw_dir + ey.MDL + f"{raw_mdl_num}.h5"
                        print("\nLoading blink_in_out model from " + raw_model_dir)
                        model = load_model(raw_model_dir)
                        if show_model:
                            print(model.summary())

                        print(f"\n<<<<<<< {j}-model:{raw_mdl_num}-min_max_ratio:{mbr}-r_train:{rt}-epoch_patience:{nep} >>>>>>>>")
                        model.fit(x_train,
                                  y_train,
                                  validation_data=(x_val, y_val),
                                  epochs=nep[0],
                                  callbacks=cb)
                        train_loss = model.evaluate(x_train, y_train)
                        val_loss = model.evaluate(x_val, y_val)

                        info["train_loss"] = train_loss
                        info["val_loss"] = val_loss

                        # Save the trained model and its metadata under the
                        # next free trained-model number.
                        trained_mdl_num = ey.find_max_mdl(ey.io_trained_dir) + 1
                        mdl_name = ey.MDL + f'{trained_mdl_num}'
                        ey.save([info], ey.io_trained_dir, [mdl_name])
                        mdl_tr_dir = ey.io_trained_dir + mdl_name + ".h5"
                        model.save(mdl_tr_dir)
                        print("\nSaving in_out model in " + mdl_tr_dir)
                        j += 1
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
@staticmethod
|
| 250 |
+
def train_et(
|
| 251 |
+
subjects,
|
| 252 |
+
models_list,
|
| 253 |
+
min_max_brightness_ratio=[[0.65, 1.45]],
|
| 254 |
+
r_train_list=[0.8],
|
| 255 |
+
n_epochs_patience=[[100, 15]],
|
| 256 |
+
shift_samples=None,
|
| 257 |
+
blinking_threshold="d",
|
| 258 |
+
save_scaler=False,
|
| 259 |
+
show_model=False
|
| 260 |
+
):
|
| 261 |
+
"""
|
| 262 |
+
Training the et (base) models. This method uses the dataset in the et folder of subject's number folder. The parameters should be lists.
|
| 263 |
+
So, you can train each model with several parameters and hyper parameters to see which one works better.
|
| 264 |
+
|
| 265 |
+
Parameters:
|
| 266 |
+
subjects: a list of subject numbers that you want to train the model with them.
|
| 267 |
+
models_list: You can train several models at a same time. So, you can enter a list of model numbers
|
| 268 |
+
min_max_brightness_ratio: To make the models robust to the brightness, the eyes images are multiplies into a number between two considered numbers
|
| 269 |
+
r_train_list: The ratio for train dataset
|
| 270 |
+
n_epochs_patience: The number of epochs and patience to intrupt training
|
| 271 |
+
shift_samples: To shift sample if there is a high delay
|
| 272 |
+
blinking_threshold: It can have three types --> d: default, ao: app offered, uo: user offered
|
| 273 |
+
save_scaler: To save the scaler
|
| 274 |
+
show_model: To show the model
|
| 275 |
+
|
| 276 |
+
Returns:
|
| 277 |
+
None
|
| 278 |
+
"""
|
| 279 |
+
print("Starting to train eye_tracking models...")
|
| 280 |
+
|
| 281 |
+
# Loading all subjects
|
| 282 |
+
x1_load = []
|
| 283 |
+
x2_load = []
|
| 284 |
+
y_load = []
|
| 285 |
+
kk = 0
|
| 286 |
+
for sbj in subjects:
|
| 287 |
+
sbj_dir = ey.create_dir([ey.subjects_dir, f"{sbj}"])
|
| 288 |
+
sbj_clb_dir = ey.create_dir([sbj_dir, ey.CLB])
|
| 289 |
+
|
| 290 |
+
(
|
| 291 |
+
sbj_x1_load,
|
| 292 |
+
sbj_x2_load,
|
| 293 |
+
sbj_y_load,
|
| 294 |
+
sbj_t_mat,
|
| 295 |
+
sbj_eyes_ratio
|
| 296 |
+
) = ey.load(sbj_clb_dir, [ey.X1, ey.X2, ey.Y, ey.T, ey.ER])
|
| 297 |
+
|
| 298 |
+
# If there is any shifting samples, doing that
|
| 299 |
+
if shift_samples:
|
| 300 |
+
if shift_samples[kk]:
|
| 301 |
+
ii = 0
|
| 302 |
+
for (x11, x21, y1, t1, eyr1) in zip(sbj_x1_load, sbj_x2_load, sbj_y_load, sbj_t_mat, sbj_eyes_ratio):
|
| 303 |
+
sbj_t_mat[ii] = t1[:-shift_samples[kk]]
|
| 304 |
+
sbj_x1_load[ii] = x11[shift_samples[kk]:]
|
| 305 |
+
sbj_x2_load[ii] = x21[shift_samples[kk]:]
|
| 306 |
+
sbj_y_load[ii] = y1[:-shift_samples[kk]]
|
| 307 |
+
sbj_eyes_ratio[ii] = eyr1[shift_samples[kk]:]
|
| 308 |
+
ii += 1
|
| 309 |
+
|
| 310 |
+
kk += 1
|
| 311 |
+
sbj_er_dir = ey.create_dir([sbj_dir, ey.ER])
|
| 312 |
+
|
| 313 |
+
# Removing the samples that are during blinking
|
| 314 |
+
sbj_blinking_threshold = ey.get_threshold(sbj_er_dir, blinking_threshold)
|
| 315 |
+
|
| 316 |
+
sbj_blinking = ey.get_blinking(sbj_t_mat, sbj_eyes_ratio, sbj_blinking_threshold)[1]
|
| 317 |
+
|
| 318 |
+
for (x11, x21, y1, b1) in zip(sbj_x1_load, sbj_x2_load, sbj_y_load, sbj_blinking):
|
| 319 |
+
for (x10, x20, y0, b0) in zip(x11, x21, y1, b1):
|
| 320 |
+
if not b0:
|
| 321 |
+
x1_load.append(x10)
|
| 322 |
+
x2_load.append(x20)
|
| 323 |
+
y_load.append(y0)
|
| 324 |
+
x1_load = np.array(x1_load)
|
| 325 |
+
x2_load = np.array(x2_load)
|
| 326 |
+
y_load = np.array(y_load)
|
| 327 |
+
n_smp = x1_load.shape[0]
|
| 328 |
+
print(f"\nNumber of samples : {n_smp}")
|
| 329 |
+
|
| 330 |
+
# Going through each brightness in min_max_brightness_ratio list
|
| 331 |
+
j = 1
|
| 332 |
+
for mbr in min_max_brightness_ratio:
|
| 333 |
+
x1_new = x1_load.copy()
|
| 334 |
+
for (i, _) in enumerate(x1_load):
|
| 335 |
+
r = random.uniform(mbr[0], mbr[1])
|
| 336 |
+
x1_new[i] = (x1_new[i] * r).astype(np.uint8)
|
| 337 |
+
|
| 338 |
+
# Going through each model
|
| 339 |
+
for raw_mdl_num in models_list:
|
| 340 |
+
info = ey.load(ey.et_raw_dir, [ey.MDL + f"{raw_mdl_num}"])[0]
|
| 341 |
+
x2_chosen_features = info["x2_chosen_features"]
|
| 342 |
+
x2_new = x2_load[:, x2_chosen_features]
|
| 343 |
+
|
| 344 |
+
x1_shf, x2_shf, y_hrz_shf, y_vrt_shf = shuffle(x1_new, x2_new, y_load[:, 0], y_load[:, 1])
|
| 345 |
+
|
| 346 |
+
x1_scaler = ey.X1_SCALER
|
| 347 |
+
x1 = x1_shf / x1_scaler
|
| 348 |
+
|
| 349 |
+
x2_scaler = StandardScaler()
|
| 350 |
+
x2 = x2_scaler.fit_transform(x2_shf)
|
| 351 |
+
y_scaler = ey.Y_SCALER
|
| 352 |
+
|
| 353 |
+
scalers = [x1_scaler, x2_scaler, y_scaler]
|
| 354 |
+
|
| 355 |
+
if save_scaler:
|
| 356 |
+
j_dump(scalers, ey.scalers_dir + f"scl_et_{len(x2_chosen_features)}.bin")
|
| 357 |
+
|
| 358 |
+
# Going through each training ratio in r_train_list
|
| 359 |
+
for rt in r_train_list:
|
| 360 |
+
n_train = int(rt * n_smp)
|
| 361 |
+
x1_train, x2_train = x1[:n_train], x2[:n_train]
|
| 362 |
+
x1_val, x2_val = x1[n_train:], x2[n_train:]
|
| 363 |
+
|
| 364 |
+
y_hrz_train, y_vrt_train = y_hrz_shf[:n_train], y_vrt_shf[:n_train]
|
| 365 |
+
y_hrz_val, y_vrt_val = y_hrz_shf[n_train:], y_vrt_shf[n_train:]
|
| 366 |
+
print("\nTrain and val data shape:")
|
| 367 |
+
print(x1_train.shape, x1_val.shape, x2_train.shape, x2_val.shape,
|
| 368 |
+
y_hrz_train.shape, y_hrz_val.shape, y_vrt_train.shape, y_vrt_val.shape)
|
| 369 |
+
|
| 370 |
+
x_train = [x1_train, x2_train]
|
| 371 |
+
x_val = [x1_val, x2_val]
|
| 372 |
+
|
| 373 |
+
# Going throught each epoch and patience in n_epochs_patience
|
| 374 |
+
for nep in n_epochs_patience:
|
| 375 |
+
# Training the models
|
| 376 |
+
info["min_max_brightness_ratio"] = mbr
|
| 377 |
+
info["r_train"] = rt
|
| 378 |
+
info["n_epochs_patience"] = nep
|
| 379 |
+
cb = EarlyStopping(patience=nep[1], verbose=1, restore_best_weights=True)
|
| 380 |
+
|
| 381 |
+
raw_model_dir = ey.et_raw_dir + ey.MDL + f"{raw_mdl_num}.h5"
|
| 382 |
+
print("\nLoading eye_tracking model from " + raw_model_dir)
|
| 383 |
+
model_hrz = load_model(raw_model_dir)
|
| 384 |
+
model_vrt = load_model(raw_model_dir)
|
| 385 |
+
if show_model:
|
| 386 |
+
print(model_hrz.summary())
|
| 387 |
+
|
| 388 |
+
trained_mdl_num = ey.find_max_mdl(ey.et_trained_dir, b=-7) + 1
|
| 389 |
+
|
| 390 |
+
print(f"\n<<<<<<< {j}-model-hrz:{raw_mdl_num}-min_max_ratio:{mbr}-r_train:{rt}-epoch_patience:{nep} >>>>>>>>")
|
| 391 |
+
model_hrz.fit(x_train,
|
| 392 |
+
y_hrz_train * y_scaler,
|
| 393 |
+
validation_data=(x_val, y_hrz_val * y_scaler),
|
| 394 |
+
epochs=nep[0],
|
| 395 |
+
callbacks=cb)
|
| 396 |
+
mdl_name = ey.MDL + f"{trained_mdl_num}"
|
| 397 |
+
mdl_hrz_tr_dir = ey.et_trained_dir + mdl_name + "-hrz.h5"
|
| 398 |
+
print("\nSaving horizontally eye_tracking model in " + mdl_hrz_tr_dir)
|
| 399 |
+
model_hrz.save(mdl_hrz_tr_dir)
|
| 400 |
+
hrz_train_loss = model_hrz.evaluate(x_train, y_hrz_train * y_scaler)
|
| 401 |
+
hrz_val_loss = model_hrz.evaluate(x_val, y_hrz_val * y_scaler)
|
| 402 |
+
info["hrz_train_loss"] = hrz_train_loss
|
| 403 |
+
info["hrz_val_loss"] = hrz_val_loss
|
| 404 |
+
|
| 405 |
+
print(f"\n<<<<<<< {j}-model-vrt:{raw_mdl_num}-min_max_ratio:{mbr}-r_train:{rt}-epoch_patience:{nep} >>>>>>>>")
|
| 406 |
+
model_vrt.fit(x_train,
|
| 407 |
+
y_vrt_train * y_scaler,
|
| 408 |
+
validation_data=(x_val, y_vrt_val * y_scaler),
|
| 409 |
+
epochs=nep[0],
|
| 410 |
+
callbacks=cb)
|
| 411 |
+
tr_model_vrt_dir = ey.et_trained_dir + mdl_name + f"-vrt.h5"
|
| 412 |
+
print("Saving vertically eye_tracking model in " + tr_model_vrt_dir)
|
| 413 |
+
model_vrt.save(tr_model_vrt_dir)
|
| 414 |
+
vrt_train_loss = model_vrt.evaluate(x_train, y_vrt_train * y_scaler)
|
| 415 |
+
vrt_val_loss = model_vrt.evaluate(x_val, y_vrt_val * y_scaler)
|
| 416 |
+
info["vrt_train_loss"] = vrt_train_loss
|
| 417 |
+
info["vrt_val_loss"] = vrt_val_loss
|
| 418 |
+
|
| 419 |
+
ey.save([info], ey.et_trained_dir, [mdl_name])
|
| 420 |
+
|
| 421 |
+
j += 1
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
@staticmethod
|
| 425 |
+
def get_models_information(io=True, raw=True, show_model=False):
|
| 426 |
+
"""
|
| 427 |
+
To write the models information in an excel file. It gets the information from attached pickle file for each model.
|
| 428 |
+
There are raw models and trained models in the io and the et.
|
| 429 |
+
|
| 430 |
+
Parameters:
|
| 431 |
+
io: If it's io or et
|
| 432 |
+
raw: If the model is trained or not
|
| 433 |
+
show_model: If you want to show the model
|
| 434 |
+
|
| 435 |
+
Returns:
|
| 436 |
+
None
|
| 437 |
+
"""
|
| 438 |
+
wb = Workbook()
|
| 439 |
+
ws = wb.active
|
| 440 |
+
ws['A1'] = "# of model"
|
| 441 |
+
ws['B1'] = "# of weights"
|
| 442 |
+
ws['C1'] = "input 1 shape"
|
| 443 |
+
ws['D1'] = "input 2 shape"
|
| 444 |
+
ws['E1'] = "x2 chosen features"
|
| 445 |
+
if io:
|
| 446 |
+
if raw:
|
| 447 |
+
files_name = os.listdir(ey.io_raw_dir)
|
| 448 |
+
if files_name:
|
| 449 |
+
for fn in files_name:
|
| 450 |
+
if fn[-7:] == ".pickle":
|
| 451 |
+
mdl_num = int(fn[3:-7])
|
| 452 |
+
mdl_name = ey.MDL + f"{mdl_num}"
|
| 453 |
+
if show_model:
|
| 454 |
+
mdl_dir = ey.io_raw_dir + mdl_name + ".h5"
|
| 455 |
+
mdl = load_model(mdl_dir)
|
| 456 |
+
print(f"<<<<<<<<<<<<<< {mdl_dir} >>>>>>>>>>>>>>")
|
| 457 |
+
print(mdl.summary())
|
| 458 |
+
info = ey.load(ey.io_raw_dir, [mdl_name])[0]
|
| 459 |
+
|
| 460 |
+
ws[f'A{mdl_num+1}'] = str(mdl_num)
|
| 461 |
+
ws[f'B{mdl_num+1}'] = str(info['n_weights'])
|
| 462 |
+
ws[f'C{mdl_num+1}'] = str(info['input1_shape'])
|
| 463 |
+
ws[f'D{mdl_num+1}'] = str(info['input2_shape'])
|
| 464 |
+
ws[f'E{mdl_num+1}'] = str(info['x2_chosen_features'])
|
| 465 |
+
else:
|
| 466 |
+
ws['F1'] = "min-Max brightness ratio"
|
| 467 |
+
ws['G1'] = "r_train"
|
| 468 |
+
ws['H1'] = "# of epochs and patience"
|
| 469 |
+
ws['I1'] = "train loss"
|
| 470 |
+
ws['J1'] = "val loss"
|
| 471 |
+
|
| 472 |
+
files_name = os.listdir(ey.io_trained_dir)
|
| 473 |
+
if files_name:
|
| 474 |
+
for fn in files_name:
|
| 475 |
+
if fn[-7:] == ".pickle":
|
| 476 |
+
mdl_num = int(fn[3:-7])
|
| 477 |
+
mdl_name = ey.MDL + f"{mdl_num}"
|
| 478 |
+
if show_model:
|
| 479 |
+
mdl_dir = ey.io_trained_dir + mdl_name + ".h5"
|
| 480 |
+
mdl = load_model(mdl_dir)
|
| 481 |
+
print(f"<<<<<<<<<<<<<< {mdl_dir} >>>>>>>>>>>>>>")
|
| 482 |
+
print(mdl.summary())
|
| 483 |
+
info = ey.load(ey.io_trained_dir, [mdl_name])[0]
|
| 484 |
+
|
| 485 |
+
ws[f'A{mdl_num+1}'] = str(mdl_num)
|
| 486 |
+
ws[f'B{mdl_num+1}'] = str(info['n_weights'])
|
| 487 |
+
ws[f'C{mdl_num+1}'] = str(info['input1_shape'])
|
| 488 |
+
ws[f'D{mdl_num+1}'] = str(info['input2_shape'])
|
| 489 |
+
ws[f'E{mdl_num+1}'] = str(info['x2_chosen_features'])
|
| 490 |
+
ws[f'F{mdl_num+1}'] = str(info['min_max_brightness_ratio'])
|
| 491 |
+
ws[f'G{mdl_num+1}'] = str(info['r_train'])
|
| 492 |
+
ws[f'H{mdl_num+1}'] = str(info['n_epochs_patience'])
|
| 493 |
+
ws[f'I{mdl_num+1}'] = str(info['train_loss'])
|
| 494 |
+
ws[f'J{mdl_num+1}'] = str(info['val_loss'])
|
| 495 |
+
|
| 496 |
+
else:
|
| 497 |
+
if raw:
|
| 498 |
+
files_name = os.listdir(ey.et_raw_dir)
|
| 499 |
+
if files_name:
|
| 500 |
+
for fn in files_name:
|
| 501 |
+
if fn[-7:] == ".pickle":
|
| 502 |
+
mdl_num = int(fn[3:-7])
|
| 503 |
+
mdl_name = ey.MDL + f"{mdl_num}"
|
| 504 |
+
if show_model:
|
| 505 |
+
mdl_dir = ey.et_raw_dir + mdl_name + ".h5"
|
| 506 |
+
mdl = load_model(mdl_dir)
|
| 507 |
+
print(f"<<<<<<<<<<<<<< {mdl_dir} >>>>>>>>>>>>>>")
|
| 508 |
+
print(mdl.summary())
|
| 509 |
+
info = ey.load(ey.et_raw_dir, [mdl_name])[0]
|
| 510 |
+
|
| 511 |
+
ws[f'A{mdl_num+1}'] = str(mdl_num)
|
| 512 |
+
ws[f'B{mdl_num+1}'] = str(info['n_weights'])
|
| 513 |
+
ws[f'C{mdl_num+1}'] = str(info['input1_shape'])
|
| 514 |
+
ws[f'D{mdl_num+1}'] = str(info['input2_shape'])
|
| 515 |
+
ws[f'E{mdl_num+1}'] = str(info['x2_chosen_features'])
|
| 516 |
+
|
| 517 |
+
else:
|
| 518 |
+
ws['F1'] = "min-Max brightness ratio"
|
| 519 |
+
ws['G1'] = "r_train"
|
| 520 |
+
ws['H1'] = "# of epochs and patience"
|
| 521 |
+
ws['I1'] = "model-hrz train loss"
|
| 522 |
+
ws['J1'] = "model-hrz val loss"
|
| 523 |
+
ws['K1'] = "model-vrt train loss"
|
| 524 |
+
ws['L1'] = "model-vrt val loss"
|
| 525 |
+
|
| 526 |
+
files_name = os.listdir(ey.et_trained_dir)
|
| 527 |
+
if files_name:
|
| 528 |
+
for fn in files_name:
|
| 529 |
+
if fn[-7:] == ".pickle":
|
| 530 |
+
mdl_num = int(fn[3:-7])
|
| 531 |
+
mdl_name = ey.MDL + f"{mdl_num}"
|
| 532 |
+
if show_model:
|
| 533 |
+
mdl_dir = ey.et_trained_dir + mdl_name + "-hrz.h5"
|
| 534 |
+
mdl = load_model(mdl_dir)
|
| 535 |
+
print(f"<<<<<<<<<<<<<< {mdl_dir} >>>>>>>>>>>>>>")
|
| 536 |
+
print(mdl.summary())
|
| 537 |
+
info = ey.load(ey.et_trained_dir, [mdl_name])[0]
|
| 538 |
+
|
| 539 |
+
ws[f'A{mdl_num+1}'] = str(mdl_num)
|
| 540 |
+
ws[f'B{mdl_num+1}'] = str(info['n_weights'])
|
| 541 |
+
ws[f'C{mdl_num+1}'] = str(info['input1_shape'])
|
| 542 |
+
ws[f'D{mdl_num+1}'] = str(info['input2_shape'])
|
| 543 |
+
ws[f'E{mdl_num+1}'] = str(info['x2_chosen_features'])
|
| 544 |
+
ws[f'F{mdl_num+1}'] = str(info['min_max_brightness_ratio'])
|
| 545 |
+
ws[f'G{mdl_num+1}'] = str(info['r_train'])
|
| 546 |
+
ws[f'H{mdl_num+1}'] = str(info['n_epochs_patience'])
|
| 547 |
+
ws[f'I{mdl_num+1}'] = str(info['hrz_train_loss'])
|
| 548 |
+
ws[f'J{mdl_num+1}'] = str(info['hrz_val_loss'])
|
| 549 |
+
ws[f'K{mdl_num+1}'] = str(info['vrt_train_loss'])
|
| 550 |
+
ws[f'L{mdl_num+1}'] = str(info['vrt_val_loss'])
|
| 551 |
+
|
| 552 |
+
if io and raw:
|
| 553 |
+
info_name = "info_io_raw"
|
| 554 |
+
elif io and not raw:
|
| 555 |
+
info_name = "info_io_trained"
|
| 556 |
+
elif not io and raw:
|
| 557 |
+
info_name = "info_et_raw"
|
| 558 |
+
else:
|
| 559 |
+
info_name = "info_et_trained"
|
| 560 |
+
|
| 561 |
+
wb.save(ey.files_dir + info_name + ".xlsx")
|
codes/eye_track.py
ADDED
|
@@ -0,0 +1,701 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module contains the EyeTrack method. In this method, the eye movements will be predicted using the inputs and retrained models.
|
| 2 |
+
Also the fixations will be calculated."""
|
| 3 |
+
|
| 4 |
+
from tensorflow.keras.models import load_model
|
| 5 |
+
import numpy as np
|
| 6 |
+
from joblib import load as j_load
|
| 7 |
+
from codes.base import eyeing as ey
|
| 8 |
+
from scipy import signal
|
| 9 |
+
from openpyxl import Workbook, load_workbook
|
| 10 |
+
import os
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class EyeTrack(object):
|
| 14 |
+
@staticmethod
|
| 15 |
+
def get_pixels(
|
| 16 |
+
subjects,
|
| 17 |
+
models_list=[1],
|
| 18 |
+
target_fol=ey.SMP,
|
| 19 |
+
shift_samples=None,
|
| 20 |
+
blinking_threshold="uo",
|
| 21 |
+
use_io=False,
|
| 22 |
+
delete_files=False
|
| 23 |
+
):
|
| 24 |
+
"""
|
| 25 |
+
Predicting the eye movements using the inputs (eyes images and face vectors). This can be done on sampling (smp) data,
|
| 26 |
+
tsting (acc) data or latency (ltn) data. You can predict outputs for several subjects and with several models, to exactly know
|
| 27 |
+
which one is working better. In this method, the samples that are during blinking, will be deleted. The values for eye movements
|
| 28 |
+
are between 0 and 1. It means it is independant to the size of screen.
|
| 29 |
+
|
| 30 |
+
Parameters:
|
| 31 |
+
subjects: list of subjects that we want to predict their eye viewpoints
|
| 32 |
+
models_list: list of models that we want to use them to predict the eye viewpoints
|
| 33 |
+
target_fol: the targeted folder that we want to predict its data. smp, acc, or ltn
|
| 34 |
+
shift_smaples: whether or not shift the inputs
|
| 35 |
+
blinking_threshold: blinking threshold. It can takes "d" as default, "uo" as user offered and "ao" as app offered.
|
| 36 |
+
use_io: whether or not use the io model
|
| 37 |
+
delete_files: whethere or not remove the inputs after prediction. Because of size of the saved images
|
| 38 |
+
"""
|
| 39 |
+
tfn = 1 # For sampling data
|
| 40 |
+
if target_fol == ey.ACC:
|
| 41 |
+
tfn = 2 # For testing data
|
| 42 |
+
elif target_fol == ey.LTN:
|
| 43 |
+
tfn = 3 # For latency data
|
| 44 |
+
|
| 45 |
+
out_threshold_min = 0.005
|
| 46 |
+
out_threshold_max = 0.995
|
| 47 |
+
latency_radius = 0.33
|
| 48 |
+
median_filter_window_size = 5
|
| 49 |
+
|
| 50 |
+
x1_scaler_et, x2_scaler_et, y_scaler = j_load(ey.scalers_dir + "scalers_et_main.bin")
|
| 51 |
+
if tfn == 1:
|
| 52 |
+
x1_scaler_io, x2_scaler_io = j_load(ey.scalers_dir + "scalers_io_main.bin")
|
| 53 |
+
mdl_io = load_model(ey.io_trained_dir + ey.MDL + "1.h5")
|
| 54 |
+
|
| 55 |
+
# Going through each subject's folder to predict their eye movement
|
| 56 |
+
kk = 0
|
| 57 |
+
for num in subjects:
|
| 58 |
+
print(f"<<<<<<<<<<<<<<<<<<<<< Subject {num} >>>>>>>>>>>>>>>>>>>>>>>")
|
| 59 |
+
sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])
|
| 60 |
+
sbj_models_dir = ey.create_dir([sbj_dir, ey.MDL])
|
| 61 |
+
target_dir = ey.create_dir([sbj_dir, target_fol])
|
| 62 |
+
|
| 63 |
+
# Loading the data and shifting the inputs if it's needed
|
| 64 |
+
if ey.file_existing(target_dir, ey.X1+".pickle"):
|
| 65 |
+
if tfn == 1:
|
| 66 |
+
t_load, sys_time_load, x1_load, x2_load, eyes_ratio = ey.load(target_dir, [ey.T, "sys_time", ey.X1, ey.X2, ey.ER])
|
| 67 |
+
if shift_samples:
|
| 68 |
+
if shift_samples[kk]:
|
| 69 |
+
ii = 0
|
| 70 |
+
for (x11, x21, t1, st1, eyr1) in zip(x1_load, x2_load, t_load, sys_time_load, eyes_ratio):
|
| 71 |
+
t_load[ii] = t1[:-shift_samples[kk]]
|
| 72 |
+
sys_time_load[ii] = st1[:-shift_samples[kk]]
|
| 73 |
+
x1_load[ii] = x11[shift_samples[kk]:]
|
| 74 |
+
x2_load[ii] = x21[shift_samples[kk]:]
|
| 75 |
+
eyes_ratio[ii] = eyr1[shift_samples[kk]:]
|
| 76 |
+
ii += 1
|
| 77 |
+
elif tfn == 2:
|
| 78 |
+
t_load, x1_load, x2_load, y_load, eyes_ratio = ey.load(target_dir, [ey.T, ey.X1, ey.X2, ey.Y, ey.ER])
|
| 79 |
+
if shift_samples:
|
| 80 |
+
if shift_samples[kk]:
|
| 81 |
+
ii = 0
|
| 82 |
+
for (x11, x21, y1, t1, eyr1) in zip(x1_load, x2_load, y_load, t_load, eyes_ratio):
|
| 83 |
+
t_load[ii] = t1[:-shift_samples[kk]]
|
| 84 |
+
x1_load[ii] = x11[shift_samples[kk]:]
|
| 85 |
+
x2_load[ii] = x21[shift_samples[kk]:]
|
| 86 |
+
y_load[ii] = y1[:-shift_samples[kk]]
|
| 87 |
+
eyes_ratio[ii] = eyr1[shift_samples[kk]:]
|
| 88 |
+
ii += 1
|
| 89 |
+
else:
|
| 90 |
+
t_load, x1_load, x2_load = ey.load(target_dir, [ey.T, ey.X1, ey.X2])
|
| 91 |
+
if shift_samples:
|
| 92 |
+
if shift_samples[kk]:
|
| 93 |
+
ii = 0
|
| 94 |
+
for (x11, x21, t1) in zip(x1_load, x2_load, t_load):
|
| 95 |
+
t_load[ii] = t1[:-shift_samples[kk]]
|
| 96 |
+
x1_load[ii] = x11[shift_samples[kk]:]
|
| 97 |
+
x2_load[ii] = x21[shift_samples[kk]:]
|
| 98 |
+
ii += 1
|
| 99 |
+
kk += 1
|
| 100 |
+
|
| 101 |
+
# Going through each model to predict the output
|
| 102 |
+
for mdl_num in models_list:
|
| 103 |
+
mdl_et_name = ey.MDL + f"{mdl_num}"
|
| 104 |
+
mdl_et_hrz_dir = sbj_models_dir + mdl_et_name + "-hrz.h5"
|
| 105 |
+
mdl_et_vrt_dir = sbj_models_dir + mdl_et_name + "-vrt.h5"
|
| 106 |
+
if ey.file_existing(sbj_models_dir, mdl_et_name + "-hrz.h5"):
|
| 107 |
+
info = ey.load(sbj_models_dir, [mdl_et_name])[0]
|
| 108 |
+
x2_chosen_features = info["x2_chosen_features"]
|
| 109 |
+
mdl_et_hrz = load_model(mdl_et_hrz_dir)
|
| 110 |
+
mdl_et_vrt = load_model(mdl_et_vrt_dir)
|
| 111 |
+
|
| 112 |
+
# x1_load and x2_load are lists of lists. So, we should predict each list
|
| 113 |
+
y_prd = []
|
| 114 |
+
for (x11, x21) in zip(x1_load, x2_load):
|
| 115 |
+
n_smp_vec = x11.shape[0]
|
| 116 |
+
x21_new = x21[:, x2_chosen_features]
|
| 117 |
+
x11_nrm = x11 / x1_scaler_et
|
| 118 |
+
x21_nrm = x2_scaler_et.transform(x21_new)
|
| 119 |
+
x0_nrm = [x11_nrm, x21_nrm]
|
| 120 |
+
|
| 121 |
+
y_hrz_prd = np.expand_dims(mdl_et_hrz.predict(x0_nrm).reshape((n_smp_vec,)), 1) / y_scaler
|
| 122 |
+
y_vrt_prd = np.expand_dims(mdl_et_vrt.predict(x0_nrm).reshape((n_smp_vec,)), 1) / y_scaler
|
| 123 |
+
|
| 124 |
+
y_prd.append(np.concatenate([y_hrz_prd, y_vrt_prd], 1))
|
| 125 |
+
|
| 126 |
+
# For calculation of latency, it's just needed to see if the subject is looking in left or right, not exact location
|
| 127 |
+
if tfn == 3:
|
| 128 |
+
t_delay = []
|
| 129 |
+
j = 0
|
| 130 |
+
for (t1, y1_prd) in zip(t_load, y_prd):
|
| 131 |
+
for (t0, y0_prd) in zip(t1, y1_prd):
|
| 132 |
+
if j % 2 == 0:
|
| 133 |
+
d = y0_prd[0] - 0.66
|
| 134 |
+
else:
|
| 135 |
+
d = 0.33 - y0_prd[0]
|
| 136 |
+
if 0 < d < latency_radius:
|
| 137 |
+
t_delay.append(t0 - t1[0])
|
| 138 |
+
break
|
| 139 |
+
j += 1
|
| 140 |
+
print(t_delay)
|
| 141 |
+
t_delay = np.array(t_delay).mean() - ey.LATENCY_WAITING_TIME/1000.0
|
| 142 |
+
print(t_delay)
|
| 143 |
+
ey.save([t_delay], target_dir, ["t_delay"])
|
| 144 |
+
|
| 145 |
+
else:
|
| 146 |
+
# predict the samples that are looking outside of the screen
|
| 147 |
+
y_in = y_prd.copy()
|
| 148 |
+
if (tfn == 1) and use_io:
|
| 149 |
+
for (x11, x21, yi1) in zip(x1_load, x2_load, y_in):
|
| 150 |
+
x1_io = x11 / x1_scaler_io
|
| 151 |
+
x2_io = x2_scaler_io.transform(x21)
|
| 152 |
+
y_io_prd = mdl_io.predict([x1_io, x2_io]).round()
|
| 153 |
+
|
| 154 |
+
for (et0, yio) in zip(yi1, y_io_prd):
|
| 155 |
+
if yio == 1:
|
| 156 |
+
et0[0] = -1
|
| 157 |
+
et0[1] = -1
|
| 158 |
+
|
| 159 |
+
er_dir = ey.create_dir([sbj_dir, ey.ER])
|
| 160 |
+
|
| 161 |
+
# Removing the samples that are during blinking
|
| 162 |
+
blinking_threshold = ey.get_threshold(er_dir, blinking_threshold)
|
| 163 |
+
blinking = ey.get_blinking(t_load, eyes_ratio, blinking_threshold)[1]
|
| 164 |
+
for (yi1, bl1) in zip(y_in, blinking):
|
| 165 |
+
for (yi0, bl0) in zip(yi1, bl1):
|
| 166 |
+
if bl0:
|
| 167 |
+
yi0[0] = -1
|
| 168 |
+
yi0[1] = -1
|
| 169 |
+
|
| 170 |
+
"""Putting the values that are consecuitive and are looking inside the screen and they are not blink,
|
| 171 |
+
into one list"""
|
| 172 |
+
y_prd_mat = []
|
| 173 |
+
for yi1 in y_in:
|
| 174 |
+
blinking_out = (yi1[:, 0] == -1)
|
| 175 |
+
n_smp = yi1.shape[0]
|
| 176 |
+
i = 0
|
| 177 |
+
while i < (n_smp):
|
| 178 |
+
bo_vec = []
|
| 179 |
+
in_vec = []
|
| 180 |
+
now = blinking_out[i]
|
| 181 |
+
if now:
|
| 182 |
+
bo_vec.append(yi1[i])
|
| 183 |
+
else:
|
| 184 |
+
in_vec.append(yi1[i])
|
| 185 |
+
j = 1
|
| 186 |
+
if (i+j) < n_smp:
|
| 187 |
+
while blinking_out[i+j] == now:
|
| 188 |
+
if now:
|
| 189 |
+
bo_vec.append(yi1[i+j])
|
| 190 |
+
else:
|
| 191 |
+
in_vec.append(yi1[i+j])
|
| 192 |
+
j += 1
|
| 193 |
+
if (i+j) >= n_smp:
|
| 194 |
+
break
|
| 195 |
+
if now:
|
| 196 |
+
y_prd_mat.append(np.array(bo_vec))
|
| 197 |
+
else:
|
| 198 |
+
y_prd_mat.append(np.array(in_vec))
|
| 199 |
+
i += j
|
| 200 |
+
|
| 201 |
+
# Implementing median filter to the predicted values
|
| 202 |
+
for y_prd_vec in y_prd_mat:
|
| 203 |
+
if y_prd_vec[0, 0] != -1:
|
| 204 |
+
if 3 < y_prd_vec.shape[0] < (median_filter_window_size+2):
|
| 205 |
+
y_prd_vec[:, 0] = signal.medfilt(y_prd_vec[:, 0], 3)
|
| 206 |
+
y_prd_vec[:, 1] = signal.medfilt(y_prd_vec[:, 1], 3)
|
| 207 |
+
elif y_prd_vec.shape[0] >= (median_filter_window_size+2):
|
| 208 |
+
y_prd_vec[:, 0] = signal.medfilt(y_prd_vec[:, 0], median_filter_window_size)
|
| 209 |
+
y_prd_vec[:, 1] = signal.medfilt(y_prd_vec[:, 1], median_filter_window_size)
|
| 210 |
+
|
| 211 |
+
# Concatenating y
|
| 212 |
+
y_prd_fnl = y_prd_mat[0]
|
| 213 |
+
for (i, y_prd_vec) in enumerate(y_prd_mat):
|
| 214 |
+
if i == 0:
|
| 215 |
+
continue
|
| 216 |
+
y_prd_fnl = np.concatenate([y_prd_fnl, y_prd_vec], 0)
|
| 217 |
+
|
| 218 |
+
# Saving the data
|
| 219 |
+
if tfn == 1:
|
| 220 |
+
t = []
|
| 221 |
+
sys_time = []
|
| 222 |
+
for (t1, st1) in zip(t_load, sys_time_load):
|
| 223 |
+
for (t0, st0) in zip(t1, st1):
|
| 224 |
+
t.append(t0)
|
| 225 |
+
sys_time.append(st0)
|
| 226 |
+
t = np.array(t)
|
| 227 |
+
wb = Workbook()
|
| 228 |
+
ws = wb.active
|
| 229 |
+
ws['A1'] = "Time"
|
| 230 |
+
ws['A2'] = "sec"
|
| 231 |
+
ws['B1'] = "SystemTime"
|
| 232 |
+
ws['C1'] = "EyeTrack"
|
| 233 |
+
ws['C2'] = "(p_x/scr_w,p_y/scr_h)"
|
| 234 |
+
ws['D1'] = "Condition"
|
| 235 |
+
ws['D2'] = "{start,stop}"
|
| 236 |
+
ws['D3'] = "start"
|
| 237 |
+
for i in range(y_prd_fnl.shape[0]):
|
| 238 |
+
ws[f'A{i+3}'] = f"{t[i]}"
|
| 239 |
+
ws[f'B{i+3}'] = sys_time[i]
|
| 240 |
+
ws[f'C{i+3}'] = f"({round(y_prd_fnl[i, 0] * 10000)/10000},{round(y_prd_fnl[i, 1] * 10000)/10000})"
|
| 241 |
+
ws[f'D{i+3}'] = "stop"
|
| 242 |
+
wb.save(target_dir + "eye_track.xlsx")
|
| 243 |
+
ey.save([t, y_prd_fnl], target_dir, ["t_vec", "y_prd"])
|
| 244 |
+
|
| 245 |
+
if delete_files:
|
| 246 |
+
ey.remove(target_dir, [ey.FV])
|
| 247 |
+
else:
|
| 248 |
+
y_vec = []
|
| 249 |
+
for y1 in y_load:
|
| 250 |
+
for y0 in y1:
|
| 251 |
+
y_vec.append(y0)
|
| 252 |
+
y_vec = np.array(y_vec)
|
| 253 |
+
|
| 254 |
+
y_vec = y_vec[y_prd_fnl[:, 0] != -1]
|
| 255 |
+
y_prd_fnl = y_prd_fnl[y_prd_fnl[:, 0] != -1]
|
| 256 |
+
|
| 257 |
+
losses = np.sum(((y_prd_fnl-y_vec)*y_scaler)**2, 0) / y_vec.shape[0]
|
| 258 |
+
|
| 259 |
+
print(f"Lossess for two hrz and vrt models: {losses}")
|
| 260 |
+
|
| 261 |
+
info["hrz_retrain_test_loss"] = losses[0]
|
| 262 |
+
info["vrt_retrain_test_loss"] = losses[1]
|
| 263 |
+
|
| 264 |
+
y_prd_fnl[y_prd_fnl < out_threshold_min] = out_threshold_min
|
| 265 |
+
y_prd_fnl[y_prd_fnl > out_threshold_max] = out_threshold_max
|
| 266 |
+
|
| 267 |
+
ey.save([info], sbj_models_dir, [mdl_et_name])
|
| 268 |
+
ey.save([y_vec, y_prd_fnl], target_dir, ["y_mdf", "y_prd_mdf"])
|
| 269 |
+
|
| 270 |
+
if delete_files:
|
| 271 |
+
ey.remove(target_dir, [ey.Y])
|
| 272 |
+
|
| 273 |
+
if delete_files:
|
| 274 |
+
ey.remove(sbj_models_dir)
|
| 275 |
+
ey.remove(target_dir, [ey.ER, ey.X1, ey.X2, ey.T])
|
| 276 |
+
else:
|
| 277 |
+
print(f"Data does not exist in {sbj_models_dir}")
|
| 278 |
+
else:
|
| 279 |
+
print(f"Data does not exist in {target_dir}")
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
@staticmethod
def get_fixations(
    subjects,
    n_monitors_data=1,
    t_discard=0.1,
    x_merge=0.2/2,
    y_merge=0.25/2,
    vx_thr=2.5,
    vy_thr=2.5
):
    """
    Compute fixations from eye movements using the I-VT (velocity threshold) method.
    Visit README.md for more details. You can do this for all the subjects at once.

    Reads `eye_track.xlsx` from each subject's sampling folder and writes the
    detected fixations to `fixations.xlsx` in the same folder.

    Parameters:
        subjects: list of subject numbers (folder names under ey.subjects_dir)
        n_monitors_data: the number of monitors used while the data was collected
            (horizontal merge distance is divided by this).
        t_discard: fixations shorter than this duration (seconds) are removed.
        x_merge: fixations closer than this value (horizontal direction, in
            normalized screen units) are merged together.
        y_merge: fixations closer than this value (vertical direction) are merged.
        vx_thr: velocity threshold for detecting saccades in the x direction.
        vy_thr: velocity threshold for detecting saccades in the y direction.

    Returns:
        None
    """

    # Going through each subject's folder to compute their fixations
    for num in subjects:
        smp_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.SMP])

        if ey.file_existing(smp_dir, "eye_track.xlsx"):
            # Rows 1-2 are headers; data starts at row 3.
            # Columns: A=time, B=system time, C="(x,y)" gaze string, D=start/stop flag.
            sheet = load_workbook(smp_dir + "eye_track.xlsx")["Sheet"]
            max_row = sheet.max_row
            et_xl = []
            for i in range(3, max_row+1):
                # Strip the surrounding parentheses of "(x,y)" and split.
                et_cell_list = sheet[f"C{i}"].value[1:-1].split(',')
                et_xl.append(
                    [float(sheet[f"A{i}"].value),
                     sheet[f"B{i}"].value,
                     float(et_cell_list[0]),
                     float(et_cell_list[1]),
                     sheet[f"D{i}"].value]
                )
            n_smp_all = len(et_xl)

            """There is some tims that you don't want to calculate the fixations. you can simply put 'start' and 'stop'
            in the last column in the eye_track.xlsx file to determine the moments that you want be calculated. So, each
            series of values that are between 'start' and 'stop' is considered as a vector and in this way, these vectors
            go to matrices (each matrix contains several vectors). for example, t_mat_seq"""
            i = 0
            t_mat_seq = []
            t_sys_mat_seq = []
            et_mat_seq = []
            while i < n_smp_all:
                if (et_xl[i][4] == "start") or (et_xl[i][4] == "Start"):
                    t1 = []
                    ts1 = []
                    et1 = []
                    j = 0
                    # Collect samples until the matching "stop" marker.
                    # NOTE(review): assumes every "start" has a "stop" before the
                    # end of the sheet; otherwise i+j runs past the list — confirm.
                    while True:
                        t1.append([et_xl[i+j][0]])
                        ts1.append([et_xl[i+j][1]])
                        et1.append([et_xl[i+j][2], et_xl[i+j][3]])
                        if et_xl[i+j][4] == "stop" or et_xl[i+j][4] == "Stop":
                            break
                        j += 1
                    t_mat_seq.append(np.array(t1).reshape((len(t1),)))
                    t_sys_mat_seq.append(ts1)
                    et_mat_seq.append(np.array(et1))
                    i += j
                i += 1

            # Creating the vectors for time and eye track (concatenation of all
            # start/stop segments; used only as flattened aggregates here).
            t = t_mat_seq[0]
            t_sys = t_sys_mat_seq[0]
            et = et_mat_seq[0]
            for (i, t1) in enumerate(t_mat_seq):
                if i == 0:
                    continue
                t = np.concatenate([t, t1])
                t_sys += t_sys_mat_seq[i]
                et = np.concatenate([et, et_mat_seq[i]])

            # Removing the samples that are during blinking or are looking outside of the screen.
            # Each segment is split into maximal runs of "blinking/out" (x == -1)
            # vs valid samples, so invalid runs can be treated separately below.
            t_mat = []
            t_sys_mat = []
            et_mat = []
            for (t1, ts1, et1) in zip(t_mat_seq, t_sys_mat_seq, et_mat_seq):
                n_smp1 = t1.shape[0]
                # -1 in the x coordinate marks a blink / off-screen sample.
                blinking_out = (et1[:, 0] == -1)
                t_mat1 = []
                ts_mat1 = []
                et_mat1 = []
                i = 0
                while i < (n_smp1):
                    t0 = [t1[i]]
                    ts0 = [ts1[i]]
                    bo_vec = []
                    in_vec = []
                    now = blinking_out[i]
                    if now:
                        bo_vec.append(et1[i])
                    else:
                        in_vec.append(et1[i])
                    j = 1
                    # Extend the run while the blink/valid state stays the same.
                    if (i+j) < n_smp1:
                        while blinking_out[i+j] == now:
                            t0.append(t1[i+j])
                            ts0.append(ts1[i+j])
                            if now:
                                bo_vec.append(et1[i+j])
                            else:
                                in_vec.append(et1[i+j])
                            j += 1
                            if (i+j) >= n_smp1:
                                break
                    t_mat1.append(np.array(t0))
                    ts_mat1.append(ts0)
                    if now:
                        et_mat1.append(np.array(bo_vec))
                    else:
                        et_mat1.append(np.array(in_vec))
                    i += j
                t_mat.append(t_mat1)
                t_sys_mat.append(ts_mat1)
                et_mat.append(et_mat1)

            # Calculating the saccades: per valid run, finite-difference gaze
            # velocities; a sample is a saccade when |vx| or |vy| exceeds its threshold.
            saccades = []
            vet_mat = []
            for (t2, et2) in zip(t_mat, et_mat):
                saccades1 = []
                vet_mat1 = []
                for (t1, et1) in zip(t2, et2):
                    if et1[0, 0] != -1:
                        if et1.shape[0] == 1:
                            # A single sample has no velocity; mark as undecided.
                            vet1 = np.zeros((1,2))
                            s1 = [None]
                        else:
                            vet1 = et1.copy()
                            vet1[1:, 0] = (et1[1:, 0] - et1[:-1, 0]) / (t1[1:] - t1[:-1])
                            vet1[1:, 1] = (et1[1:, 1] - et1[:-1, 1]) / (t1[1:] - t1[:-1])
                            # First sample inherits the velocity of the second.
                            vet1[0] = vet1[1]

                            # Boolean saccade mask (numpy '+' on booleans acts as OR).
                            s1 = ((vet1[:, 0]>vx_thr)+(vet1[:, 0]<-vx_thr))+((vet1[:, 1]>vy_thr)+(vet1[:, 1]<-vy_thr))
                    else:
                        # Blink/off-screen run: zero velocity, no saccade decision.
                        et_shape = et1.shape[0]
                        vet1 = np.zeros(et1.shape)
                        s1 = np.array([None] * et_shape)
                    vet_mat1.append(vet1)
                    saccades1.append(s1)
                saccades.append(saccades1)
                vet_mat.append(vet_mat1)

            # Creating a vector of eye movement velocity (flattened over all runs;
            # computed but not used further in this method).
            vet4 = []
            for vet3 in vet_mat:
                vet2 = vet3[0].copy()
                for (i, vet1) in enumerate(vet3):
                    if i == 0:
                        continue
                    vet2 = np.concatenate([vet2, vet1], 0)
                vet4.append(np.array(vet2))
            vet = vet4[0]
            for (i, vet1) in enumerate(vet4):
                if i == 0:
                    continue
                vet = np.concatenate([vet, vet1])

            """Separating the time and eye movements based on the saccades. It means we are considering a vector for
            each series of values that we think they are one fixation."""
            sac_mat_new = []
            t_mat_new = []
            t_sys_mat_new = []
            et_mat_new = []
            for (t_mat1, ts_mat1, et_mat1, saccades1) in zip(t_mat, t_sys_mat, et_mat, saccades):
                k = 0
                sac_mat_new1 = []
                t_mat_new1 = []
                t_sys_mat_new1 = []
                et_mat_new1 = []
                for (t1, ts1, et1, sac1) in zip(t_mat1, ts_mat1, et_mat1, saccades1):
                    if et1[0, 0] != -1:
                        n_smp = t1.shape[0]
                        i = 0
                        # Split the run into maximal sub-runs of constant saccade state.
                        while i < (n_smp):
                            s0 = [sac1[i]]
                            t0 = [t1[i]]
                            ts0 = [ts1[i]]
                            et0 = [et1[i]]
                            now = sac1[i]
                            j = 1
                            if (i+j) < n_smp:
                                while sac1[i+j] == now:
                                    s0.append(sac1[i+j])
                                    t0.append(t1[i+j])
                                    ts0.append(ts1[i+j])
                                    et0.append(et1[i+j])
                                    j += 1
                                    if (i+j) >= n_smp:
                                        break
                            sac_mat_new1.append(np.array(s0))
                            t_mat_new1.append(np.array(t0))
                            t_sys_mat_new1.append(ts0)
                            et_mat_new1.append(np.array(et0))
                            i += j
                    else:
                        # Blink/off-screen runs pass through unchanged.
                        sac_mat_new1.append(sac1)
                        t_mat_new1.append(t1)
                        t_sys_mat_new1.append(ts1)
                        et_mat_new1.append(et1)
                sac_mat_new.append(sac_mat_new1)
                t_mat_new.append(t_mat_new1)
                t_sys_mat_new.append(t_sys_mat_new1)
                et_mat_new.append(et_mat_new1)

            """We are turing each vector of fixations to a list of some information, like the number of values that it contains,
            The start time, mean of the eye movements, and sys mean time."""
            # Fixation record layout: [sample offset, n samples, start time,
            # duration, mean x, mean y, system time of first sample].
            fix = []
            k = 0
            for (sac_mat_new1, t_mat_new1, t_sys_mat_new1, et_mat_new1) in zip(sac_mat_new, t_mat_new, t_sys_mat_new, et_mat_new):
                fix1 = []
                for (s1, t1, ts1, et1) in zip(sac_mat_new1, t_mat_new1, t_sys_mat_new1, et_mat_new1):
                    sac_shp = s1.shape
                    # Only non-saccade runs become fixations. `s1[0] == False`
                    # excludes None (blink) and True (saccade) states; the inner
                    # `if not s1[0]` is then always true — kept as in original.
                    if s1[0] == False:
                        if not s1[0]:
                            fix1.append([k,
                                         sac_shp[0],
                                         t1[0],
                                         round(t1[-1]-t1[0], 2),
                                         round(et1[:, 0].mean(), 4),
                                         round(et1[:, 1].mean(), 4),
                                         ts1[0]])
                    k += sac_shp[0]
                fix.append(fix1)

            # Merging the fixations that are near together (pass 1).
            # Two fixations merge when their normalized elliptical distance
            # (x_merge/n_monitors_data horizontally, y_merge vertically) is < 1.
            fix_mrg_one = []
            for fix1 in fix:
                fix_mrg1 = []
                n_fix = len(fix1)
                i = 0
                while i < n_fix:
                    f_new = fix1[i]
                    j = 1
                    while (i+j) < n_fix:
                        fj = fix1[i+j]
                        fj_d = ((fj[4]-f_new[4])/(x_merge/n_monitors_data))**2+((fj[5]-f_new[5])/(y_merge))**2
                        if fj_d < 1:
                            # Weighted (by sample count) average of positions;
                            # durations are summed.
                            f_new = [f_new[0],
                                     f_new[1] + fj[1],
                                     f_new[2],
                                     round(f_new[3] + fj[3], 2),
                                     round((f_new[4]*f_new[1]+fj[4]*fj[1])/(f_new[1]+fj[1]), 4),
                                     round((f_new[5]*f_new[1]+fj[5]*fj[1])/(f_new[1]+fj[1]), 4),
                                     f_new[-1]]
                            if (i+j) == n_fix-1:
                                fix_mrg1.append(f_new)
                                not_joined = False
                            else:
                                fix_mrg1.append(f_new)
                                not_joined = True
                            break
                        j += 1
                    i += j
                # NOTE(review): `not_joined` may be unbound here when fix1 is
                # empty or no merge ever happened; also, a fixation whose inner
                # search finds no close neighbor is never appended — both look
                # like latent bugs to confirm with the author.
                if not_joined:
                    fix_mrg1.append(fix1[-1])

                fix_mrg_one.append(fix_mrg1)

            # Removing the fixations that are short (duration < t_discard).
            fix_dcd = []
            for fix_mrg1 in fix_mrg_one:
                fix_dcd1 = []
                for f in fix_mrg1:
                    if f[3] >= t_discard:
                        fix_dcd1.append(f)
                fix_dcd.append(fix_dcd1)

            # Merging the fixations that are near together (pass 2, after the
            # short ones have been discarded; same merge rule as pass 1).
            fix_mrg_two = []
            for fix1 in fix_dcd:
                fix_mrg1 = []
                n_fix = len(fix1)
                i = 0
                while i < n_fix:
                    f_new = fix1[i]
                    j = 1
                    while (i+j) < n_fix:
                        fj = fix1[i+j]
                        fj_d = ((fj[4]-f_new[4])/(x_merge/n_monitors_data))**2+((fj[5]-f_new[5])/(y_merge))**2
                        if fj_d < 1:
                            f_new = [f_new[0],
                                     f_new[1] + fj[1],
                                     f_new[2],
                                     round(f_new[3] + fj[3], 2),
                                     round((f_new[4]*f_new[1]+fj[4]*fj[1])/(f_new[1]+fj[1]), 4),
                                     round((f_new[5]*f_new[1]+fj[5]*fj[1])/(f_new[1]+fj[1]), 4),
                                     f_new[-1]]
                            if (i+j) == n_fix-1:
                                fix_mrg1.append(f_new)
                                not_joined = False
                            else:
                                fix_mrg1.append(f_new)
                                not_joined = True
                            break
                        j += 1
                        # if (i+j) >= n_fix:
                        #     break
                    i += j
                if not_joined:
                    fix_mrg1.append(fix1[-1])

                fix_mrg_two.append(fix_mrg1)

            # Saving the fixations into the fixations.xlsx
            wb = Workbook()
            ws = wb.active
            ws['A1'] = "FixationTime"
            ws['A2'] = "sec"
            ws['B1'] = "FixationSystemTime"
            ws['C1'] = "FixationDuration"
            ws['C2'] = "sec"
            ws['D1'] = "FixationLocation"
            ws['D2'] = "(p_x/scr_w,p_y/scr_h)"
            i = 0
            for f_seq in fix_mrg_two:
                for f in f_seq:
                    ws[f'A{i+3}'] = f"{f[2]}"
                    ws[f'B{i+3}'] = f[6][0]
                    ws[f'C{i+3}'] = f"{f[3]}"
                    ws[f'D{i+3}'] = f"({f[4]},{f[5]})"
                    i += 1

            wb.save(smp_dir + "fixations.xlsx")
        else:
            print(f"Data does not exist in {smp_dir}")
|
| 620 |
+
|
| 621 |
+
@staticmethod
def get_models_information(show_model=False):
    """Write the NN models' information into `info_et_retrains.xlsx`.

    Walks every subject folder under ey.subjects_dir, loads each pickled
    model-info record, and writes one spreadsheet row per model.

    Parameters:
        show_model: Whether or not to print each model's Keras summary.

    Returns:
        None
    """

    wb = Workbook()
    ws = wb.active

    # Header row: one column per recorded model attribute (A..W).
    header_titles = [
        "subject", "model", "trained model", "weights",
        "input 1 shape", "input 2 shape", "x2 chosen features",
        "min-Max brightness ratio", "r_train", "epochs and patience",
        "model-hrz train loss", "model-hrz val loss",
        "model-vrt train loss", "model-vrt val loss",
        "r_retrain", "epochs and patience-retrain", "trainable layers",
        "model-hrz-retrain train loss", "model-hrz-retrain val loss",
        "model-vrt-retrain train loss", "model-vrt-retrain val loss",
        "model-hrz-retrain test loss", "model-vrt-retrain test loss",
    ]
    for col, title in zip("ABCDEFGHIJKLMNOPQRSTUVW", header_titles):
        ws[f"{col}1"] = title

    # Info-dict keys written to columns C..W, in column order.
    info_keys = [
        'trained_mdl_num', 'n_weights', 'input1_shape', 'input2_shape',
        'x2_chosen_features', 'min_max_brightness_ratio', 'r_train',
        'n_epochs_patience', 'hrz_train_loss', 'hrz_val_loss',
        'vrt_train_loss', 'vrt_val_loss', 'r_retrain',
        'n_epochs_patience_retrain', 'trainable_layers',
        'hrz_retrain_train_loss', 'hrz_retrain_val_loss',
        'vrt_retrain_train_loss', 'vrt_retrain_val_loss',
        'hrz_retrain_test_loss', 'vrt_retrain_test_loss',
    ]

    row = 2
    for sbj_name in os.listdir(ey.subjects_dir):
        sbj_num = int(sbj_name)
        models_dir = ey.create_dir([ey.subjects_dir, f"{sbj_num}", ey.MDL])
        for fn in os.listdir(models_dir):
            # Only pickled info records describe a model; skip everything else.
            if fn[-7:] != ".pickle":
                continue
            # File names look like "mdlN.pickle" — the number starts at index 3.
            mdl_num = int(fn[3:-7])
            mdl_name = ey.MDL + f"{mdl_num}"
            if show_model:
                # The horizontal model is representative of the architecture.
                mdl = load_model(models_dir + mdl_name + "-hrz.h5")
                print(mdl.summary())
            info = ey.load(models_dir, [ey.MDL + f"{mdl_num}"])[0]

            ws[f'A{row}'] = str(sbj_num)
            ws[f'B{row}'] = str(mdl_num)
            for col, key in zip("CDEFGHIJKLMNOPQRSTUVW", info_keys):
                ws[f'{col}{row}'] = str(info[key])

            row += 1

    wb.save(ey.files_dir + "info_et_retrains.xlsx")
|
codes/jupyter_notebook/check_boi_model_on_et.ipynb
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"id": "fecbac60",
|
| 7 |
+
"metadata": {},
|
| 8 |
+
"outputs": [],
|
| 9 |
+
"source": [
|
| 10 |
+
"from tensorflow.keras.models import load_model\n",
|
| 11 |
+
"from joblib import load as j_load\n",
|
| 12 |
+
"import cv2\n",
|
| 13 |
+
"import pickle"
|
| 14 |
+
]
|
| 15 |
+
},
|
| 16 |
+
{
|
| 17 |
+
"cell_type": "code",
|
| 18 |
+
"execution_count": 2,
|
| 19 |
+
"id": "c0f85582",
|
| 20 |
+
"metadata": {},
|
| 21 |
+
"outputs": [],
|
| 22 |
+
"source": [
|
| 23 |
+
"# Uncomment this box if the file is in your local directory\n",
|
| 24 |
+
"path2root = \"../../\""
|
| 25 |
+
]
|
| 26 |
+
},
|
| 27 |
+
{
|
| 28 |
+
"cell_type": "code",
|
| 29 |
+
"execution_count": 22,
|
| 30 |
+
"id": "bf26ac93",
|
| 31 |
+
"metadata": {},
|
| 32 |
+
"outputs": [],
|
| 33 |
+
"source": [
|
| 34 |
+
"subjects_fol = \"subjects/\"\n",
|
| 35 |
+
"sbj_scalers_boi_name = \"scalers-boi.bin\"\n",
|
| 36 |
+
"sbj_model_boi_name = \"model-boi2.h5\"\n",
|
| 37 |
+
"sbj = 3\n",
|
| 38 |
+
"data_et_fol = \"data-et-clb/\"\n",
|
| 39 |
+
"chosen_inputs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"
|
| 40 |
+
]
|
| 41 |
+
},
|
| 42 |
+
{
|
| 43 |
+
"cell_type": "code",
|
| 44 |
+
"execution_count": 23,
|
| 45 |
+
"id": "2787fe8b",
|
| 46 |
+
"metadata": {},
|
| 47 |
+
"outputs": [],
|
| 48 |
+
"source": [
|
| 49 |
+
"sbj_dir = path2root + subjects_fol + f\"{sbj}/\"\n",
|
| 50 |
+
"model_boi_dir = sbj_dir + sbj_model_boi_name\n",
|
| 51 |
+
"scalers_boi_dir = sbj_dir + sbj_scalers_boi_name\n",
|
| 52 |
+
"data_et_dir = sbj_dir + data_et_fol\n",
|
| 53 |
+
" \n",
|
| 54 |
+
"with open(data_et_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 55 |
+
" x1_load = pickle.load(f)\n",
|
| 56 |
+
"with open(data_et_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 57 |
+
" x2_load = pickle.load(f)"
|
| 58 |
+
]
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"cell_type": "code",
|
| 62 |
+
"execution_count": 26,
|
| 63 |
+
"id": "eff56379",
|
| 64 |
+
"metadata": {},
|
| 65 |
+
"outputs": [
|
| 66 |
+
{
|
| 67 |
+
"ename": "FileNotFoundError",
|
| 68 |
+
"evalue": "[Errno 2] No such file or directory: '../../subjects/3/scalers-boi.bin'",
|
| 69 |
+
"output_type": "error",
|
| 70 |
+
"traceback": [
|
| 71 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
| 72 |
+
"\u001b[1;31mFileNotFoundError\u001b[0m Traceback (most recent call last)",
|
| 73 |
+
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_5904/1123430948.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[0mx2_chs_inp\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mx2_load\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mchosen_inputs\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0mscalers_boi\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mj_load\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mscalers_boi_dir\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 3\u001b[0m \u001b[0mx1_scaler_boi\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mx2_scaler_boi\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mscalers_boi\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[0mx1\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mx1_load\u001b[0m \u001b[1;33m/\u001b[0m \u001b[0mx1_scaler_boi\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mx2\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mx2_scaler_boi\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtransform\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx2_chs_inp\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 74 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\joblib\\numpy_pickle.py\u001b[0m in \u001b[0;36mload\u001b[1;34m(filename, mmap_mode)\u001b[0m\n\u001b[0;32m 577\u001b[0m \u001b[0mobj\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0m_unpickle\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfobj\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 578\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 579\u001b[1;33m \u001b[1;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilename\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'rb'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 580\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0m_read_fileobject\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mf\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfilename\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmmap_mode\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mfobj\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 581\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfobj\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 75 |
+
"\u001b[1;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '../../subjects/3/scalers-boi.bin'"
|
| 76 |
+
]
|
| 77 |
+
}
|
| 78 |
+
],
|
| 79 |
+
"source": [
|
| 80 |
+
"x2_chs_inp = x2_load[:, chosen_inputs]\n",
|
| 81 |
+
"scalers_boi = j_load(scalers_boi_dir)\n",
|
| 82 |
+
"x1_scaler_boi, x2_scaler_boi = scalers_boi\n",
|
| 83 |
+
"x1 = x1_load / x1_scaler_boi\n",
|
| 84 |
+
"x2 = x2_scaler_boi.transform(x2_chs_inp)\n",
|
| 85 |
+
" \n",
|
| 86 |
+
"# model_boi = load_model(model_boi_dir)\n",
|
| 87 |
+
" \n",
|
| 88 |
+
"# y_prd_boi = model_boi.predict([x1, x2]) # .argmax(1)"
|
| 89 |
+
]
|
| 90 |
+
},
|
| 91 |
+
{
|
| 92 |
+
"cell_type": "code",
|
| 93 |
+
"execution_count": 13,
|
| 94 |
+
"id": "259cacf4",
|
| 95 |
+
"metadata": {},
|
| 96 |
+
"outputs": [
|
| 97 |
+
{
|
| 98 |
+
"name": "stdout",
|
| 99 |
+
"output_type": "stream",
|
| 100 |
+
"text": [
|
| 101 |
+
"16\n"
|
| 102 |
+
]
|
| 103 |
+
}
|
| 104 |
+
],
|
| 105 |
+
"source": [
|
| 106 |
+
"print((y_prd_boi.round()==1).sum())"
|
| 107 |
+
]
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"cell_type": "code",
|
| 111 |
+
"execution_count": 25,
|
| 112 |
+
"id": "52ffe7c6",
|
| 113 |
+
"metadata": {},
|
| 114 |
+
"outputs": [],
|
| 115 |
+
"source": [
|
| 116 |
+
"for (i, img) in enumerate(x1):\n",
|
| 117 |
+
" if True: # i % 10 == 0:\n",
|
| 118 |
+
"# cv2.putText(img, str(round(y_prd_boi[i, 0])), (40, 80), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 0, 255), 1)\n",
|
| 119 |
+
" cv2.imshow(\"Eyes\", img)\n",
|
| 120 |
+
" q = cv2.waitKey(5)\n",
|
| 121 |
+
" if q == ord('q'):\n",
|
| 122 |
+
" break\n",
|
| 123 |
+
"cv2.destroyAllWindows()"
|
| 124 |
+
]
|
| 125 |
+
},
|
| 126 |
+
{
|
| 127 |
+
"cell_type": "code",
|
| 128 |
+
"execution_count": 7,
|
| 129 |
+
"id": "2304c430",
|
| 130 |
+
"metadata": {},
|
| 131 |
+
"outputs": [],
|
| 132 |
+
"source": [
|
| 133 |
+
"smp_num = 20\n",
|
| 134 |
+
"print(round(y_prd_boi[smp_num, 0]))\n",
|
| 135 |
+
"cv2.imshow(\"Eyes\", x1[smp_num])\n",
|
| 136 |
+
"cv2.waitKey(0)\n",
|
| 137 |
+
"cv2.destroyAllWindows()"
|
| 138 |
+
]
|
| 139 |
+
},
|
| 140 |
+
{
|
| 141 |
+
"cell_type": "code",
|
| 142 |
+
"execution_count": null,
|
| 143 |
+
"id": "0254f097",
|
| 144 |
+
"metadata": {},
|
| 145 |
+
"outputs": [],
|
| 146 |
+
"source": []
|
| 147 |
+
},
|
| 148 |
+
{
|
| 149 |
+
"cell_type": "code",
|
| 150 |
+
"execution_count": null,
|
| 151 |
+
"id": "7fa3edcb",
|
| 152 |
+
"metadata": {},
|
| 153 |
+
"outputs": [],
|
| 154 |
+
"source": []
|
| 155 |
+
}
|
| 156 |
+
],
|
| 157 |
+
"metadata": {
|
| 158 |
+
"kernelspec": {
|
| 159 |
+
"display_name": "venv",
|
| 160 |
+
"language": "python",
|
| 161 |
+
"name": "venv"
|
| 162 |
+
},
|
| 163 |
+
"language_info": {
|
| 164 |
+
"codemirror_mode": {
|
| 165 |
+
"name": "ipython",
|
| 166 |
+
"version": 3
|
| 167 |
+
},
|
| 168 |
+
"file_extension": ".py",
|
| 169 |
+
"mimetype": "text/x-python",
|
| 170 |
+
"name": "python",
|
| 171 |
+
"nbconvert_exporter": "python",
|
| 172 |
+
"pygments_lexer": "ipython3",
|
| 173 |
+
"version": "3.9.6"
|
| 174 |
+
}
|
| 175 |
+
},
|
| 176 |
+
"nbformat": 4,
|
| 177 |
+
"nbformat_minor": 5
|
| 178 |
+
}
|
codes/jupyter_notebook/crt_emp_2mdl_et.ipynb
ADDED
|
@@ -0,0 +1,317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"metadata": {
|
| 7 |
+
"executionInfo": {
|
| 8 |
+
"elapsed": 3227,
|
| 9 |
+
"status": "ok",
|
| 10 |
+
"timestamp": 1635187414445,
|
| 11 |
+
"user": {
|
| 12 |
+
"displayName": "mostafa lotfi",
|
| 13 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 14 |
+
"userId": "11325640182532482321"
|
| 15 |
+
},
|
| 16 |
+
"user_tz": -210
|
| 17 |
+
},
|
| 18 |
+
"id": "gdpcJ0ywSIY8"
|
| 19 |
+
},
|
| 20 |
+
"outputs": [],
|
| 21 |
+
"source": [
|
| 22 |
+
"from tensorflow.keras.layers import (Input, Conv2D, Flatten, MaxPooling2D,\n",
|
| 23 |
+
" Dense, Dropout, Concatenate)\n",
|
| 24 |
+
"from tensorflow.keras.models import Model\n",
|
| 25 |
+
"from tensorflow.keras.utils import plot_model\n",
|
| 26 |
+
"import numpy as np\n",
|
| 27 |
+
"import os\n",
|
| 28 |
+
"import pickle"
|
| 29 |
+
]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"cell_type": "code",
|
| 33 |
+
"execution_count": 2,
|
| 34 |
+
"metadata": {},
|
| 35 |
+
"outputs": [],
|
| 36 |
+
"source": [
|
| 37 |
+
"# # Uncomment this box if the file is in google colab\n",
|
| 38 |
+
"# path2root = \"/content/drive/MyDrive/Projects/EyeTracker/\""
|
| 39 |
+
]
|
| 40 |
+
},
|
| 41 |
+
{
|
| 42 |
+
"cell_type": "code",
|
| 43 |
+
"execution_count": 2,
|
| 44 |
+
"metadata": {},
|
| 45 |
+
"outputs": [],
|
| 46 |
+
"source": [
|
| 47 |
+
"# Uncomment this box if the file is in your local directory\n",
|
| 48 |
+
"path2root = \"../../\""
|
| 49 |
+
]
|
| 50 |
+
},
|
| 51 |
+
{
|
| 52 |
+
"cell_type": "code",
|
| 53 |
+
"execution_count": 3,
|
| 54 |
+
"metadata": {
|
| 55 |
+
"executionInfo": {
|
| 56 |
+
"elapsed": 23,
|
| 57 |
+
"status": "ok",
|
| 58 |
+
"timestamp": 1635187414448,
|
| 59 |
+
"user": {
|
| 60 |
+
"displayName": "mostafa lotfi",
|
| 61 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 62 |
+
"userId": "11325640182532482321"
|
| 63 |
+
},
|
| 64 |
+
"user_tz": -210
|
| 65 |
+
},
|
| 66 |
+
"id": "YGlf78zqN8xb"
|
| 67 |
+
},
|
| 68 |
+
"outputs": [],
|
| 69 |
+
"source": [
|
| 70 |
+
"subjects_fol = \"subjects/\"\n",
|
| 71 |
+
"data_et_fol = \"data-et-clb/\"\n",
|
| 72 |
+
"models_fol = \"models/\"\n",
|
| 73 |
+
"models_et_fol = \"et/\"\n",
|
| 74 |
+
"raw_fol = \"raw/\"\n",
|
| 75 |
+
"chosen_inputs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"
|
| 76 |
+
]
|
| 77 |
+
},
|
| 78 |
+
{
|
| 79 |
+
"cell_type": "code",
|
| 80 |
+
"execution_count": 4,
|
| 81 |
+
"metadata": {
|
| 82 |
+
"executionInfo": {
|
| 83 |
+
"elapsed": 1441,
|
| 84 |
+
"status": "ok",
|
| 85 |
+
"timestamp": 1635187415872,
|
| 86 |
+
"user": {
|
| 87 |
+
"displayName": "mostafa lotfi",
|
| 88 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 89 |
+
"userId": "11325640182532482321"
|
| 90 |
+
},
|
| 91 |
+
"user_tz": -210
|
| 92 |
+
},
|
| 93 |
+
"id": "wIrwyDFyQpof"
|
| 94 |
+
},
|
| 95 |
+
"outputs": [],
|
| 96 |
+
"source": [
|
| 97 |
+
"data_et_dir = path2root + subjects_fol + f\"{3}/\" + data_et_fol\n",
|
| 98 |
+
"\n",
|
| 99 |
+
"with open(data_et_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 100 |
+
" x1 = pickle.load(f)\n",
|
| 101 |
+
"with open(data_et_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 102 |
+
" x2 = pickle.load(f)\n",
|
| 103 |
+
"with open(data_et_dir + \"y.pickle\", \"rb\") as f:\n",
|
| 104 |
+
" y = pickle.load(f)\n",
|
| 105 |
+
"\n",
|
| 106 |
+
"x2_chs_inp = x2[:, chosen_inputs]"
|
| 107 |
+
]
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"cell_type": "code",
|
| 111 |
+
"execution_count": 14,
|
| 112 |
+
"metadata": {
|
| 113 |
+
"colab": {
|
| 114 |
+
"base_uri": "https://localhost:8080/"
|
| 115 |
+
},
|
| 116 |
+
"executionInfo": {
|
| 117 |
+
"elapsed": 944,
|
| 118 |
+
"status": "ok",
|
| 119 |
+
"timestamp": 1635187416811,
|
| 120 |
+
"user": {
|
| 121 |
+
"displayName": "mostafa lotfi",
|
| 122 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 123 |
+
"userId": "11325640182532482321"
|
| 124 |
+
},
|
| 125 |
+
"user_tz": -210
|
| 126 |
+
},
|
| 127 |
+
"id": "4PX0-j2gRhWL",
|
| 128 |
+
"outputId": "7bc4570b-7576-48c6-9455-845aa581d904"
|
| 129 |
+
},
|
| 130 |
+
"outputs": [
|
| 131 |
+
{
|
| 132 |
+
"name": "stdout",
|
| 133 |
+
"output_type": "stream",
|
| 134 |
+
"text": [
|
| 135 |
+
"Model: \"model_4\"\n",
|
| 136 |
+
"__________________________________________________________________________________________________\n",
|
| 137 |
+
"Layer (type) Output Shape Param # Connected to \n",
|
| 138 |
+
"==================================================================================================\n",
|
| 139 |
+
"input_10 (InputLayer) [(None, 100, 100, 1) 0 \n",
|
| 140 |
+
"__________________________________________________________________________________________________\n",
|
| 141 |
+
"conv2d_15 (Conv2D) (None, 50, 50, 16) 800 input_10[0][0] \n",
|
| 142 |
+
"__________________________________________________________________________________________________\n",
|
| 143 |
+
"conv2d_16 (Conv2D) (None, 25, 25, 32) 12832 conv2d_15[0][0] \n",
|
| 144 |
+
"__________________________________________________________________________________________________\n",
|
| 145 |
+
"conv2d_17 (Conv2D) (None, 12, 12, 64) 18496 conv2d_16[0][0] \n",
|
| 146 |
+
"__________________________________________________________________________________________________\n",
|
| 147 |
+
"flatten_4 (Flatten) (None, 9216) 0 conv2d_17[0][0] \n",
|
| 148 |
+
"__________________________________________________________________________________________________\n",
|
| 149 |
+
"input_11 (InputLayer) [(None, 10)] 0 \n",
|
| 150 |
+
"__________________________________________________________________________________________________\n",
|
| 151 |
+
"concatenate_4 (Concatenate) (None, 9226) 0 flatten_4[0][0] \n",
|
| 152 |
+
" input_11[0][0] \n",
|
| 153 |
+
"__________________________________________________________________________________________________\n",
|
| 154 |
+
"dense_20 (Dense) (None, 256) 2362112 concatenate_4[0][0] \n",
|
| 155 |
+
"__________________________________________________________________________________________________\n",
|
| 156 |
+
"dense_21 (Dense) (None, 128) 32896 dense_20[0][0] \n",
|
| 157 |
+
"__________________________________________________________________________________________________\n",
|
| 158 |
+
"dense_22 (Dense) (None, 32) 4128 dense_21[0][0] \n",
|
| 159 |
+
"__________________________________________________________________________________________________\n",
|
| 160 |
+
"dense_23 (Dense) (None, 5) 165 dense_22[0][0] \n",
|
| 161 |
+
"__________________________________________________________________________________________________\n",
|
| 162 |
+
"dense_24 (Dense) (None, 1) 6 dense_23[0][0] \n",
|
| 163 |
+
"==================================================================================================\n",
|
| 164 |
+
"Total params: 2,431,435\n",
|
| 165 |
+
"Trainable params: 2,431,435\n",
|
| 166 |
+
"Non-trainable params: 0\n",
|
| 167 |
+
"__________________________________________________________________________________________________\n",
|
| 168 |
+
"None\n"
|
| 169 |
+
]
|
| 170 |
+
}
|
| 171 |
+
],
|
| 172 |
+
"source": [
|
| 173 |
+
"input1 = Input(x1.shape[1:])\n",
|
| 174 |
+
"layer = Conv2D(16, (7, 7), (2, 2), 'same', activation='relu')(input1)\n",
|
| 175 |
+
"# layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 176 |
+
"\n",
|
| 177 |
+
"# layer = Conv2D(32, (7, 7), (1, 1), 'same', activation='relu')(layer)\n",
|
| 178 |
+
"# layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 179 |
+
"\n",
|
| 180 |
+
"layer = Conv2D(32, (5, 5), (2, 2), 'same', activation='relu')(layer)\n",
|
| 181 |
+
"# layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 182 |
+
"\n",
|
| 183 |
+
"layer = Conv2D(64, (3, 3), (2, 2), activation='relu')(layer)\n",
|
| 184 |
+
"# layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 185 |
+
"\n",
|
| 186 |
+
"layer = Flatten()(layer)\n",
|
| 187 |
+
"\n",
|
| 188 |
+
"input2 = Input(x2_chs_inp.shape[1:])\n",
|
| 189 |
+
"layer = Concatenate()([layer, input2])\n",
|
| 190 |
+
"\n",
|
| 191 |
+
"layer = Dense(256, 'relu')(layer)\n",
|
| 192 |
+
"\n",
|
| 193 |
+
"layer = Dense(128, 'relu')(layer)\n",
|
| 194 |
+
"\n",
|
| 195 |
+
"layer = Dense(32, 'relu')(layer)\n",
|
| 196 |
+
"\n",
|
| 197 |
+
"layer = Dense(5, 'relu')(layer)\n",
|
| 198 |
+
"\n",
|
| 199 |
+
"out = Dense(1, 'linear')(layer)\n",
|
| 200 |
+
"\n",
|
| 201 |
+
"input_layers = [input1, input2]\n",
|
| 202 |
+
"\n",
|
| 203 |
+
"model = Model(inputs=input_layers, outputs=out)\n",
|
| 204 |
+
"\n",
|
| 205 |
+
"model.compile(optimizer='adam', loss='mse')\n",
|
| 206 |
+
"\n",
|
| 207 |
+
"print(model.summary())"
|
| 208 |
+
]
|
| 209 |
+
},
|
| 210 |
+
{
|
| 211 |
+
"cell_type": "code",
|
| 212 |
+
"execution_count": 7,
|
| 213 |
+
"metadata": {
|
| 214 |
+
"executionInfo": {
|
| 215 |
+
"elapsed": 27,
|
| 216 |
+
"status": "ok",
|
| 217 |
+
"timestamp": 1635187416814,
|
| 218 |
+
"user": {
|
| 219 |
+
"displayName": "mostafa lotfi",
|
| 220 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 221 |
+
"userId": "11325640182532482321"
|
| 222 |
+
},
|
| 223 |
+
"user_tz": -210
|
| 224 |
+
},
|
| 225 |
+
"id": "woulgNfGVvMO"
|
| 226 |
+
},
|
| 227 |
+
"outputs": [],
|
| 228 |
+
"source": [
|
| 229 |
+
"# plot_model(model, show_shapes=True)"
|
| 230 |
+
]
|
| 231 |
+
},
|
| 232 |
+
{
|
| 233 |
+
"cell_type": "code",
|
| 234 |
+
"execution_count": 6,
|
| 235 |
+
"metadata": {},
|
| 236 |
+
"outputs": [],
|
| 237 |
+
"source": [
|
| 238 |
+
"models_dir = path2root + models_fol\n",
|
| 239 |
+
"if not os.path.exists(models_dir):\n",
|
| 240 |
+
" os.mkdir(models_dir)\n",
|
| 241 |
+
"\n",
|
| 242 |
+
"models_et_dir = models_dir + models_et_fol\n",
|
| 243 |
+
"if not os.path.exists(models_et_dir):\n",
|
| 244 |
+
" os.mkdir(models_et_dir)\n",
|
| 245 |
+
"\n",
|
| 246 |
+
"raw_dir = models_et_dir + raw_fol\n",
|
| 247 |
+
"if not os.path.exists(raw_dir):\n",
|
| 248 |
+
" os.mkdir(raw_dir)"
|
| 249 |
+
]
|
| 250 |
+
},
|
| 251 |
+
{
|
| 252 |
+
"cell_type": "code",
|
| 253 |
+
"execution_count": 15,
|
| 254 |
+
"metadata": {
|
| 255 |
+
"colab": {
|
| 256 |
+
"base_uri": "https://localhost:8080/"
|
| 257 |
+
},
|
| 258 |
+
"executionInfo": {
|
| 259 |
+
"elapsed": 3087,
|
| 260 |
+
"status": "ok",
|
| 261 |
+
"timestamp": 1635187419881,
|
| 262 |
+
"user": {
|
| 263 |
+
"displayName": "mostafa lotfi",
|
| 264 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 265 |
+
"userId": "11325640182532482321"
|
| 266 |
+
},
|
| 267 |
+
"user_tz": -210
|
| 268 |
+
},
|
| 269 |
+
"id": "Oa3y4Mfyg7g7",
|
| 270 |
+
"outputId": "b083e78a-bb99-4410-b237-70205bdbc05e"
|
| 271 |
+
},
|
| 272 |
+
"outputs": [],
|
| 273 |
+
"source": [
|
| 274 |
+
"models_numbers = []\n",
|
| 275 |
+
"models_name = os.listdir(raw_dir)\n",
|
| 276 |
+
"if models_name:\n",
|
| 277 |
+
" for model_name in models_name:\n",
|
| 278 |
+
" model_num = int(model_name[5:6])\n",
|
| 279 |
+
" models_numbers.append(model_num)\n",
|
| 280 |
+
" max_num = max(models_numbers)\n",
|
| 281 |
+
"else:\n",
|
| 282 |
+
" max_num = 0\n",
|
| 283 |
+
"\n",
|
| 284 |
+
"model.save(raw_dir + f\"model{max_num+1}-hrz.h5\")\n",
|
| 285 |
+
"model.save(raw_dir + f\"model{max_num+1}-vrt.h5\")"
|
| 286 |
+
]
|
| 287 |
+
}
|
| 288 |
+
],
|
| 289 |
+
"metadata": {
|
| 290 |
+
"colab": {
|
| 291 |
+
"authorship_tag": "ABX9TyNJ55D+JOVdA6fiv2ayAPT8",
|
| 292 |
+
"collapsed_sections": [],
|
| 293 |
+
"mount_file_id": "1mEGliX-rV2YkSx8ASgAVoGgB28BGXYz1",
|
| 294 |
+
"name": "crt_emp_2mdl_et.ipynb",
|
| 295 |
+
"provenance": []
|
| 296 |
+
},
|
| 297 |
+
"kernelspec": {
|
| 298 |
+
"display_name": "venv",
|
| 299 |
+
"language": "python",
|
| 300 |
+
"name": "venv"
|
| 301 |
+
},
|
| 302 |
+
"language_info": {
|
| 303 |
+
"codemirror_mode": {
|
| 304 |
+
"name": "ipython",
|
| 305 |
+
"version": 3
|
| 306 |
+
},
|
| 307 |
+
"file_extension": ".py",
|
| 308 |
+
"mimetype": "text/x-python",
|
| 309 |
+
"name": "python",
|
| 310 |
+
"nbconvert_exporter": "python",
|
| 311 |
+
"pygments_lexer": "ipython3",
|
| 312 |
+
"version": "3.9.10"
|
| 313 |
+
}
|
| 314 |
+
},
|
| 315 |
+
"nbformat": 4,
|
| 316 |
+
"nbformat_minor": 1
|
| 317 |
+
}
|
codes/jupyter_notebook/crt_emp_mdl_io.ipynb
ADDED
|
@@ -0,0 +1,334 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"metadata": {
|
| 7 |
+
"executionInfo": {
|
| 8 |
+
"elapsed": 3115,
|
| 9 |
+
"status": "ok",
|
| 10 |
+
"timestamp": 1634906898989,
|
| 11 |
+
"user": {
|
| 12 |
+
"displayName": "mostafa lotfi",
|
| 13 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 14 |
+
"userId": "11325640182532482321"
|
| 15 |
+
},
|
| 16 |
+
"user_tz": -210
|
| 17 |
+
},
|
| 18 |
+
"id": "gdpcJ0ywSIY8"
|
| 19 |
+
},
|
| 20 |
+
"outputs": [],
|
| 21 |
+
"source": [
|
| 22 |
+
"from tensorflow.keras.layers import (Input, Conv2D, Flatten, MaxPooling2D,\n",
|
| 23 |
+
" Dense, Dropout, Concatenate)\n",
|
| 24 |
+
"from tensorflow.keras.models import Model\n",
|
| 25 |
+
"from tensorflow.keras.utils import plot_model\n",
|
| 26 |
+
"import numpy as np\n",
|
| 27 |
+
"import os\n",
|
| 28 |
+
"import pickle"
|
| 29 |
+
]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"cell_type": "code",
|
| 33 |
+
"execution_count": 2,
|
| 34 |
+
"metadata": {
|
| 35 |
+
"executionInfo": {
|
| 36 |
+
"elapsed": 28,
|
| 37 |
+
"status": "ok",
|
| 38 |
+
"timestamp": 1634906898992,
|
| 39 |
+
"user": {
|
| 40 |
+
"displayName": "mostafa lotfi",
|
| 41 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 42 |
+
"userId": "11325640182532482321"
|
| 43 |
+
},
|
| 44 |
+
"user_tz": -210
|
| 45 |
+
},
|
| 46 |
+
"id": "YGlf78zqN8xb"
|
| 47 |
+
},
|
| 48 |
+
"outputs": [],
|
| 49 |
+
"source": [
|
| 50 |
+
"# # Uncomment this box if the file is in google colab\n",
|
| 51 |
+
"# path2root = \"/content/drive/MyDrive/Projects/EyeTracker/\""
|
| 52 |
+
]
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"cell_type": "code",
|
| 56 |
+
"execution_count": 2,
|
| 57 |
+
"metadata": {},
|
| 58 |
+
"outputs": [],
|
| 59 |
+
"source": [
|
| 60 |
+
"# Uncomment this box if the file is in your local directory\n",
|
| 61 |
+
"path2root = \"../../\""
|
| 62 |
+
]
|
| 63 |
+
},
|
| 64 |
+
{
|
| 65 |
+
"cell_type": "code",
|
| 66 |
+
"execution_count": 3,
|
| 67 |
+
"metadata": {},
|
| 68 |
+
"outputs": [],
|
| 69 |
+
"source": [
|
| 70 |
+
"subjects_fol = \"subjects/\"\n",
|
| 71 |
+
"data_boi_fol = \"data-boi/\"\n",
|
| 72 |
+
"models_fol = \"models/\"\n",
|
| 73 |
+
"models_boi_fol = \"boi/\"\n",
|
| 74 |
+
"raw_fol = \"raw/\"\n",
|
| 75 |
+
"# chosen_inputs = [0, 1, 2, 6, 7, 8, 9]\n",
|
| 76 |
+
"chosen_inputs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"
|
| 77 |
+
]
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"cell_type": "code",
|
| 81 |
+
"execution_count": 4,
|
| 82 |
+
"metadata": {
|
| 83 |
+
"executionInfo": {
|
| 84 |
+
"elapsed": 1534,
|
| 85 |
+
"status": "ok",
|
| 86 |
+
"timestamp": 1634906900506,
|
| 87 |
+
"user": {
|
| 88 |
+
"displayName": "mostafa lotfi",
|
| 89 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 90 |
+
"userId": "11325640182532482321"
|
| 91 |
+
},
|
| 92 |
+
"user_tz": -210
|
| 93 |
+
},
|
| 94 |
+
"id": "BBr2Jp0jRp-N"
|
| 95 |
+
},
|
| 96 |
+
"outputs": [],
|
| 97 |
+
"source": [
|
| 98 |
+
"data_boi_dir = path2root + subjects_fol + f\"{3}/\" + data_boi_fol\n",
|
| 99 |
+
"\n",
|
| 100 |
+
"with open(data_boi_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 101 |
+
" x1 = pickle.load(f)\n",
|
| 102 |
+
"with open(data_boi_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 103 |
+
" x2 = pickle.load(f)\n",
|
| 104 |
+
"with open(data_boi_dir + \"y.pickle\", \"rb\") as f:\n",
|
| 105 |
+
" y = pickle.load(f)\n",
|
| 106 |
+
"\n",
|
| 107 |
+
"x2_chs_inp = x2[:, chosen_inputs]"
|
| 108 |
+
]
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"cell_type": "code",
|
| 112 |
+
"execution_count": 10,
|
| 113 |
+
"metadata": {
|
| 114 |
+
"colab": {
|
| 115 |
+
"base_uri": "https://localhost:8080/"
|
| 116 |
+
},
|
| 117 |
+
"executionInfo": {
|
| 118 |
+
"elapsed": 1366,
|
| 119 |
+
"status": "ok",
|
| 120 |
+
"timestamp": 1634906901865,
|
| 121 |
+
"user": {
|
| 122 |
+
"displayName": "mostafa lotfi",
|
| 123 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 124 |
+
"userId": "11325640182532482321"
|
| 125 |
+
},
|
| 126 |
+
"user_tz": -210
|
| 127 |
+
},
|
| 128 |
+
"id": "4PX0-j2gRhWL",
|
| 129 |
+
"outputId": "fa4469a0-3fe3-4b6b-93b7-e3b150335149"
|
| 130 |
+
},
|
| 131 |
+
"outputs": [
|
| 132 |
+
{
|
| 133 |
+
"name": "stdout",
|
| 134 |
+
"output_type": "stream",
|
| 135 |
+
"text": [
|
| 136 |
+
"Model: \"model_2\"\n",
|
| 137 |
+
"__________________________________________________________________________________________________\n",
|
| 138 |
+
"Layer (type) Output Shape Param # Connected to \n",
|
| 139 |
+
"==================================================================================================\n",
|
| 140 |
+
"input_5 (InputLayer) [(None, 100, 100, 1) 0 \n",
|
| 141 |
+
"__________________________________________________________________________________________________\n",
|
| 142 |
+
"conv2d_8 (Conv2D) (None, 100, 100, 16) 1952 input_5[0][0] \n",
|
| 143 |
+
"__________________________________________________________________________________________________\n",
|
| 144 |
+
"max_pooling2d_8 (MaxPooling2D) (None, 50, 50, 16) 0 conv2d_8[0][0] \n",
|
| 145 |
+
"__________________________________________________________________________________________________\n",
|
| 146 |
+
"conv2d_9 (Conv2D) (None, 50, 50, 32) 25120 max_pooling2d_8[0][0] \n",
|
| 147 |
+
"__________________________________________________________________________________________________\n",
|
| 148 |
+
"max_pooling2d_9 (MaxPooling2D) (None, 25, 25, 32) 0 conv2d_9[0][0] \n",
|
| 149 |
+
"__________________________________________________________________________________________________\n",
|
| 150 |
+
"conv2d_10 (Conv2D) (None, 25, 25, 64) 51264 max_pooling2d_9[0][0] \n",
|
| 151 |
+
"__________________________________________________________________________________________________\n",
|
| 152 |
+
"max_pooling2d_10 (MaxPooling2D) (None, 12, 12, 64) 0 conv2d_10[0][0] \n",
|
| 153 |
+
"__________________________________________________________________________________________________\n",
|
| 154 |
+
"conv2d_11 (Conv2D) (None, 12, 12, 128) 73856 max_pooling2d_10[0][0] \n",
|
| 155 |
+
"__________________________________________________________________________________________________\n",
|
| 156 |
+
"max_pooling2d_11 (MaxPooling2D) (None, 6, 6, 128) 0 conv2d_11[0][0] \n",
|
| 157 |
+
"__________________________________________________________________________________________________\n",
|
| 158 |
+
"flatten_2 (Flatten) (None, 4608) 0 max_pooling2d_11[0][0] \n",
|
| 159 |
+
"__________________________________________________________________________________________________\n",
|
| 160 |
+
"input_6 (InputLayer) [(None, 10)] 0 \n",
|
| 161 |
+
"__________________________________________________________________________________________________\n",
|
| 162 |
+
"concatenate_2 (Concatenate) (None, 4618) 0 flatten_2[0][0] \n",
|
| 163 |
+
" input_6[0][0] \n",
|
| 164 |
+
"__________________________________________________________________________________________________\n",
|
| 165 |
+
"dense_11 (Dense) (None, 256) 1182464 concatenate_2[0][0] \n",
|
| 166 |
+
"__________________________________________________________________________________________________\n",
|
| 167 |
+
"dense_12 (Dense) (None, 128) 32896 dense_11[0][0] \n",
|
| 168 |
+
"__________________________________________________________________________________________________\n",
|
| 169 |
+
"dense_13 (Dense) (None, 32) 4128 dense_12[0][0] \n",
|
| 170 |
+
"__________________________________________________________________________________________________\n",
|
| 171 |
+
"dense_14 (Dense) (None, 8) 264 dense_13[0][0] \n",
|
| 172 |
+
"__________________________________________________________________________________________________\n",
|
| 173 |
+
"dense_15 (Dense) (None, 1) 9 dense_14[0][0] \n",
|
| 174 |
+
"==================================================================================================\n",
|
| 175 |
+
"Total params: 1,371,953\n",
|
| 176 |
+
"Trainable params: 1,371,953\n",
|
| 177 |
+
"Non-trainable params: 0\n",
|
| 178 |
+
"__________________________________________________________________________________________________\n",
|
| 179 |
+
"None\n"
|
| 180 |
+
]
|
| 181 |
+
}
|
| 182 |
+
],
|
| 183 |
+
"source": [
|
| 184 |
+
"input1 = Input(x1.shape[1:])\n",
|
| 185 |
+
"layer = Conv2D(16, (11, 11), (1, 1), \"same\", activation=\"relu\")(input1)\n",
|
| 186 |
+
"layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 187 |
+
"\n",
|
| 188 |
+
"layer = Conv2D(32, (7, 7), (1, 1), \"same\", activation=\"relu\")(layer)\n",
|
| 189 |
+
"layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 190 |
+
"\n",
|
| 191 |
+
"layer = Conv2D(64, (5, 5), (1, 1), \"same\", activation=\"relu\")(layer)\n",
|
| 192 |
+
"layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 193 |
+
"\n",
|
| 194 |
+
"layer = Conv2D(128, (3, 3), (1, 1), \"same\", activation=\"relu\")(layer)\n",
|
| 195 |
+
"layer = MaxPooling2D((2, 2), (2, 2))(layer)\n",
|
| 196 |
+
"\n",
|
| 197 |
+
"layer = Flatten()(layer)\n",
|
| 198 |
+
"\n",
|
| 199 |
+
"layer = Dense(256, \"relu\")(layer)\n",
|
| 200 |
+
"\n",
|
| 201 |
+
"input2 = Input(x2_chs_inp.shape[1:])\n",
|
| 202 |
+
"layer = Concatenate()([layer, input2])\n",
|
| 203 |
+
"\n",
|
| 204 |
+
"layer = Dense(128, \"relu\")(layer)\n",
|
| 205 |
+
"\n",
|
| 206 |
+
"# layer = Dense(64, \"relu\")(layer)\n",
|
| 207 |
+
"\n",
|
| 208 |
+
"layer = Dense(32, \"relu\")(layer)\n",
|
| 209 |
+
"\n",
|
| 210 |
+
"layer = Dense(8, \"relu\")(layer)\n",
|
| 211 |
+
"\n",
|
| 212 |
+
"output_layer = Dense(1, \"sigmoid\")(layer)\n",
|
| 213 |
+
"\n",
|
| 214 |
+
"input_layers = [input1, input2]\n",
|
| 215 |
+
"\n",
|
| 216 |
+
"model = Model(inputs=input_layers, outputs=output_layer)\n",
|
| 217 |
+
"\n",
|
| 218 |
+
"model.compile(optimizer=\"adam\", loss=\"binary_crossentropy\")\n",
|
| 219 |
+
"\n",
|
| 220 |
+
"print(model.summary())"
|
| 221 |
+
]
|
| 222 |
+
},
|
| 223 |
+
{
|
| 224 |
+
"cell_type": "code",
|
| 225 |
+
"execution_count": 7,
|
| 226 |
+
"metadata": {
|
| 227 |
+
"colab": {
|
| 228 |
+
"base_uri": "https://localhost:8080/",
|
| 229 |
+
"height": 1000
|
| 230 |
+
},
|
| 231 |
+
"executionInfo": {
|
| 232 |
+
"elapsed": 32,
|
| 233 |
+
"status": "ok",
|
| 234 |
+
"timestamp": 1634906901869,
|
| 235 |
+
"user": {
|
| 236 |
+
"displayName": "mostafa lotfi",
|
| 237 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 238 |
+
"userId": "11325640182532482321"
|
| 239 |
+
},
|
| 240 |
+
"user_tz": -210
|
| 241 |
+
},
|
| 242 |
+
"id": "woulgNfGVvMO",
|
| 243 |
+
"outputId": "1357e75d-a130-47b9-8b8a-a283a28ef9ee"
|
| 244 |
+
},
|
| 245 |
+
"outputs": [],
|
| 246 |
+
"source": [
|
| 247 |
+
"# plot_model(model, show_shapes=True)"
|
| 248 |
+
]
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"cell_type": "code",
|
| 252 |
+
"execution_count": 6,
|
| 253 |
+
"metadata": {
|
| 254 |
+
"colab": {
|
| 255 |
+
"base_uri": "https://localhost:8080/"
|
| 256 |
+
},
|
| 257 |
+
"executionInfo": {
|
| 258 |
+
"elapsed": 1600,
|
| 259 |
+
"status": "ok",
|
| 260 |
+
"timestamp": 1634906903452,
|
| 261 |
+
"user": {
|
| 262 |
+
"displayName": "mostafa lotfi",
|
| 263 |
+
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gj4ilTay6rvtZq-saddDY_b1Sar82sct4P4Z2d_Wg=s64",
|
| 264 |
+
"userId": "11325640182532482321"
|
| 265 |
+
},
|
| 266 |
+
"user_tz": -210
|
| 267 |
+
},
|
| 268 |
+
"id": "Oa3y4Mfyg7g7",
|
| 269 |
+
"outputId": "cf277369-f69c-4b36-9140-c5c0480876e0"
|
| 270 |
+
},
|
| 271 |
+
"outputs": [],
|
| 272 |
+
"source": [
|
| 273 |
+
"models_dir = path2root + models_fol\n",
|
| 274 |
+
"if not os.path.exists(models_dir):\n",
|
| 275 |
+
" os.mkdir(models_dir)\n",
|
| 276 |
+
"\n",
|
| 277 |
+
"models_boi_dir = models_dir + models_boi_fol\n",
|
| 278 |
+
"if not os.path.exists(models_boi_dir):\n",
|
| 279 |
+
" os.mkdir(models_boi_dir)\n",
|
| 280 |
+
"\n",
|
| 281 |
+
"raw_dir = models_boi_dir + raw_fol\n",
|
| 282 |
+
"if not os.path.exists(raw_dir):\n",
|
| 283 |
+
" os.mkdir(raw_dir)"
|
| 284 |
+
]
|
| 285 |
+
},
|
| 286 |
+
{
|
| 287 |
+
"cell_type": "code",
|
| 288 |
+
"execution_count": 11,
|
| 289 |
+
"metadata": {},
|
| 290 |
+
"outputs": [],
|
| 291 |
+
"source": [
|
| 292 |
+
"models_numbers = []\n",
|
| 293 |
+
"models_name = os.listdir(raw_dir)\n",
|
| 294 |
+
"if models_name:\n",
|
| 295 |
+
" for model_name in models_name:\n",
|
| 296 |
+
" model_num = int(model_name[5:-3])\n",
|
| 297 |
+
" models_numbers.append(model_num)\n",
|
| 298 |
+
" max_num = max(models_numbers)\n",
|
| 299 |
+
"else:\n",
|
| 300 |
+
" max_num = 0\n",
|
| 301 |
+
"\n",
|
| 302 |
+
"model.save(raw_dir + f\"model{max_num+1}.h5\")"
|
| 303 |
+
]
|
| 304 |
+
}
|
| 305 |
+
],
|
| 306 |
+
"metadata": {
|
| 307 |
+
"colab": {
|
| 308 |
+
"authorship_tag": "ABX9TyO+imeiHVHbkQtIcxwp1CO/",
|
| 309 |
+
"collapsed_sections": [],
|
| 310 |
+
"mount_file_id": "1uCQ0LdEXc_1zKNjksNB7MFXCz9JZIiOY",
|
| 311 |
+
"name": "crt_mdl_in_blink_out.ipynb",
|
| 312 |
+
"provenance": []
|
| 313 |
+
},
|
| 314 |
+
"kernelspec": {
|
| 315 |
+
"display_name": "venv",
|
| 316 |
+
"language": "python",
|
| 317 |
+
"name": "venv"
|
| 318 |
+
},
|
| 319 |
+
"language_info": {
|
| 320 |
+
"codemirror_mode": {
|
| 321 |
+
"name": "ipython",
|
| 322 |
+
"version": 3
|
| 323 |
+
},
|
| 324 |
+
"file_extension": ".py",
|
| 325 |
+
"mimetype": "text/x-python",
|
| 326 |
+
"name": "python",
|
| 327 |
+
"nbconvert_exporter": "python",
|
| 328 |
+
"pygments_lexer": "ipython3",
|
| 329 |
+
"version": "3.9.6"
|
| 330 |
+
}
|
| 331 |
+
},
|
| 332 |
+
"nbformat": 4,
|
| 333 |
+
"nbformat_minor": 1
|
| 334 |
+
}
|
codes/jupyter_notebook/detect_blink.ipynb
ADDED
|
@@ -0,0 +1,333 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"id": "da79030b",
|
| 7 |
+
"metadata": {},
|
| 8 |
+
"outputs": [],
|
| 9 |
+
"source": [
|
| 10 |
+
"import mediapipe as mp\n",
|
| 11 |
+
"import cv2\n",
|
| 12 |
+
"import numpy as np\n",
|
| 13 |
+
"import time"
|
| 14 |
+
]
|
| 15 |
+
},
|
| 16 |
+
{
|
| 17 |
+
"cell_type": "code",
|
| 18 |
+
"execution_count": 2,
|
| 19 |
+
"id": "cd77dedc",
|
| 20 |
+
"metadata": {},
|
| 21 |
+
"outputs": [
|
| 22 |
+
{
|
| 23 |
+
"name": "stdout",
|
| 24 |
+
"output_type": "stream",
|
| 25 |
+
"text": [
|
| 26 |
+
"6\n"
|
| 27 |
+
]
|
| 28 |
+
}
|
| 29 |
+
],
|
| 30 |
+
"source": [
|
| 31 |
+
"th1 = 10\n",
|
| 32 |
+
"th2 = 7\n",
|
| 33 |
+
"blinking_time = 0.4\n",
|
| 34 |
+
"sampling_period = 1/15\n",
|
| 35 |
+
"n_smp_blink = round(blinking_time/sampling_period)\n",
|
| 36 |
+
"print(n_smp_blink)"
|
| 37 |
+
]
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"cell_type": "code",
|
| 41 |
+
"execution_count": 4,
|
| 42 |
+
"id": "0ee09c2b",
|
| 43 |
+
"metadata": {},
|
| 44 |
+
"outputs": [],
|
| 45 |
+
"source": [
|
| 46 |
+
"cap = cv2.VideoCapture(2)\n",
|
| 47 |
+
"face_mesh = mp.solutions.face_mesh.FaceMesh(\n",
|
| 48 |
+
" static_image_mode=False,\n",
|
| 49 |
+
" min_tracking_confidence=0.5,\n",
|
| 50 |
+
" min_detection_confidence=0.5\n",
|
| 51 |
+
")\n",
|
| 52 |
+
"\n",
|
| 53 |
+
"fw = cap.get(cv2.CAP_PROP_FRAME_WIDTH)\n",
|
| 54 |
+
"fh = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)\n",
|
| 55 |
+
"fs = fw, fh\n",
|
| 56 |
+
"t_vec = []\n",
|
| 57 |
+
"eye_ratio = []\n",
|
| 58 |
+
"t0 = time.perf_counter()\n",
|
| 59 |
+
"i = 0\n",
|
| 60 |
+
"bv = 0\n",
|
| 61 |
+
"while True:\n",
|
| 62 |
+
" success, frame = cap.read()\n",
|
| 63 |
+
" frame = cv2.flip(frame,1)\n",
|
| 64 |
+
" frame_rgb = cv2.cvtColor(frame,cv2.COLOR_BGR2RGB)\n",
|
| 65 |
+
" if success:\n",
|
| 66 |
+
" results = face_mesh.process(frame_rgb)\n",
|
| 67 |
+
" mfl = results.multi_face_landmarks\n",
|
| 68 |
+
" if mfl:\n",
|
| 69 |
+
" all_landmarks = np.array([(lm.x, lm.y) for lm in mfl[0].landmark])\n",
|
| 70 |
+
" wl = np.sqrt(((all_landmarks[33]-all_landmarks[133])**2).sum())\n",
|
| 71 |
+
" hl1 = np.sqrt(((all_landmarks[159]-all_landmarks[145])**2).sum())\n",
|
| 72 |
+
" hl2 = np.sqrt(((all_landmarks[158]-all_landmarks[153])**2).sum())\n",
|
| 73 |
+
" hl = (hl1 + hl2) / 2\n",
|
| 74 |
+
" \n",
|
| 75 |
+
" wr = np.sqrt(((all_landmarks[362]-all_landmarks[263])**2).sum())\n",
|
| 76 |
+
" hr1 = np.sqrt(((all_landmarks[385]-all_landmarks[380])**2).sum())\n",
|
| 77 |
+
" hr2 = np.sqrt(((all_landmarks[386]-all_landmarks[374])**2).sum())\n",
|
| 78 |
+
" hr = (hr1 + hr2) / 2\n",
|
| 79 |
+
" \n",
|
| 80 |
+
" t_vec.append(round(time.perf_counter() - t0, 3))\n",
|
| 81 |
+
" eye_ratio.append((wl / hl + wr / hr) / 2)\n",
|
| 82 |
+
" \n",
|
| 83 |
+
" if i>=1:\n",
|
| 84 |
+
" bv = (eye_ratio[-1]-eye_ratio[-2]) / (t_vec[-1]-t_vec[-2])\n",
|
| 85 |
+
"# print((all_landmarks[33]-all_landmarks[133])**2)\n",
|
| 86 |
+
"# vec.append(np.abs(all_landmarks-all_landmarks[0]).sum().sum()/all_landmarks.shape[0])\n",
|
| 87 |
+
" all_landmarks_pix = np.array(all_landmarks * fs, np.uint32)\n",
|
| 88 |
+
" \n",
|
| 89 |
+
"# for pix in all_landmarks_pix:\n",
|
| 90 |
+
"# cv2.circle(frame, pix, 2, (0, 0, 255), cv2.FILLED)\n",
|
| 91 |
+
"# print(all_landmarks[0].shape)\n",
|
| 92 |
+
"# break\n",
|
| 93 |
+
"# cv2.circle(frame, int(all_landmarks[0, 0]), int(all_landmarks[0, 1]), 5, (0, 0, 255), cv2.FILLED)\n",
|
| 94 |
+
"# print(all_landmarks[0])\n",
|
| 95 |
+
" if bv >= th1 or bv <= -th1:\n",
|
| 96 |
+
" cv2.imshow(\"Webcam\", frame[:2])\n",
|
| 97 |
+
" else:\n",
|
| 98 |
+
" cv2.imshow(\"Webcam\", frame)\n",
|
| 99 |
+
" q = cv2.waitKey(1)\n",
|
| 100 |
+
" if q == ord('q'):\n",
|
| 101 |
+
" break\n",
|
| 102 |
+
" i += 1\n",
|
| 103 |
+
"\n",
|
| 104 |
+
"t_vec = np.array(t_vec)\n",
|
| 105 |
+
"eye_ratio = np.array(eye_ratio)\n",
|
| 106 |
+
"cv2.destroyAllWindows()\n",
|
| 107 |
+
"cap.release()"
|
| 108 |
+
]
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"cell_type": "code",
|
| 112 |
+
"execution_count": 5,
|
| 113 |
+
"id": "7c94eeb0",
|
| 114 |
+
"metadata": {},
|
| 115 |
+
"outputs": [
|
| 116 |
+
{
|
| 117 |
+
"name": "stdout",
|
| 118 |
+
"output_type": "stream",
|
| 119 |
+
"text": [
|
| 120 |
+
"30.706434901829933\n"
|
| 121 |
+
]
|
| 122 |
+
}
|
| 123 |
+
],
|
| 124 |
+
"source": [
|
| 125 |
+
"dt = 1 / (t_vec[1:] - t_vec[:-1])\n",
|
| 126 |
+
"print(dt.mean())\n",
|
| 127 |
+
"# print(t_vec)"
|
| 128 |
+
]
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"cell_type": "code",
|
| 132 |
+
"execution_count": 6,
|
| 133 |
+
"id": "221c52c9",
|
| 134 |
+
"metadata": {},
|
| 135 |
+
"outputs": [],
|
| 136 |
+
"source": [
|
| 137 |
+
"eye_ratio_v = eye_ratio.copy()\n",
|
| 138 |
+
"eye_ratio_v[1:] = (eye_ratio[1:] - eye_ratio[:-1]) / (t_vec[1:] - t_vec[:-1])\n",
|
| 139 |
+
"eye_ratio_v[0] = eye_ratio_v[1]"
|
| 140 |
+
]
|
| 141 |
+
},
|
| 142 |
+
{
|
| 143 |
+
"cell_type": "code",
|
| 144 |
+
"execution_count": 7,
|
| 145 |
+
"id": "d9cce07e",
|
| 146 |
+
"metadata": {},
|
| 147 |
+
"outputs": [],
|
| 148 |
+
"source": [
|
| 149 |
+
"import matplotlib.pyplot as plt"
|
| 150 |
+
]
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"cell_type": "code",
|
| 154 |
+
"execution_count": 8,
|
| 155 |
+
"id": "dfb997ea",
|
| 156 |
+
"metadata": {},
|
| 157 |
+
"outputs": [
|
| 158 |
+
{
|
| 159 |
+
"data": {
|
| 160 |
+
"text/plain": [
|
| 161 |
+
"[<matplotlib.lines.Line2D at 0x1d80031ee20>]"
|
| 162 |
+
]
|
| 163 |
+
},
|
| 164 |
+
"execution_count": 8,
|
| 165 |
+
"metadata": {},
|
| 166 |
+
"output_type": "execute_result"
|
| 167 |
+
},
|
| 168 |
+
{
|
| 169 |
+
"data": {
|
| 170 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAABUq0lEQVR4nO2deZgcVbn/v29v07NnmclCEjIhCQkhkECGfQ+yC+gF9V4RBfVyuRcVrrjgBor6U0TwLiqICiiXRTYFFBSUsENCErKQlex7MplkMvtML+f3R9WpOlVd1XWquqZ7pud8nidPd7pPd5/pPvXWW9/zLsQYg0KhUCiGPpFST0ChUCgU4aAMukKhUJQJyqArFApFmaAMukKhUJQJyqArFApFmRAr1Qc3NDSwpqamUn28QqFQDEmWLFmynzHW6PRcyQx6U1MTFi9eXKqPVygUiiEJEW11e05JLgqFQlEmKIOuUCgUZYIy6AqFQlEmKIOuUCgUZYIy6AqFQlEmKIOuUCgUZYIy6AqFQlEmKIOuUAxhtrV247X1LaWehmKQULLEIoVCUThn/XQBGAO2/PiSUk9FMQhQHrpCMYRR/WkUIsqgKxQKRZmgDLpCoVCUCcqgKxRlQDartBeFMugKRVmQymZLPQXFIEAZdIWiDEhllIeukDDoRJQkokVEtJyIVhHR9/KMvYKIGBE1hztNhUKRj3RGeegKuTj0PgDzGWOdRBQH8AYRvcAYe0ccRES1AG4EsHAA5qlQKPKgPHQFIOGhM41O/b9x/Z/T6vk+gDsA9IY3PYVCIUNaaegKSGroRBQlomUA9gF4iTG20Pb88QAmMcb+4vE+1xHRYiJa3NKi0pUVikIh0m5TaeWhF0JvKlPqKYSClEFnjGUYY3MBTARwIhHN5s8RUQTA3QBulnif+xhjzYyx5sZGxx6nCoXCB7GIZtFVlEtw1u3pwMzv/BXPr9xd6qkUjK8oF8ZYG4AFAC4UHq4FMBvAK0S0BcDJAJ5VG6MKxcAT1Q16WmnogVm0uRUA8MaG/SWeSeHIRLk0EtEI/X4lgPMArOXPM8YOMcYaGGNNjLEmAO8AuIwxtnhgpqxQKDixiHYIpySiXNbv7cCCdfsGekpDjkM9KQDAiMp4iWdSODJRLuMB/I6IotBOAI8zxv5MRLcDWMwYe3ZAZ6hQKFzhHrqMQT//Z68BUJUZ7bR16wa9ahgYdMbYCgDHOTx+q8v4swuflkKhkCEe1SUXlfofmDbdQ6+uGPrVxFWmqEIxhPHjoSuc4R56pgxOisqgKxRDGK6hq03R4Bzq6QdQHt+hMugh8sCbm/HlPywr9TQUwwjloRcO3xQNKzlrb3svLv3fN7DnUPFzLJVBD5HvPbcaT7+3s9TTUAwjYlFu0Ie+d1kquNQS1nf4+LvbsXLnITz0zpZQ3s8PyqCHBFO9wBQlgCcWqdT/4ET0dNuwNPQR1QkAwEFdmy8myqCHREtHX6jv97OX1uP6h5aE+p6K8iMaQENXzTCs8PIJYVWsHFWlG/Su/lDezw9DP05nkNATci2I//7HBwA0r4HrpAqFHe6h9/swRhnGEIFaUxx+cR009PPtja2YdVgd6vXEpOqKKADgQAkMuvLQQ2KgNqU27Ov0HqQYtgRJ/S+HaI4wyegWPYhB7+pL419+/Q7+7SEzMZ6/S5uSXIYufWnToIehpzfUVAAAtrR2FfxeivIliIau9HYr/HAN4pTx43759kPC+2lveLBbeehDln7BoIchUfIMwHJIdlAMHGbYovw68bumdrb1GKF95UhWN8BBjjV+3IsnA36C6OhNFz45nyiDHhKiQQ/DA+I77yqlW5EPbtAzPtac3/C80378Ms696xVfrxlKcIMeJGyxL63tnYnHqanJF/9KSBn0kBA3pcLwqvXgBV8HqmL4YYbcyb8myPrc31l8+aBY8EMsSJSL6Mhx+LdbitwAZdBDQvxhw/gho6TqXCu8iQTw0JWGboUVIL
n0ORn0EuakKIMeEqJBD8dDVxq6whse0erHuVROghV+iKUCGfTccOVSHrLKoIeEKLmE4QFFlYaukIBHk2d8eIVqTVnhGnoQyaUv5fQa5aEPefpC9tCjykNXSMBXhx/JRa0pK1ljEzOAh+5wEihlFRBl0EPCEuUSwiWtinJRyMCNhy/JRWnoFljIHnopj1iZnqJJIlpERMuJaBURfc9hzJeJaDURrSCifxDR5IGZ7uDFGrYYpoeuDj6FO0E8dKWhW8kWkCkqaujcBgx2D70PwHzG2BwAcwFcSEQn28a8B6CZMXYsgCcB/CTUWQ4BrGGLYcSha7fKQ1fkw4zQkH9N0DXlFKJXDnAJKsiJTpRau/q0RCI2mDV0psELisT1f8w2ZgFjrFv/7zsAJoY6yyFA2B66EeWivCmFBF5OhBhKF1RD7+4vfuZjMSgkEajP4bgXv95i71dIaehEFCWiZQD2AXiJMbYwz/DPAXjB5X2uI6LFRLS4paXF92QHM2Fr6CrKRSGDoaF7XOeL6yhomdiwK4oOFgqRXJzClcWTZ7E7SUkZdMZYhjE2F5rnfSIRzXYaR0SfAtAM4E6X97mPMdbMGGtubGwMOOXisWrXISzb3iY11hq2WLgRNrVRZdAV7vDLey+7IToZQddnua5FI8qlgNR/wPmk6pR4NJD4inJhjLUBWADgQvtzRPQhAN8CcBljLNxuDyUglcnikv95Ax/5xZtS461n6sJ/RFaA16AYPphRLvnXnCgnBDXM5dqUy6zlUliUS9bw0M3ni73vIBPl0khEI/T7lQDOA7DWNuY4AL+CZsz3DcA8i866PR2+xls89BAklyDRC4rhh2zYorgmg8oA2TK16OZ3WNimKHe+xE3RYksuMh2LxgP4HRFFoZ0AHmeM/ZmIbgewmDH2LDSJpQbAE6Rpv9sYY5cN1KSLgd8fIhPCJa1IIckOiuGDKbl4eeiFb4qWr+QSTtgi/37En6LYHrqnQWeMrQBwnMPjtwr3PxTyvEqOX29EHB+KES6gYJBi+MCXh9dFobiOgq7Pcl2KhUguGYcTpfg1+WkNGAYqU9QFv7+DNVQpBA1dv1UeuiIvkhp6NoSwxXKVXIyTYoDvhVmO+9wol0GnoQ9X/P64om4WiobOF5mKQ1fkwZRc8q+TbAihdOVo0K0hhgEMunDc8+9HeeiDEL81jcXh4YQtqigXhTeym6JOnqRfynF/3ipF+f8DHY/7wRzlMlzxU44UsJ4AQjHokpfSiuGNbDRUGA5HOXroFqk0gIfulBUahrwVFGXQXRB/CBlvPWwNXUW5KGQwarl4LBOrJKgkF45Figp03ObfFC328asMugt+z7IMQCKqfZ3haOgqykXhjayHng3ooYdRA2Yww/+8aIQCHbfOm6LmY0FPnkFRBt0F8XeQ2djIMoZ4NPz6K8pDV+RDNikmqGEWjVM5LkXuuMWjhHSWFbR3ZnroIYcw+0AZdBcsl2JpiR+FAfGY7qGHqqGX4VGkCA3TQ/cw6MJ9P+tTPA5K2fx4oOB/H7+69nu8Wa7kmZOHrgz6oCAr/LCyHnosoi+KEC6zVJSLQgpJac6yae/DyIgjy9G54H9SIhYF4P94E0ebtVwKi5wpBGXQXRCjXGQNeiJEyUVFuShkMDx0r03RgJv2ogdahvbcML5Bj12n6KFSngSVQXdB/CFSErGkLGTJxexEXoZHkSI0ZE/8wTdFxfvltxZND50HNPhzoJz2JpTkMgjxm1mXZUA8oA7nhKw2qhjeyNZDD7pRZ/Hsy9Kg6x56QGfMyRsPOyfFD8qguyA6PHJF6plh0EM5K6s4dIUE0h668LQ/Db28JRczyiXYscucNkWF55WGPkjIBPDQoxGAKJwfUXnoChmkwxYtCTDy69MStliGa5F/FdxD91vnxmlT1CJvKcllcCAuXpmiPYwxRIgQi1BIm6IqykXhjXTYovB0KmDYYjlnigYPW0RO/kkpk7GUQXfBEuUiIblkGUAAYpFIqBp6OX
pFivAwU//lDbqfmiUWD9TnUuxLZ3D9Q0vwy1c2+HthEcnV0P1vikYjmkF3OlaDlRMIjjLoLlg9dIkoFwDEPfQQy+cWW4NTDE28lknQBixMeF+/jsriLQfx11V7cNeL6329rpjwr6XCkFz8b4ry/BPTQzefL3b5a5meokkiWkREy4loFRF9z2FMBRH9gYg2ENFCImoakNkWEXHtysShM8ZABESjFIoR5gegctAV+TDbp3lUWxTu+1mfovbuN2yRHzeDeR/I7qH7nisDYrrkYm6KCs7gIJRc+gDMZ4zNATAXwIVEdLJtzOcAHGSMTQPwMwB3hDrLEiD+sDKSC2MIWUPPnYdCYUd2nQT10C1VRP22ZRwCa5dPkUe5+N0UFTPEs04e+mCTXJhGp/7fuP7P/ktdDuB3+v0nAZxLerfooYr/OHRmaughXmYpg67IB18dXkY6sIZeQKboUNjQt2+KBskUtW+KDvooFyKKEtEyAPsAvMQYW2gbMgHAdgBgjKUBHAIw2uF9riOixUS0uKWlpaCJDzR+PfSsHuUSDTnKpRwjCxTevLa+BV/+wzLPcXydeGczix66n9T/3M+Sxa8zsnl/F1bsaPP1mkJh9k1R3xp67qYol1zCulr3g5RBZ4xlGGNzAUwEcCIRzQ7yYYyx+xhjzYyx5sbGxiBvUTQscegy9dCZFoMeC0lDV3How5tP378IT7+3E9396bzj+Orw2swLnPrv0MBBFr/G7JyfvoLLfv6mr9cUil1y8R/lIr7WKrnEojS4wxYZY20AFgC40PbUTgCTAICIYgDqAbSGML+S4bdIPTfo4Xno2q3y0IcnDTUJAMCutt78A/Xl0Z/J5vWgg/YULaQe+lAoLMePr4qAHnqWaZ64+F6ceDQSuCF3UGSiXBqJaIR+vxLAeQDW2oY9C+Az+v0rAbzMhnglH0txLqmwRU1yiYekofPFoTz04cn4+koAwK62nrzjrNEr7mvFsinqS0N3fg8ZxMNmsJoDfnzZvWx5TMnFXsslEQ0nJ8UPMh76eAALiGgFgHehaeh/JqLbiegyfcxvAYwmog0AvgzgloGZbvGwGnTvHyUbtoeu3w6FjSVF+IyrTwIAdnoZdMnNez7Mb2kKS6aoz7Uoeuh+47uLBf/zgldbhGHQ7ZuisSgV/e+OeQ1gjK0AcJzD47cK93sBfCzcqZUWvx6NkfoflobOJRdl0IclIyrjAIC27lTeceLqSKUZkHAZJxSh8qehmxQS5dKbzhhG04tM1vR6Bxp7HLrfuHEGM1zZHrYYj0YGX9jicCWrp/RGSL44F6CdrcO5zNIll0F6qaoYWGJROY9RXB75EuD4ML8ygOhQ+F2L4uf0peQNW08q4+tzCsG+KerXgcrqCYUR4cqcbyT7PXmGgTLoLmSyQJQIsWhEqh4DP1PHI5FQU/+HwL6SYgDgWRxeWcqWrMQ8Y8VmyEFlgELCFvvS8kbaK7InTMTvRfy/LIxpNZyiRMZrTQ89nDIgfvCUXIYrWcYQiWiJQrKSC9fQQ21woTz0YQk3np4GXVgeMhp6zKcMUEj5XPE46PXjofcXz0Pn37N9Y1P69QBgq+HE3yEWUR76oCGTZbqHTnJRLgyGhh5GhTUV5TK84Usolc7/+zNmZjnKeOgJnzKAtau99MsAWDV0fx56CSQXPX3fr/+k7Z1pNZz4iZI7d2Htp/lBGXQXMlmGSIT0WFKZKBf9hw3LQy/AM1IMfbghlXEm+IZefx7jb3iNPmUAcWRhkssg1dCzNg89oOQSj0aMDVVDhgltP00eZdBd4FEr8Yich679biQt0ch8PkeFLg4/+E/utfYYY3LddiyRFwE99IIkF28jzf+OYkouYoih9n+/kgsDcTuhn7TMxwZhYtFwJaNHucSiEclMUc1Dj4WsoQMqW3Q4Iq2hw5/kokVeBNTQC5JcvD8zqRv0Ykou/Hs2Kib6llyACAHxmGm8Da89VvxNUWXQXchk9aiVKPmq5RINSUMXLbrS0Y
cfpuQioaFzyUViUzThs76ItdqiXw/dnE+fhIdeEY8CKK7kwiWWfF2H8qFVWSWr5ALNFgzK1P/hSjbLEI3oP4pMPXSYPUXD9tBVpMvww5BcPNYeA5PqtiN66H7CFi1XigUU55Lx0PmVhkx107Aw49CDhy1CvzLnv5UWm64Z+X7loQ8OMkyLcpFNDuCp/2Fp6Frh/GBeg2LokzE89PzGLSt46PmMP19BhWjofpdh1qdB50bVT0RMofC/jydyBQlbJGi/gfFb6ZJLQnnogwd+lpUPW2RGT9GwolyCxsYqhj5+4tBlNkUNrdhnKF0hxblER0jGyYmXwEM3SiLox5rvi2HGpdmIkCnKJRc52xEmyqC7kNXrScjuVBuhSqHVQzc9dCW5DD+MOHTPtcdMqUJKQ/d3BZktSEM3x8scQ9yg+wlxLBT+PQcNW+Sp/7EIGSciJurqRfxbAGXQXcnoHrLsTjWDdqZO+NQoXd+PiRs1Bb+dYogRZFM0v4au3XJPUjamvFAPXSqkUiceK4WGbpVc/Ictat54wh7loke+yDSYDxNl0F3IZnkYopyHzs/UYV1mMQTvoqIY+nAD7GXcGCBsiubT0HVpwWd3e2tjDKmXmOMz5oatzD4UL7BYik3RWEDJRfTGRcmFO3fF/FsAZdBd4SU8ZaMCzNT/kDZClIc+rDF6hYaUWGSP5pBNVhOLf/nOFGUMST0UUbbrF1DcTdEwarnYJRfeMF7z2lWUy6CAN32OS2ripoeu/YiFdmgRo1yUhj784Jf+/hKL3NcJX48VEjHr1nnkzkmWTJYhGeefJ+MU6X9zScIWg0kuWnQb5SQWQW2KDi64QY/58NAJhIRPD8j1/RA8lEox9OFLzttDl4ty4VTENI9ZdrNOdEz82qZ0liEeieiVCOVKUANF3hQ1PHT9/wFSRXmIonjM8/ou6SwratixTE/RSUS0gIhWE9EqIrrRYUw9ET1HRMv1MdcOzHSLhym5yIctRsg0woWemZkYh6489GGHIbl4VltkUnHo9mbIslJAYR56FpEI7+Ilk8tRCg9d+0wiQjRC/lP/IYQoilEueigjgHAyxyWR8dDTAG5mjM0CcDKAG4holm3MDQBWM8bmADgbwF1E5NIMa2iQYVoXEtmGFTyxyPgRPQ5ELxhUHPpwRrbaIoO2cU8enbW4LZbZQM39BP4e/iWXmM/QX6A0HnqEtO5kQaot8iv5fqEeeoTkpLCw8TTojLHdjLGl+v0OAGsATLAPA1BLRASgBsABaCeCIQv3uOMxSQ9dT/3nkkuhZ2WVWDS84cvHU+sWHIl8OjVfQjJ1XyxvL0a5BDDoWuivbKSYdluKOPQIaV66fw1dlFzsqf+6LSji3+NLQyeiJmgNoxfanvo5gKMA7AKwEsCNjLGcv4KIriOixUS0uKWlJdiMi4Rx5pUOW7R56CFshvCSnsqgDz/8eOja3k3+dWpuikal3tech/N9GdK6hy5288kHn2PQKJe3N7bi4v9+XapUL0f00KNEAcIWnSQXXm0xPFsgi7RBJ6IaAE8BuIkx1m57+gIAywAcBmAugJ8TUZ39PRhj9zHGmhljzY2NjYEnXQx4wwrZHoxM3+2OhSC52Et6qiiX4Qf/yb0Ti+TyH5jNQ5ddn5ZqiwHqoftpEsM/KqiG/q0/rsTq3e3Y0tol/Rr+mRG9IXygFnSwBk8YyUYlyHyVMuhEFIdmzB9mjD3tMORaAE8zjQ0ANgOYGd40i49YMU0mbJHpl178MquQDLGcCnDKQx92iC0I8xkZPULOs1QrjycvdthizNgUlZMtgeAGkG+8dvXJq72mh64Zdf/VFrnjpzWTZ4zpJwnyFX0UFjJRLgTgtwDWMMbudhm2DcC5+vixAGYA2BTWJEtBVr9siknGlWsbIWSclQvJ7iw02UEx9BENi5fnbWjoebxuu4Yua2TExKIgkkvUl+Si3Qb10Plxsr+zX/o1/G/SNkXJt/PEv/9EVJNrtDmYRh4o7qZoTGLMaQ
CuBrCSiJbpj30TwOEAwBi7F8D3ATxIRCuh2cGvM8b2hz/d4sEYQzQSscSVc4/ZCaNITxiSi36rJJfhi2gD+jNZI+PSDm93lvDYeDSjXPxp6NbUf/8eejwe0Tds5ZLzgOBXt3x+B7r8GHQetoiAYYta6n9MMN7ZbPj7abJ4GnTG2BvQjHS+MbsAnB/WpAYDjAGRiGmg0xkGl2MKAK/9QqFILvwgUqn/wxfxijBvnXO+AeehoXPD5dtDFwyc32Q5M9taso2jfivroe/v7MOoqgQiEe50aa/zY9CZIbloGnqQBheRCCwx59zIh2EL/KIyRV0wMkUjcmGI9o2QQs7K/DJXpf4PX6ySSx4NXd+M99bQNYywRckrSGuTaH9rOmvEaMslFvGPkjH+B7v60fyDv+Oul9bpr2U41JMCoBl6P3MEtHkGD1sUwpXTWUGG8U74Chtl0F3gNRpksvAAM/WfhyoV0rWIrykzbFG56MMN8Sf32uyUiUPni8pvYpH4jn7XtLFhKJ1YxCUX78/Z16EZ7eeW7wYAHOxOGSe+oJuiUSLfV8P6/qdFcjE2qn1m5YaBMugu8KgVrmN7eRhG6j/36EOQXMxaLoHfSjFEET1Fr8YVRruzvKn/2q1fycXqoQeUXGJyob/87WUCCrissu1ANxas24d9Hb3Gc36kIf43UcBMUd5uTtTLxasm/lixUAbdBS1qxfSSvXQ9e2JRQRq6XXJRUS7DDtGueEkpJJHRbK+2KG1kxHn4NehZPTcjIquha+8vcyXQ2mXKKj/56zrsbTf/7+d4MeLQg4YtApa9M82gM4vkojT0QQD3LswwRK+wRX18iJJLKc7wisFBljFBg82noQvtziTqoZtx6P409EQ0EkBD95ecJ992D9jfYRrwptFV2Nfea8zTj4dureXiP2xRLJsNCJILAYmYnDMYJsqgu2B4FzxsUaL7OsKSXPRb/4WUFOVCljGpJCDDQ/fQ0M015bd8rnYbj8rFkttfy8tn+MnLkFnvrUIky9i6pLEhOromgYyvnqnabTQSsNqio+RinmT5Y8VCGXQX7GdemSJJEUE3KyxsMViImaJ8yDKgIu4dM27R0CUkl6AaeiwaCaahR6AX55LR0Jl+650dLUaypLNZo35LbTIWyEMn0v75l1xshbgyWctJlj9WLGQSi4Yt/HIR8JZQeNW1Cr1Di58CQXbs3pTs5bGifMgys9uPp0Eg77r9Qcvn8pUXlww9FDHKZ0RkewqY91PZLCoi7okf+zv7MXNcLdp7Uvi/d7ahaXQVYhFCMh71JQ0xu+QSIA49R3Kxee3FPH6Vh+6CGYcul8rPN0dGVGpl4Nu6U4E/m9epNC65i9xoVlF6RMnFzRhyY2TUcpFocGEYGd+Siz/ZhL/WiEP34aED3qF+vakMqhJRIxJsS2s3KhNRrcyALw9du+XVFn2HLfJwZcEbt++/qTj0QQBPijAPAG8PPULaJW1tMuYrW80O3+1XksvwJZsVrtBc1h63f1qUi5yGHiFCRSyCPulMUfNE4FdD58dETLbAnXDfa88qlckiHo1YSu1WxqOIRfxJQ2IcOgVqcGFWu+Tz4rHp8Vjh+2l+UQbdhazth/L00LlwBmBUdQIHuwsw6PbLY+WhDzsYY4Z857Yfw00Pr4eer464qBUnYhH0pfxLLv41dLNgncwVAfPhoacyWuu9XuHvqExEEQ3oofMWdH67MhlJRGIhLoeN0mKhDLoLfpIDmHCWB4CRVYkCPXSNWIQ8W4spypMs8z6hM8FIV8TyG03RTlXEotIlakWpJsimKOmRX7Kp/6Knm49UJotYhNCTsnnoPk88PCEQ0E4+vuuh267kNQ+d6fH32vGrNPRBgNn0mS8w9x/FOMsjLA/dzF6LR+UvjxXlg6ah549yMT103aDrIXOOY4XNPy/jb32ddhv3Gd/NX8v7bcpq6Ebeh8f4/nRWLxlcqIeu6d0A9OJc0i81Xp8juegeukyNnbBRBt0Fu4aeb4E5ee
gHuwrYFNVvIwRURCMFN5xWDD0sHrrrpqh2S6SFODLm7nhYx+aXZ6zz4B46SWV72l9rJBZl3U82xhwh1D/xkDhTmawxlqNp6OQryiWThWnQA2aKOkW58PeUlZvCQhl0F3IzwLyz8PTfEKOq4wVJLlnh6JNtsKsoL7KMob4yjniUsKOtx3EM3zwnIaLCzVBbN0XlJRdOEA9dLJ/LmHdKfjYr76GnMuZYThX30H1IHHxTE0DgsEU4RLnw9/QKJw0bZdBd4OFIMpmf4oEFACOqEuhJZYLHohsSTvEXhGJwkGUMVYkopo2pxYa9nY5jLLp4PH//SmNTFJrnXxwN3ZZt7Vk+Qz6yS4tysbZpSAaMcuF9B4KELZrdicyMcvHTleQySOAaulGbJV9fR5uHXpvU8rU6fZTxtLwfzPez64SK4UE2q3cikkjo4ZuigHt8uUVyiUXQJ+lsWFL/fWvoZvlcwNtIM+bXoEdw/zXNxmMHu/sRjfjb2OTSKhAsbJEX5ePx8P08sUh/T6/WgGEj01N0EhEtIKLVRLSKiG50GXc2ES3Tx7wa/lSLixhyBSDvAWBWbNN+xKqEZtB7+oN56MbBpzeaLWa1NsXggOuw+XRd+zoB3D10caM94ctD125jkl2H7K+NiB66hxTCxE1RD6PMN0XnzxyLld89H6ccMRqXz50QILHIlEcChS0yZoSNAvxExIwWb16tAcNGJvU/DeBmxthSIqoFsISIXmKMreYDiGgEgF8CuJAxto2IxgzMdIsHr0NRmdAiDXryxO2Kl7MAUK2/pqs/qIcuxAwX+ZJNMTgwknLyaMJiUgyPiMmnofNN+4pYFK1puT0eowZMYA1dbP7gXeBOtqFMOsuMsbXJOB697mQAwLubD/gun2tGuQTpKWovEZI12tIBg1BDZ4ztZowt1e93AFgDYIJt2CcBPM0Y26aP2xf2RItNVt/s4JeyPfk8dP2WLwx+EujqK9RD5xqcinIZbmSZFnURIXKVAURpzkty4XVVAH9RLqLk4ltDF2QjwLueOoNQMthjrJOGDkBvdydvQPlJB9All4B/I9fh+zPMaEsHFN9D96WhE1ETgOMALLQ9dSSAkUT0ChEtIaJPu7z+OiJaTESLW1paAk24WHD9j4hQGY/m3eAUs/AAoLqiMMlFrNGc8BEzrCgfuBQQjbjX6DZruchILqKHLi+58KtFHuXiR5IQy+cC3un8WmKR91jGGFIZZowV8a+hm3HoQTNFATPSSKy2CEi0BgwZaYNORDUAngJwE2Os3fZ0DMA8AJcAuADAd4joSPt7MMbuY4w1M8aaGxsbC5j2wKNdSpkedz7jzItpkaGhFyi5CBkj8SgpDX0Ywg1NNCLroeuSi4s0mGVm4pu/TFHtlssmfhxYU3LxTs4D5DdF+fs4GfRYxN8VLY/EAZD3asgVJhpvMiQXfu3gVTQtbKQMOhHFoRnzhxljTzsM2QHgb4yxLsbYfgCvAZgT3jSLj3gpVhmPekgu1sSian1TtDugQeeYkosy6MONrO5R5/M4ren8PMrCTUM3N/+CRrkA/spQcNlItqYJA0NlnO8F5DPo2nP2OHTAv4duT/0P0iSaO34xXR7V1FohsWgwSS6knb5+C2ANY+xul2HPADidiGJEVAXgJGha+5CFa2MAkIxH8hp0M/Vfg3vojy7aHuizzRAzUpuiw5QsY4jqJV1dDZS4Trjk4uKhM8GTrIjLGxl72V2/coaYnOfdU8CUK/PtP/HjwVFDj/jT0DNZe+q//41fPouKWAS9qYzlJFHsTVGZKJfTAFwNYCURLdMf+yaAwwGAMXYvY2wNEf0VwAoAWQC/YYy9PwDzLRriAVCZiKI3n+TCNW/9V6zSF+WizQdwqDuF+qq4v8/mUS6A0tCHIYyZscyRfB66sE4qJMIWDcklqmnoTNgodZ2LfhuXDCe0fqY1bNErnZ8xZuRw5Lu65Scje+o/EERDN6+so0FS/wU7UVcZR0dvOkdyKebx62nQGW
NvwJxfvnF3ArgzjEkNBiwauofkYvfQ+WUjAHT0BTDoQhKIinIZfoh5DdE86ej2Wi5A/sQibrhqk3EwBmw70I3Jo6ulJsMjVfxvOJqJRXIeuneEWF4NPUCTaH5S29/Zh/V7O7FuTwdmjKuVej2vrAgAdckY2ntTlseKXbpDZYq6IGroSUkNnf+IPIQJCBa6KIZBqkzR4YcYXx7Nk6Ep7J171nIRN/8um3sYohHCk0t2SMxFu40ZHrrfkEDBQ5coQR2LRFARi6A75e6h801GN8mFMe+epObnmsfru1sOAgD+vGKX1Gv560UPvb0nZfHQE0V2yJRBd0EMZ6qMe0S5CJ4S52sXzgAAdPb5r7pobUagarkMN4y2aBHeFs0jbJHIs5aLuCk6ti6J2mQMh3q816bYsQjw9rLtf4d8TwHtlkjT0bulNHRnyQWQl4ZEx+3SOYcBAMbVJ6VeC5g1nwCgLhlHO5dcDGcw//5b2CiD7kKWwTjNViU8PHThEplz0pTRAICO3jRuf2417n11o/Rni1fYxd4lV5Qe8YQuH7boHYcu+rPJWP7cCnMu2q1ss3Tz88yrDJnXilelVYlo3pDf/jwGnRfTk5WGxFouP7h8NgBYuiB5IZ4o6ypjmocOZjXyEifOsJDZFB2eMPk4dHvqP2At0HX/m5sBANefNVX+wyEUyFeSy7BCTCyLRggZrxrnECUX903RiCAFVsQjUoaLf7Kh0UtHx2i3so3WxWOoOuHloXNd391DT2WzqEQ053mnzxWjfwD4qpIqnijrknEc6kkZBbsATYbpS2fRm8ogGfeeT6EoD90Fu4be1Zd27waj34oeeo0e6fKFR97z/dmWAzVmRiQohgemMdRKurp76KZOYRbdyqOhC/9PxqJS6f983SVj+TX63M/L9dDzacnGVW6EUOnhoXODyw2wiF9piAnSakUsAiKfBh3mcV9XGUc6y9Ddn7YYdABo7y2Ol64Muguihj55VBW6+jNo6ehzHps1L5E5NcngFz/ipfTYuiTSWYb9ncEbZiiGFqKHroUtugy0RVflay3HwCwOR1LWQ2d8fP4oGjtZZr3KBLyaxJgGuLoiiu48V8Tc4Dp5vAmPmjY5nyt0LCIiI5ZcFtHD51fl7T1pQ3Kp5wa9SLKLMuguiB7NUePrAACrdtsrHmiIiUAcni3KqXCImXVD1OQnjqwEAOw42C39esXQhpeS0CQX92QX7rlzmSFfjRZRBtDGymrouocukcFp+RuENWxKGd6v1TT0GLry9BLg75OMORj0qE+DLhhkAHrdJh8aumAnaoykKMFD1438oZ7CssZlUQbdATF6AABm6gZ9/Z4O5/G21H/APMiuPnky/uPsqb4KG4l64sSRVQCAHQed25Apyg9RrohF3OuQc6PFjVhFLOqRKWrT0CWMHl+xlQE99AiZXmq+qBpxI7jaIwiByz5JB8kl4VECwelzrVcu5omuuz+NTS3O3aIs6K/nBv1Ad78R5llsD11tijpgj1qpr4yjKhHFPjfJxSFsEQA2/+hiAMC9r25CJstwwyNLcVh9Jb794VlSn08EHDZCC6Ha5dJXUlF+GMZQL5/rFrBhGPQYN+juEVFMSFEHNMPlJiE6zSXpERaZ+zrtNqJXK03EImjrdpcNzWNOy7TOl78hI7n4mWdEOC8k41G8t70Nv35tExZvPYC/rdqLtd+/0PGz7M3hedkCxkz5RWnog4Asy/W4R9ck0NqpHQAvr92LdYK3zgTNU4T0zSr+4z6/cg9+88Zmz89nQsqIWeireLGsitKSFSS8aEQLwTvjJy/jO3+yVtPosxn0RJ6iWyxHcpEroWvX0P1uihJpf8fIqjjauiU8dBCqE1E5ycXJoAeQXMTjtiIWwYZ9nfjh82vw5oZWAMDWVme508wQt3rogCm11CWVhl5yxMQOzujqCrR2aR7GZx9cjAv+67Wc8W7U2jZIvaQX0UOPRLQCXX67tCuGLqLnF9Hj0Lcf6MFD72y1jONrok
Lw0PMlFjlJC5taOvGtP6501dOZzUOXNZTiPgAAjKhM4GA+D12/JV2i6UllXE8epofuLrnIZmeKcejae0aF+9p7bdjn1qTbGgxRLRj0Wt2QcyPfGbDZjV+UQXfAaROqoSaB1z/Yj30dvQ6vcPbQOfwszZGNWBGjF2Q9I8XQJyNc8VXkKf2QK7lE83QssoUtxjXjP/+uV/Hwwm15jBaM9wb8SBnWq9wRVXG05fFSxZ4C9VUJAO6aO9fXHTdFfUa5iJURAetJgu+DbWzpxBOLt2PR5gPW1+q39k1RAKjV7yfjEUQI2H2oB197cjle/2BgG/soDT0PooHe064Z8jteWGc81pfOoCIWddXQOXYPfUtrFxprK1w/1x41UxGXb0igGPrw9RQVjJsTXC+vECQXtyqF9k1Re6aom/aetUsukiF94j4AAIysSmDTfvcNRjGwYATfRO1OYUxtbhp+byqLRDRiuYLmGJJLwE3RLftNeYU7XtsPdOPul9Zrz//4EstrAfNvtBh0/ZgnfQ/k929rV1edfWmcMX3gmvsoD90BJw39hrOnAbCGD25q6QLgnPovUmvz0L1CEHmZUV7USGtIoAz6cEHMa+DGzQkzykUztvkkl0w2a6wnQItyESNJ8sWvA4Lk4jNTlITAgi37u7G1tSv/eGjePABXj743lXFMKgIKi0MHTMcNMMsHuF0p2C/kRe++Jun8u9VW+Ku86hdl0B3IOhjoi44Zj7NnNGK1EIu+fm+HPj439V/E7qG3e8Sk8sVYwcPR4hH0Ksll2CA6CCNspZfFQl05kkvcXZ5JZZmlCmgyFrUYpHxldwHvFne5r8uNAOnPZHHWna/kHU9EGFGpXZW4baL2pd3T6P1HuVjj0O+/phkXHzPOMmavYOR3OkSb8deLV0D2Y57TPcCFupRBd8De9JkzfUwNOnpNY5xj0F09dOuP61Xljh9ccUEbVR768MG8lEeOQX9z437jPpcVjCiXPJvn6UzWqEsOaPWJRPrSWew42G2saY5ZbZEQoWBhi4BZ59x7vOChu2yi9qayjhuigP8oF96EgzN/5lh8cf50y5itB8wr6tN+/LJx/yFdRhFf31CjSalOBr2mIha4cbwsyqA74JT5CQBnHTnG8v/dh3ot4x0kPQC5WaNeIUz2nolqU3R4Iab+11daNfRHFm4z7jttirqtk3SGWSSXOpuU05/O4vQ7FuD8n71meZzBDD1M5Ilzd/8btP9XCceAU5SXWJem3jDo7pKL04YoICYW+QhbtFlBe/ih0zy2H+jGD5/XumyKh/3fbjoDXz7vSBx/+Mic10wcWemrrEAQZHqKTiKiBUS0mohWEdGNecaeQERpIroy3GkWF/vlIuekI0ZZ/n9I/6HdTgAccfNmXF1S2kPni5NHJCiGB6L+LHroY+sqLMlARhx6VE5yiQnVCe3avNsmYirDjFos2pWi3zj0XA/dMQtUcIpqEmalUid6UpmcKwxOoXHogNWgzzqsLuc1mSyzzE18+eiaCnzp3OmOktDomkTBjeO9kPHQ0wBuZozNAnAygBuIKCfVkYiiAO4A8GK4Uyw+Tho6YK2/LBpmp9R/N+or455ZY9y7sHpeyqAPF0SHYqQQ5TJ9TC1W7DiE+9/YjIfe2YrfvK4lqfENQm/JxVyg9Q4euhOpTNYwkn48dHuggHiV2tmba9TEJJ1IRKuJ7mb8uvrSFqMr4ndTNMNyHTGxsB6v4yRysLsfrULoMbnunmmcNEVzBCvjMfQMsHTqadAZY7sZY0v1+x0A1gCY4DD0iwCeArAv1Bn6IJXJ4qbH3rNkcQbBTUMHgH89YwoAoKmhCm09KaQyWUMi8ei3C0Argu/loed4XnkyABXlhxiHHo0QFn3zXPzyquMxc1wt+jNZ3P7n1fjOn97HNl3bFT10V4Nu2xQdYQuHFF8nSiKpjBkdk4xHpDVgHiFiboqKfXZzDbXdKapKxNDl8lkdvd4GXbbLlz0OHdAct386fgJGVsVxRI
PZc/WWi2YCAK7+7SJ86rcLjce9jvv/+/xJWHP7hahKRLFmdzt+//YWqbkFwZeGTkRNAI4DsND2+AQAHwVwj8frryOixUS0uKUl/AD7ra3d+NOyXbjy3rcKep98m5zfvPgovP2N+ZjSUI1DPSmcfsfLuOKet13Hc175ytl45xvnah66bJSLEb2gPPThBO8DwQ3NmLokLj5mvGVDXkQszpXJMsdiXulM1nKFad9sFT1aca2lhNc11lSgpdO7/gtg3QcArBp6Xg9dOAF0u0gunX1p1/LUsQiByJ/kEnU4bu/++Fws+taHcMrUBgDA8YePQPNkTRdfI0S6nTG9AfNnjsl5vUg8GkFlImokIt36zCq8vbFVan5+kTboRFQDzQO/iTFmryP7XwC+zhjL+y0yxu5jjDUzxpobG8MPrue74m4LX5o8m5xEhPH1laivTOBQdwp7280Fnu9E3dRQjXH1SdR7pEADDhq68tCHFbyzT8y2W3f2DOdjhu/R5AvZS2etm6J2yUV8TWdfGjsOdiOTZUhlmHHCGF9faQQCeGE30Hxu/P3t2Cuc5vPQO/NILkR6qQxJDz2Tdfew49EIpo2pwdvfmI+HPncSjp04ImfMPZ+ahyMaa6Q+60CXaSve2DAwGaNSBp2I4tCM+cOMsacdhjQDeIyItgC4EsAviegjYU1SFl5rpVDcNHSREVXxHD0x33hOQ20CrV39eeu55GjoalN0WMEbHMdsXe0vOmY8PvjhRa6v40bOyaFJZZjlBJGMR1FfGcdXL9CamYvra39nH06/YwGueWCR7qFr8xhXn8Smli48uWSH599gL1gnLnen+YldugCthK6Ths4Yy6uhA7rWL1V4jGH3oR40OmSjioyvr0R1RQyJWAQLv3kuXv/aOcZz1S6bs05w3f2Ba0/AVy+YKf06P8hEuRCA3wJYwxi722kMY2wKY6yJMdYE4EkA/8EY+1OYE5XhoGDQ3Tqly+CUKWqHN54QkdHQG6or0J/OOuqIHMc610PAoD+ycBs+/7t3Sz2NIQ9vn2b30AHnxsgcvoHqdAWYFgwzZ/lt5+OGc6ZpHq1wBbi7TfPCX/9gP3pTGSHKRbv9yhPLPf8Gu1M0vt40ms4eunW8WwndvnQWqQzL2xEskaf+jUhLZx/aulM4cqychw1oHcQmjarCl+ZPwxEN1XllVjt87FHjcjdaw0LGQz8NwNUA5hPRMv3fxUR0PRFdP2AzC4DooefrSeiFWMrTjVOOGJ3zmIyHPrpGO+g+8vM3XTuT96ezRsd3QLs87upPD3jIU6F8848r8fc1JdsTLxvSttIPdhZ981y8/Y35OY+P1HVxJ4OesW2KiiRiEctGvZgZuaut1zDo5x41FoBm2BljeHPDflfHye4UNTVU45kbTgMAdDpEedkDEariUSzb3ob3dx6yjOMng9oQPPRVOzXl+MixtZ5j7Xz5/Bl4+Stn+3rNPVcdj29cNBNj69zrOBWKTJTLG4wxYowdyxibq/97njF2L2PsXofx1zDGnhyY6ebngGDQ3WJYZWA2/c+J0TUVmDNphOUxmZP1aD2TbNP+Ltyv10a3Rw7066Fi/Iw+Y1wtGEPB0TuKoYHpoTsvqDF1SYyvz71C5JErhxwSYVLZrKt3n4hFLCcBsZ7JltYuI2N53uSR+KfjJqCxtgLPrdiNq36zEI8v3u74nvbCVQAwc7xmOLk23tWXNhKh+GmBO0XcIROjSQAzKa/ay6BLaOiPvbsNI6vimDc5NwloIGhqqMa/nTXVl1fvl7KqtigmXeQrkO+FV7Etzu+vPRGpbBbNP/g7AO94VACoEcK3fvj8GiPb7LZLZ+Gjx01AMq6VQBU3kWbpsbDPLNuFmgpts2hKQzUYYznhZ4qhj+mh+0vkHlnNPfRcg57OMNcTRCIasbxmj7Dx2dGbtsSvJxNalcZFm7UoDbcCWk7HUEUsiuqE2Snp6Nv+hqPG1+GFG8/I8dB5hy57lubNutyTV0OXlFzW7unA6d
MbXevCDEXKyqBvESq5FVJQXqylkY96W+iXTGLRsRNH4HOnT8Hp0xtw7QOm3vy951bje8+tBgCMrk5YmkpPHFmJM6Y34MG3tuDBt7ZY3u/Ba0/A2TPyh00Vm3yX9wpvvDx0N/Jp6KkMcz1BVMSt7eHsBahEz74qHkV3fwbbD2hj3GRDt32oaWNr8cE+80qThwDagwT2CdFjYugk3yc7aUqu5MlJSHZjautOGTJVuVA2tVweXbQNK3YcwgxdD+vqS4Mx+cbMIjIausixE+sByIVLxqMRfOfDs3DOjDG46UPTHce0dvUbG6KAtply9cmTHceGFc+6cschbHNpteUX2aQOhTNuUS52nvr3U/D7z55o/D8ZjyKpG+f3th1E0y1/MRpXpLO5m6KciljEcnVr718bF5yLSr2BMzfEbn1J3SLFZoytwZsbWrH9gHWt8drjo6s1SfJXV88znJrnV+7GA29uRjqTRWUihvNmjc1xpkQmjKj0bO6czTK096byliceipSNQf/G0ysBmLUX2rpT+OyD7+KEH/7Dd0Ecr4YVdm6/fDZqkzHMnlDv63OuP2sqAK2KI+fL5x0JIHcP4JiJzu/t1F0pCJf+/A2ceecCx+fuf2Mztux3rmPthGx6uMIZw6B7XCLOmzwKZx5pjU2vS8bR2ZfGH97VtO3X1mvxzumM+1XT1MYai+SyS49y4d5rQjgRVCa0sru8YbpbqLBbtvVp07REnWseWGQ8lspkDWfi8FFVAIBTpzVg+W3nIxGN4MbHluF7z63Gva9uRHd/2jNUsLlpJLa0drt0F9Po6E2DsdwiZUOdsjHo4+uTqK+M43Ona6n5NzyyFAvWtWB/Zx82+zBGGvlbytmZO2kEVn73Aoyrzx/PaicZj+LNW+bjuS+eDgC4aPY4nDJVu5Rst3n74+srcccVx6BpdJXl8bBi793o7Evj9j+vxtk/fSXnOaeMRABIDYEQy8EM/179Si6AtlnY1ZcxCmDxyKh0nk3RE5qsRef4a3n0h/i6SkFvHlkVR2tnH5ZsPZjjNLk1Tr987gT8+9lTsbHFPCavvOctbDvQjWiEMH6EeQwl41EcLqz397a1oasvjao8+jkAnKxHoL2+fr/rmLYe7bgptz2osjHoB7v78fHmiZg2Jjem9AOXfoluyCQWhcWEEZVIxqNYffsF+J9/OQ5HO1R343zihMPxwLUnYlS1uQiXbD2I7z67yij49aMX1uDk//ePgufFGMOh7pQlAkeUr97dcgDTvvUClmw9kPNa5aEXhqGhe0guTlTGtYQcLmls0T3ffJuiUxrNeiWTRpnRM/xYsmjounc8vj6JE5pGYfP+Llxxz1v41G+s0Sj5jqFTp1r17+U7DmHbgW4cNiKZc9IRa6l092fQ1ZfJuyEKAMdMqMf4+iReeH+P6xgepmnPmB3qlIVB7+5PozeVxajqCscd6w17/YX75SvONVBUJWKIRyOWmhdOTGmoxptfN2OQt7Z248G3tuDY776Iv6/ei1+9ugl72nuxX7Lmhhv3vroJc25/EduFdnk9qQze2dSKnW09hnbvFHeeSocjAw1XZCUXJ6oroujqy2D9Xs2J2dXWA8aYnvrv/H6NNWZc9GF6OGR1Imr08xSPA358ja1LYnRNhVEKYPHWg5b3zGadN0UBM2pLZGNLJyaPqs55XEyrb+3qQ08qY5xU3CAiXHD0OLz2QQt+9Pwa3PDI0pwxPHrGXtNmqFMWBp3Hn4/Sw7bG2Bow723vw89f/gCPv+scM2vHXhyp2Dzyryfh4c+f5Pp8ZSKK7146C9+91FrF+PO/X2zcX7XLXm5H87B/99aWvOV7+eX+Y+9qjRT2CiFs7T1p/PN97+Dcu14xDiqnAkrKQy8Mr8SifFQlYvhgX6exB3OoJ2WcIOJuce3C8cINXE0yZoRBiiHA3IMeXZ3I8W7FzXB7T1GR0TUV+PzpU4xgAkBbr5NGVeWMFa+4dx7UNmvtDWOcmD9zDPrTWfzqtU34y4
rdWLT5AFbsaDOeb9XrqoxUksvgwzTo2sJ89gunG30BpzRU4+V1+/DTF9fja0+tkHo/XspzIBMA8nHq1AZj88iNa06bgo81T3J9ftWuQzmPvf7Bftz27Cr86Pm1lsfF0DO+OXZAjzoQSxTwE0FvKmsYbafwUNlKdwpnuOQSD+ChVyWixtXZpFGVaO9NCRKO8/uJEh430tUVMUNfFjfouVQxqjqBukqrYT3Q1Y9HF23D8u1trk1iON/+8Cw8c8NpuP+aZuOxwx0M+hzB6POEpHxJRZzJtr2mj//qbVz28zeN/29t7QaRcwmPoUxZGPRWm4c+rj6JX141D1t+fAnG1SWN0CpZvUw2sajU5FvYBzpzN0v5ia/D5qGLBpjHMHNDLmYd7hdC1Dbpm1qtXX1IZbIWL457ajsOduNzD75bUNbucIR76NGAHjqghSKe0DQK7T1poXqj8/uJhp4fI6OqEhilG3SxpspcPTv6o8dPyDmeWjr68I2nV+LyX7xpauh5LnOJCPNnjsVUXcOf4GBcpzpUMvTqTwrAEqDgpLlva+3GuLpkWSUVAUPcoN/w8FLc/eI6I9mAe+gi/LIRMC8X+9NZfPtPKy01K0RkinMNZhpqEo61bPhjlbZFLPahtDffEJNUdgnyC6+419LRh5+9tB6X/8L0fi7/xZtIZbL4yV/X4R9r9+FveTanFLmYGrr/BcgzjD9zahMmjtA89JTkJmtdMmYY6ZpkzJBfxBPyUePrsOXHl+DUqQ05Bl1sMM1b2sn8DZ/ScyymjM7V0CMRwpPXn4JvXmxWJ/TaZwK0rFSOKO3wtb71QHeOF18ODOlM0b+s3A0ARjKReOnIETWyHt2gvblxP/7vnW3Y196H+z7dnPOa3lSw1OtScMmxWuODK46fgBsfWwYAqE3GHaWQ/R2acbZ7JWJWXVt3yhLN8stXNhr39xyyJpwAmte/fm+HkcDC2draZTbqcOnQrnAmaKYoYNY6mdZYg/beFBgzT8r51vOSb38I8VgEzyzbpX92BCOrcyUXkbqk1aCv2GHKfK/pIYONtd6FqK45tQlnzxiDKQ25Bh0AmptGYe6kEXh66U4c6kkZx7ssXzp3Ot7dcgCpDMO+9j4s296GJVsP4hN5JMuhypAz6Mu3t+HhhVst9YTX6Z5BnUNJTdGgd/Vn0N2fRoW+sN26r/AsOD9lNUvFLz55PABzzjUVMdduL3v1RAt7k94+oc/hoZ4U7n5pveNnOTU3aO3qtxRz4mzY12no7G7p4Qpn+MZ0kPIJfLNPlBy41Oa2KQqYReP4MRSLkJFF6ZYeb/fQ391ihrDyEhVjPGqNA5r04mbMObFoBC/ceIYxXoa/f/ksxCKEpoZq3PXxufjSo+/hjJ+YyXOHKw+99Ozv7MPji3fAyUY4/dD2rt3n/+w13Hbp0QCcq9IBmqfRUFOBcXX+EoVKCY9UuHTOeGxq6XL0qvbphteuodsll/99eYPjZ2w/aHrot19+NPa29+IXCzbi/Z25ETXr93YaHnrBHaSGGeks01upBZFctKuvMXUV6NY3Eb+uBwP4ueKMRgnVFTH87BNzXOumiFmW4+uTOZFVo6sTliJzheL3+xAjZJz2z5ocJJ6hzpC7Fj5shLZxItM1BdCyL2+5aCZO1LPhdhzswQNvamVr3SrFrdp1CLMn1JUsyiUIo2sq8OJ/nonbL5+tZQs6aOjck27vSWPtnnb8YoFmuEXJZa3u6X/2tCk5r/9A0EgnjazKe0As3nrQYtA3tXSi+Qd/x1sb3LP3FBr2dnF+uPPKY3HbpbMwY2ytUS6Cb2BXSBhXPqZBl1s+etxE45izM64uiabRVfjWxUdhjIPz4/RYqXAy6OWooQ9Zg27nro/NcXyciHD9WVNx8/lHGo+9pSfFHOjqd6zzcqCrf0h555wjx9YiHo2guiKGbl1Dz2YZ3tq4H4wxo//p25taceF/vY47/7YO7b0pi4f+xJIdqIhFcNXJh1vee2pjtUVyqYhH8k
bZLNzUiv365X97bwo/+es67O/sw88XbHAtGaDQSGdYoJBFQEv4ufa0KSAiTBhRib986XTjOS9ZAwDOm6U5QF+90LtFWmUiile+eg7+9cwjHCW+gWzk4Jcah8iYcpRchpxBF8+0H/zwIrz+tXOw8f9djCvmTcz7upOOGI23bpmfs3H69qZWMMbwhUeWYsE6LesxXxPaoUBNRRSb9nfhN69vwh/f24lP/nohHl203TF7dOfBHouGDgBXnzwZUxtrLCfJsbYT3JjapKWtmJ2+dNbwDPd39OGV9dp3+9bGVsfMPYVJOpsNFLLohJiV6RQCaCca0Rwgv+v/omPGA7Aa8bES+nmxmNpYgzuvPBZfOnc6Hv78SXjk8yflbOqWAzI9RScR0QIiWk1Eq4joRocxVxHRCiJaSURvEZGzuxwy8WgEk0ZVSW8eHTaiEi/955nG/4m0srEdfWn8ecVuXPvAu8hkGbr7M1LJC4MVXn/lB39Zg7V7NAnl4YVbwRjwg4/MtlR3/L93tlrS+wFgpm4ExJPkWXpVv/NmjcVT/34Kpo2pwXGHj8TT/3Eqzps1FidOMQs8JW1RLU8s2YHeVBa/unoeLp1zGBZtzq3/ojDRNPRwfC0iMmLHK300NPbLjedOx6JvnYtawUiOGUQeOhHhY82T8OXzjsRp0xpwqkfi3lBFxmqlAdzMGFtKRLUAlhDRS4yx1cKYzQDOYowdJKKLANwHwD13vUD+6xNzA2uMo4W6FZNHVeHul9Zbojq49lybpwntYEfcvPz169p+Ad+wGluXxLj6pFGw7OGF2/CwXlfp/Flj8eLqvZbNpKMPq8OqXe247swj8JlTm3JCHo8/fCR+/elmfO3J5YahPn1aI267dBZeXrsPv3t7i+Gpz55QjzW72/Hc8l2WpgUKK+lMNlDIohuPXXfygNeoj0YIY2qTOKFppBHCKhMvrggXz2+cMbYbwG79fgcRrQEwAcBqYcxbwkveAZBf/yiQjxw3IZT3cYrV5hmPQ1lyOXJsDZbYiiVxpjZWuxY3+s6HZ+HfzjrC8OgAzRi0dPSBiPJm1X3q5Ml4fPEO/PRjc3DxMeNQlYjhM6c24fK5h2Hu7S8B0DbRGvQT6oGu/hwZR6Hx+GK5DX9ZtMYXxcmI/O5lR2NTSxcWbj4QaoSLQg5f3zgRNQE4DsDCPMM+B+AFl9dfR0SLiWhxS0uLn48Olbs+NgdfvWAGvn7hjJznOvUQu6Esudz64aMt3Wl4CdKPN0/EEY01RqboP58wCY8IRcAmjqzEvMnW2ti1ybil4p0bx04cgS0/vgRXzpto8czEetPRCBkG3a3TTdgsWLsP33h6pbE/MhjYsK8T97660bGbFs8nGKpUxKK47swjAAAnNBWn+bLCRNqgE1ENgKcA3MQYc1x1RHQONIP+dafnGWP3McaaGWPNjY2NTkOKwhXzJuKGc6Y5FrfiBahqhrDkUpmIYuJIcwd/xjgts44nefC06MbaCjTr4ZzzZ44ZsDDNaISMDdTGWs3AuyV1FcKKHW05XeivffBdPLpom6V/a6n5yhPL8eMX1uY0XvnHmr246L9fL9GswuPco8Zi9e0X4NiJI0o9lWGHlNUiojg0Y/4wY+xplzHHAvgNgIsYY+E0uiwiJ04ZhUWbD2DjPu0gqx3CHjqgJf5c/9ASfObUJiMJi8cYcz11RJWW+PHG188xPOeBYMVt5xuFzvjn7B8AD/1j976NvnQWpxwxGlnG8HOXBKlisW5PB/7r7+vxo386xnKlwvdn3tiw33L18+gi82TEWxEOVZR+Xho8v3XS3LbfAljDGLvbZczhAJ4GcDVjzDlvfJDymVMm43dvb8XXL5yBK+552wivG8qSCwCcMb0Rq26/EACwZOsB3PvqRpwzcwwA4Mp5E7Fub4dxSSx68wOB+F0aBt2hGmRQ3tt2EKt3txsJUmJ6d7HpTWXwwd5OzJ5Qh0t//gb601lccfxEfGjWWGMMD721N0refqAb584cg+
vOPCKnLZxCIYOM1ToNwNUAVhLRMv2xbwI4HAAYY/cCuBXAaAC/1C/b04yx3KpXg5DbLj0at1x0FKIRQkUsgudX7sHo6oRUEsZQYd7kUdjy40uM/586rQF/+dIZJZlLdUXMUrM7DK68922pejGZrHujZADYsK8Dn31wMZ64/hRjw/YvK3YjGiEs2nwAt9oaijhx2zOr8Aeb7LP9YDcYY3hq6U5cOHuc0S1H3JRnjGFnWw9OmToaJx3hnGqvUHghE+XyBoC84ipj7PMAPh/WpIpJJEJGfO5R4+uwbHsb/un4CWVXJ3kw0VBTEeqmqGjMP3nS4XhkodZtacbYWpwzcwzufVWrGLlsexvmTXbfqHt44TZsO9CNJ5fswA3nTENLR58lCeqL86dhY0unse/gxJJtudFF33tuNSaPrsJXnliO51fuxm69amW3UJ6hvSeNzr40JrhkQisUMqi4IgFeOH/2hHqPkYpCaKhJhOqh86up+66eh+9fPht3XnksAGDbgW4j4gIArrjnLcfXc7gx3drahWeW7TSSsjhfeHQprrz3bSzZegBffWI52rpzZSN7rXnOS6s1Ke/ltfuMjvc8RHZnWw/m3/WKNocy66CjKC7KoAt8+5JZOLFpFM6eMabUUylrGmoqQjXo3f1pfGzeRJx/9DhEI4Qr503EFcdPxB1XHotR1Qmj7CrgXtsbMHuhPr54B258bBlueWql5fk3N2h7/bc9uwpPLNmB/3tnKwAtEYhn54oJbycfMQrf/8hsAJqcY6ezL43dh3rw99V7ja5b5dYSTVFclEEXOGp8HR6//hTpVnWKYIyoiud0RgpCJstw8+PLsbe9Dw1CIwUiwl0fn4PL5hwGQPtded14XjHyUE8qp0hYp63M78623IYeAIxywY8u2o7u/jS++Oh7OOrWv+JQTwrvbWszxt1z1Tx88sTDESE4lhju7s/glB+9jNueXQUAuOlD0zH7MHV1qAiOMuiKolOViBm1ugth6baDeGqpllWZ9dgUnT1Bq0+zfm8HevozmPO9F/HjF9bixVV7hDrx+eu2//Cjsy3/39nWg+//eQ1e0FvsXfPAIgDAV84/Es/ccBpGVicQjRAaayssTUXmTKw3Ok2JXH/W1Lw9OBUKL5RBVxSdykTUkCgKoVUPfZw9oQ4fPyF/O7FJI6uQjEewfm8n7tfr4f911R5c99ASI8yxsy+NiSMr8eT1p+S8fsFXzsZVJ03Gtac1WR5/4f3dxv33trXhqpMOxxfmT8ccoXzC+HpNRuFFy646aTJqEjFLYlEiFlEb8YqCUQZdUXSq4lGks8xogBEU3nnpl5+c51kaNhIhTB9Ti1fXt+DOv60DYNbW70tn8fLavfjjeztRm4yjuWkUvjR/Gk6aMgq/unoePjZvIpr02tnfucQauthm63r1Lyda68gDwDn6nsx1ZxyBdT+4EB9rnpiT56Da9CnCYGhnzyiGJDxMtKc/U1ABp3ZdsqirlFvG08fW4OmlO43/7xN6oX72wcUAgD5dGvny+WadnwuOHmfcj0QIf/7i6fjw/75hee8vnDMNW1q7cLSt5SEA/OuZU5CMR/DpU5qMsgu9aesVijLoijBQBl1RdHhaeHcqjXoE34DmHrpsZcwjhW7xcybW5/TABLQa+V44hbXefP6RrrVwqhIx/NtZUy2PTdOvKGorYujIE3mjUPhBGXRF0eHlewvdGG3vSaM6EZVufnzkWFOWmTGuFst3HAKgFSnjiU7fvsQ7GxTQKgl29KZx04emo6M37buwGS8tPKo6gSnfeN7XaxUKN5RBVxQdUXIphI7elKVDjhfHThyBEVVxXHzMeEv7sYR+Qvj+R2Yb9W68eOL6U/1N1kY0QkazlWtObcLUMd4lihUKL5RBVxSd0Dz03pS0fg5oCU3Lbj0fALB2T7tREuCyuYfhnlc24sQSFcT67mVHl+RzFeWHMuiKosPT48XY7CAU0sx75rg6HNFYjU0tXfjI3Am44ZxpQ7pLlU
IBqLBFRQkwJZfCNgMLbeb9nx/Sao6Pq08qY64oC9QqVhQdI8qlQMmluy+DMbXBG3NcOucwXKqXB1AoygHloSuKTlgaeld/WnXGUSgElEFXFJ2wolx6+jPGyUGhUCiDrigB1YkYEtFIwSV0NQ9dGXSFguNp0IloEhEtIKLVRLSKiG50GENE9D9EtIGIVhDR8QMzXUU5EI0QJo2qxJbWLu/BLmSyDL2prJJcFAoBmaMhDeBmxthSIqoFsISIXmKMrRbGXARguv7vJAD36LcKhSNNo6uxtbXbe6ALPOSRd5lSKBQSHjpjbDdjbKl+vwPAGgATbMMuB/B7pvEOgBFEND702SrKhqaGamxp7QJj1qJU+9p7ccn/vI71e3M7/ADAoe4Unlm2E6+vbwEAVCoPXaEw8HU0EFETgOMALLQ9NQGA2Op8h/7YbnEQEV0H4DoAOPzw3DKjiuHDuLokelNZdPSl8df392Djvk6ce9RYLNl6EKt2teMXCzbg6xfOxIZ9nTjzyEbjddc+uAhLha5A1UpDVygMpA06EdUAeArATYyx3DJ1EjDG7gNwHwA0NzereqHDmFHVCQBak4qvPbkCAPDbNzZjll5+9pllu/DMsl0AgOW3no/6qjjae1MWYw7IVUdUKIYLUgadiOLQjPnDjLGnHYbsBCC2jJmoP6ZQODK6RjPoa3ZrvsFlcw7Ds8t3YcWOQxhdnTCaJgPAlx57D8l4xOjLecmx49GfzuKl1XsRi6hALYWC42nQSasL+lsAaxhjd7sMexbAF4joMWiboYcYY7tdxioUaNArDb675QAA4CPHaQYdAH740WPwnWfeN0ravqrr5QAwa3wd/vsTcxGNEF5d34IzpzdCoVBoyHjopwG4GsBKIlqmP/ZNAIcDAGPsXgDPA7gYwAYA3QCuDX2mirKCSy4PvLkFgFYs69I5h+FgVz/OntGIaY01aOnowxfnT8MnTpiE0+9YgNpkDI9ed7JR//zsGXKlbhWK4YKnQWeMvQEgr1LJtFCFG8KalKL84QYdAD563ASMr0/if//lOOOxi44Zh7c3taKhpgITR1bh/muaMXfSSNRXBu9wpFCUOyrmS1ESkvEobj7vSJw6rQHzJo/Mef5TJ01GQ00F5usNJ+bPHFvsKSoUQw5l0BUl44vnTnd9LhIhXHyMSmVQKPygQgQUCoWiTFAGXaFQKMoEZdAVCoWiTFAGXaFQKMoEZdAVCoWiTFAGXaFQKMoEZdAVCoWiTFAGXaFQKMoEsjcYKNoHE3UAWFeSD5ejAcD+Uk/ChcE8N2Bwz0/NLTiDeX6DeW5AuPObzBhzrEpXykzRdYyx5hJ+fl6IaPFgnd9gnhswuOen5hacwTy/wTw3oHjzU5KLQqFQlAnKoCsUCkWZUEqDfl8JP1uGwTy/wTw3YHDPT80tOIN5foN5bkCR5leyTVGFQqFQhIuSXBQKhaJMUAZdoVAoyoQBNehENImIFhDRaiJaRUQ3Oow5m4gOEdEy/d+tAzknh8/fQkQr9c9e7PA8EdH/ENEGIlpBRMcXaV4zhO9kGRG1E9FNtjFF/e6I6H4i2kdE7wuPjSKil4joA/02t/2QNu4z+pgPiOgzRZrbnUS0Vv/d/khEI1xem3cNDNDcvktEO4Xf7mKX115IROv09XdL2HPLM78/CHPbIvQTtr92oL87RxsyGNZdnrmVbt0xxgbsH4DxAI7X79cCWA9glm3M2QD+PJDz8JjjFgANeZ6/GMAL0PqqngxgYQnmGAWwB1pCQcm+OwBnAjgewPvCYz8BcIt+/xYAdzi8bhSATfrtSP3+yCLM7XwAMf3+HU5zk1kDAzS37wL4isTvvhHAEQASAJbbj5+Bmp/t+bsA3Fqi787RhgyGdZdnbiVbdwPqoTPGdjPGlur3OwCsATBhID9zALgcwO+ZxjsARhBRsXujnQtgI2Nsa5E/1wJj7DUAB2wPXw7gd/r93wH4iMNLLwDwEmPsAGPsIICXAFw40HNjjL3IGE
vr/30HwMQwP1MWl+9NhhMBbGCMbWKM9QN4DNr3HSr55kdEBODjAB4N+3NlyGNDSr7u3OZWynVXNA2diJoAHAdgocPTpxDRciJ6gYiOLtacdBiAF4loCRFd5/D8BADbhf/vQPFPSv8M9wOqlN8dAIxljO3W7+8B4NTNeTB8h5+FdqXlhNcaGCi+oF+W3+8iGQyG7+0MAHsZYx+4PF+0785mQwbVustj34q67oqS+k9ENQCeAnATY6zd9vRSaFJCp64j/gmAe/fg8DmdMbaTiMYAeImI1uoey6CAiBIALgPwDYenS/3dWWCMMSIadHGwRPQtAGkAD7sMKcUauAfA96Ed1N+HJmt8doA/Mwj/gvzeeVG+O7sN0S4cNEq97tzsWynW3YB76EQUh/bHPswYe9r+PGOsnTHWqd9/HkCciBoGel7C5+/Ub/cB+CO0y1yRnQAmCf+fqD9WLC4CsJQxttf+RKm/O529XILSb/c5jCnZd0hE1wD4MICrmC5c2pFYA6HDGNvLGMswxrIAfu3ymSVde0QUA/BPAP7gNqYY352LDRkU687NvpVq3Q10lAsB+C2ANYyxu13GjNPHgYhO1OfUOpDzEj67mohq+X1omxnv24Y9C+DTpHEygEPCpV4xcPWQSvndCTwLgEcPfAbAMw5j/gbgfCIaqUsL5+uPDShEdCGArwG4jDHW7TJGZg0MxNzEfZiPunzmuwCmE9EU/Urtn6F938XiQwDWMsZ2OD1ZjO8ujw0p+bpzm1tJ112YO6wOu7inQ7ukXAFgmf7vYgDXA7heH/MFAKug7eC/A+DUgZyTbX5H6J+7XJ/Dt/THxfkRgF9AizZYCaC5iPOrhmag64XHSvbdQTux7AaQgqZHfg7AaAD/APABgL8DGKWPbQbwG+G1nwWwQf93bZHmtgGahsrX3r362MMAPJ9vDRRhbg/p62kFNOM03j43/f8XQ4ue2DgQc3Obn/74g3ytCWOL/d252ZCSr7s8cyvZulOp/wqFQlEmqExRhUKhKBOUQVcoFIoyQRl0hUKhKBOUQVcoFIoyQRl0hUKhKBOUQVcoFIoyQRl0hUKhKBP+P0JmoQzXLuTyAAAAAElFTkSuQmCC\n",
|
| 171 |
+
"text/plain": [
|
| 172 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 173 |
+
]
|
| 174 |
+
},
|
| 175 |
+
"metadata": {
|
| 176 |
+
"needs_background": "light"
|
| 177 |
+
},
|
| 178 |
+
"output_type": "display_data"
|
| 179 |
+
}
|
| 180 |
+
],
|
| 181 |
+
"source": [
|
| 182 |
+
"plt.plot(t_vec, eye_ratio)"
|
| 183 |
+
]
|
| 184 |
+
},
|
| 185 |
+
{
|
| 186 |
+
"cell_type": "code",
|
| 187 |
+
"execution_count": 9,
|
| 188 |
+
"id": "ae7b8769",
|
| 189 |
+
"metadata": {},
|
| 190 |
+
"outputs": [
|
| 191 |
+
{
|
| 192 |
+
"data": {
|
| 193 |
+
"text/plain": [
|
| 194 |
+
"[<matplotlib.lines.Line2D at 0x1d80048f460>]"
|
| 195 |
+
]
|
| 196 |
+
},
|
| 197 |
+
"execution_count": 9,
|
| 198 |
+
"metadata": {},
|
| 199 |
+
"output_type": "execute_result"
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"data": {
|
| 203 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD4CAYAAAAJmJb0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAABEIUlEQVR4nO2dd5gcxbX235q0u1qt4q5yWCQkoQASsBaSyEEgBCYZDNgmGGyifcH4fhhjG2ODL9jYxtj42mCCscmXDCJKBAkZlEAo55zz5jChvj+6q6e6p3umu6dnpnfm/J5nn53p6VDTU/3WqXNOVTHOOQiCIIjiJFDoAhAEQRC5g0SeIAiiiCGRJwiCKGJI5AmCIIoYEnmCIIgiJlToAshUV1fz2traQheDIAiiU7Fo0aJ9nPMas898JfK1tbVYuHBhoYtBEATRqWCMbbb6jNw1BEEQRQyJPEEQRBGTtcgzxsoZY/MZY18xxpYzxn6lbj+MMTaPMbaOMfYCYyySfXEJgiAIJ3hhybcDOI1zPh7ABADTGGOTAPwWwIOc88MBHARwrQfXIgiCIByQtchzhSb1bVj94wBOA/CSuv0pABdkey2CIAjCGZ745BljQcbYYgB7AHwAYD2AQ5zzmLrLNgADLY69jjG2kDG2cO/evV4UhyAIglDxROQ553HO+QQAgwBMBHCEg2Mf5ZzXcc7rampM0zwJgiAIl3iaXcM5PwTgIwCTAfRgjIk8/EEAtnt5LYIgzNmyvwWz11CvmFDwIrumhjHWQ31dAWAqgJVQxP5idberALye7bUIgsjMSQ98hCufmF/oYhA+wYsRr/0BPMUYC0JpNF7knL/FGFsB4HnG2L0AvgTwuAfXIgiCIByQtchzzpcAONpk+wYo/nmCIAiiQNCIV4IgiCKGRJ4gCKKIIZEnCIIoYkjkCYIgihgSeYIgiCKGRJ4gCKKIIZEnCIIoYkjkCYIgihgS+U5ERyyB9li80MUgCKITQSLfiTj19x9j1M/fLXQxCILoRJDIdyK2H2otdBEIguhkkMgTBEEUMSTyBEEQOWbFjgbU3jED6/Y05v3aJPIEQRA55s0lOwAA7y3fnfdrk8gTBEEUMSTyBEEQRQyJPEEQRBFDIk8QBJFjOC/ctUnkCYIgPCSe4GiLmo9MZyzPhQGJPEEQhKdc96+FOOIX/hmZTiJPEAThIbNW7Sl0EXSQyBMEQRQxJPIEQRBFDIk8QRQpvJApHYRvIJEniCKFNN4/cBTuxyCRJ4gihTTefzDkP4eSRJ4gihRy1xAAiTxBEERRQyJPEEUK2fEEQCJPEEULeWsIgESeIIqWQmZ0EAZogjKCILyGLHn/QROUEQRBFAl+yW4ikScIgsgBPtF4EnmCKFYyiUxHLIGrn5yP5TvqHZ97/sYD2Ly/2WXJSgOfaDyJPEEUK5kCr8t21OPj1Xtx56vLHJ/7m498hpMf+NhlyeyzeX8zlm133gj5Ab+4a0KFLgBBELnBJxqTFaIh2XT/OYUtiAsSPrn/ZMkTRIlSgESPkkLuSRVS77MWecbYYMbYR4yxFYyx5YyxW9TtvRhjHzDG1qr/e2ZfXIIg7OITQ7JkMetJFaJh9cKSjwH4Med8DIBJAG5mjI0BcAeAWZzzEQBmqe8JgsgTfvEJlyp+uf1ZizznfCfn/Av1dSOAlQAGAjgfwFPqbk8BuCDba3U2XvtyO+563XlQiyC8wCcaU7IkfKLynvrkGWO1AI4GMA9AX875TvWjXQD6WhxzHWNsIWNs4d69e70sTsG59YXF+NdnmwtdDKJE8YnGlCx+uf2eiTxjrCuAlwHcyjlvkD/jSr/R9Dtzzh/lnNdxzutqamq8Kg5BEERB8Yu7zBORZ4yFoQj8M5zzV9TNuxlj/dXP+wPY48W1CIKwSQaN8YcEFS9yCmUhBd+L7BoG4HEAKznnf5Q+egPAVerrqwC8nu21CIKwj91ZKCmVMkeYZdcU4GZ7MRjqeABXAF
jKGFusbrsTwP0AXmSMXQtgM4BvenAtgiBs4hNvQcnil8Br1iLPOf8U1sbA6dmenyAId/hDYkoXv9x/GvFKEEWKXwJ/pYpfLHkSeYIgiBzgE40nkSeIYsWuxjjVIuoh2MMvyy+SyBNEkZJJi91qtV9mV/Q7nJu/zjck8g441NKBA80dhS4GQdgikyXp1iL3i6/Z75hPUJb/HEqaT94BE379AYDOObc1UYJk0GJhkTuVHRJ5e/jlPpElTxAlilsR8ol2+R6/3CYSeYIoUjKJjFuRj5NT3hYJn9wnEnmCKFJyF3j1h3gR9iCRzwOUckYUgkyBV7di7RMD1ffosmsKVwwS+XxAGk8Ugkz1zq1YF7vR0twew09eWoL61mhW5zFrRAsxQRmJfB6g7i3hR8iSN+e5+VvwwsKt+MustVmdxy+3iUQ+D3j9UBS7JUV4Q6ZaQnny5gRUczuW5YPrl/tEIp8HvB7e7JO6Q/icTCKeSCj/nboQ/JI1kitCQSHyiazO45fnlEQ+D3j9Y/uk7hA+J7NPntvaL/U4lwUCsK+pHfub2t2fIA8EA4rIZ5sq6pceN414zQNed9uUykPr+RDZ4VbDsqnPdffOBODvUePhgGL7RuNZirwXhfEAsuTzgOc+eW9PR5Qs5JM3wztL3vx1viGRzwPeW/Keno4oUnKXQunuuM5C0idPgdeSxWngyXufvD8qD5Ff6lujWL2r0fb+uRoMVezTGoRUd00sToHXksVpC+91AMYvlYfIL5c+8hnO+tNs2/vnypL3i4WaK4S7JheWfCFuHYm8C5xaMkVu+BB5YpUDKx6wnyfvOIWyyOuzqvFZW/JmFKIXTiLvgqjD/FnyyRN+xP1Uw8VdAcW3I0u+hIk7TK0inzxRCOwOhnJKsVvy4r55ml2jPrOFuHUk8i5wasmTT54oBLmaT77YffJC22M5yJMnS76TUGiffHE/YoRX5Go++WLPrhGNWLbTGpi6a8gn3zlw2sJ7YfnIvYFi94kS6bH/++cmhbLYq59myXvorkm3LdeQyLvA6Y/vjchLr7M+G9GZsVv9KIXSHaIRzdpd45P7RCLvgrhjn3z210zoLPnsz0d0XrwS2c7kk8+nYCa8CryabSvAvSORd4HTiYu8+F255Rui1LBbnzLmybu8fiFc8vnURmHD5WKqYXLXdBKcB169dteQypcydutT5sBrJ7Lk83gtbQpmj84DJH8LSqHsJEQdjoTzROSRWmGI0sS+JZ8pT96lyBfAlM9nw6JdKstLkiXfiSlECiUFXgmB3Z5c7gKv7o7Lhry6azyy5M16SpRC2UlwvpiAx+4aMuVLGq9EtjNNa5BPcYyr3y/b3gMNhurEFMSSl9012Z+O6MR455N3e313x2VDfi15b65pPhgq/5DIu8Bp1N3zwCupfEnDbVa/XM0n7/XgPr/BNXeNd4OhtO9LKZSdA8cjXj2YsVQXqSdbvqQptE8+7oFQOe0N5zeFkntyTW7ymiz5ToJTS8YLUaY8eULgtU/e6ZLwXljhhRg1bv9ayn8v3TVaMLez+uQZY08wxvYwxpZJ23oxxj5gjK1V//f04lp+wLHIU3YN4SFeCZ7rPHmPe6Z2KEievIemfDJPvvO6a/4JYJph2x0AZnHORwCYpb4vCpxaUp48lOSTJ1S8CrwWcu6aQi+hmf5a6v8sz6N3serPnU88EXnO+WwABwybzwfwlPr6KQAXeHEtP1CI7BryyRMaXg2Gch14dXWYDscL72R/Sdt45VoxS5YoNp98X875TvX1LgB9c3itvOL04fBmxKv0mjS+pPF6Fkqn1cmL+uw0eFuIFMpsv6f++E7uk88EV/papl+PMXYdY2whY2zh3r1781GcrCmMT57y5AkF2+6ajDuIQT+5uX46HM/wmFeRFymU2WFmmHVmn7wZuxlj/QFA/b/HbCfO+aOc8zrOeV1NTU0Oi+MdTgNPXvgT9RWGZL6U8erXT2aR5N/9WIhJ/uzCc+iuKYSFlkuRfw
PAVerrqwC8nsNr5RWnXU3PffKk8SWN3QnCMi7k7XL4vhdGhuM8+ayvaB+3jZ8R+Xivegdu8CqF8jkAnwEYxRjbxhi7FsD9AKYyxtYCOEN9XxQ4t3y8za4hSpt01cmJW89tPnixZ9fkxF0j/hfAQgt5cRLO+eUWH53uxfn9hsOZhj1fNIQs+dLDzCo038/8dbpzOvbJu8yTl79DZ7DkvQy8FnBWAxrx6oZCdG/1g6FI5UsN2fJN9+vzNO+MuB3043ZaA/kwP09rIO6H0zTP1PNIr0V2TVZndAeJvAsKEagin3xpI8+XlN6St1853FqsXqwo5dySz7+7Jts5ehJ6lVf+kSXfOSjI8n8Wr4nSQJ75NJ3IOnHrJdy6a9T9mcNJb+TL+NmSF0XLdiFvmWRyTXGlUBYthcgr1gXUyJQvOWRLPn3gVXqd4ZxiX7eD+5xObCZfx+l03fkVedWSz1Lk5TJ3+gnKSgG7gS+z/b34XWmCstJG9smn0x4nawG7FZ6kJe9M5s1Ez/axeaz1omheumsKaZeRyNvEaQXVZzl4HHgllS85ZMvXfnZNbgKvIk8/K0ve6dw1+bTkpfnks1m03MwwK0QvnETeJnp/Yub9dXNJezA1KyevfEljP/Aqvc5wzmTg1VlZNHeNQ5WXr9MZUiiB7Kx5vSVP2TW+x2wBgPT7mx/rFrLkSxtdCqVNd00mknny7rLFmENbXpdd49TlWYDBUEB2fnkzs4x88j5G566x8cN7vfA22fGlTSwuZ9dY7+dsMJS9/VKP01Te2XFSj9bOiNdEgQwbz0TeLIWSsmv8i96Sz7y/9z55fwRxiMKgD7ymcdfoXtvzyRciu8aWoVSgOu+0x2GF6WAosuQ7B3Z+eKeNQubzJV/TiNfSI25X5B045d0OhnKbJ69PobTTG5Zf59NdI732yF0j4nLkk/cxZkGU9PubH+sesuRLGVnk0/38Ttx64pxOdUwcF3Co8k4Dr4VKQeQOGyMrzFZzI0vex2RTQb2w5CnwWtroA6/2smsyEVX9/E7didylu8bpBGWFGhsiZ8N5lkKpvSafvG/hDkXbc5+87jWpfKmhd9dY7+fEjy3SMt0OhnJKVimUhcquyconn9r7Ikvexzh1v3gdNPLL6DmiMOjcNXazazIYA2KAlevAq2N3jTNLPmEikvlALprTQVsyOsOM6//nExJ5uzgMxuQyT54oPWwHXuXXGepMNO7OJ+/eks/CXZPHB8DpFCZW6HVC7TWRu8a/OO3CkU9eYcaSnfj5a0sLXYxOT9ym8DiZM8m1Ja8N+3fqcpGv7TC7xsalfvXmctz6/JeOymRGLgZDuV2FywtI5G3i1F3ivU8+NVLfGbj52S/w9OdbCl2MTk88YXMwlINzRl365EWD47QWZmMo2bnWk3M34bXFOxyWyuy6ydfZDYaSX7u7Z15AIm8T+cex19X01ofeWS15whvk+ZLsj3jN4JOPu7Pk4wl3jYNOPG1MAFWoOu9V4NWskSJL3sf4au6arM9GdDbidmehdDCdhnCZOK2fWlaOw5qoF08bBzgIInuJfDu8WjhEC7yST97H6EQ78+5OR/c5OR8tGlJ62J7WIDXWZ4nbwGtc8+U7O06fJ5/Zki9URplXPnlTdxNZ8v7F6VBn+QeO2pmbOAPyOUjiSw+7I171C37bc9c4rVDaNbJw1zgNvGbqbXhp+Hg3QZn8mnzyvkd+YJwuGtIRy17kO2zOQugln6zZi+U76vNzMSIt+jx56woQc2BQuHXXaD75LNw1Xk9Q1tIRd3TudHgVeDU7lBYN8TFOFxLwWuT1gzLyU1GuemI+zvnzp3m5FpEeXZ58muoUdWAMRF0GXmNuA69ZTDWciYa2qPa6LRZPs2dmnE6/YIVZHIUseR+TSNi3KgD9g9PhtbumwP4azjke/nAtNu5rdnQM4R67g6E6Yvbracy1T96dYDkeDOVgUr7Gtpj2urUjO5HPdmUoMRC4w2Txdcqu6SQ4HZLd7oEl7y
ef/KGWKH7//hp8+x+f2z7GoySFkkUWm3S30kk9iely7+3/QElL3tmP6jhrxUF2zaEW2ZLP7nnL1icvDjdb6IUseR/jNIVS3sMbn3xhMg3MEJdvdmAxeZWKVqrY9sk7EO6obt1YJ2VRZ6+0f4h6DWfi6WRlqH1N7dprYcnvb2rHiwu2OiskvPPJ6xtcdw2jF4TyfsVOivzbOJ2gzAtLXm8VFFYw3QyH92ZO/dIlZlOQZXdN5nPqfcZBm5MHu5+9Mgt3TYZ990si3xZVRP7WFxZjztp9mHhYL9RWV9ouJ+ccwQBDPMEd11v52YyauWscnc0byJK3iT4zwM7+yddeWPL5dteky9IwDodvj8UzPrT5suTvfHUprn5yfl6ulU/s9iSd1JOoDT//5Y9+jsn3zdJts9urMOI4hdKmYVXfEsXaPU3aeyHy+5o6AADNHTHT46zLyREOKg2e01ko9fPzmPwW5JP3L3Kr7HTeDS8Cr/l216Qrs3E4/Kifv4sbnl6U9nzZDA93wrPztuDj1XtdH885x6ifv4N/zt3oYamyJ2Yz8O8kQB+LJxAMsLT7frZhP3bWt4FzjodmKsF2u2Ux4jRrxe5gqPG/fh//+myz9r5VFflISJE3pz3peIIjHAiklMEO8t7RWGpjSCNeC0g8wfGXWWtR3xo1/fysP83WXptZL+8u26XLKZet/fZodtF+AIjGUv17uaQ9mtmSlx+AD1bsTns+nn0754jGNvPfMROxBEd7LIG731wBADjU0oH73l7pyYC2bLCbXRO1mWqbSHAkOBAJKhKQScs272/BgzPX4Jbnv7Q9MCvlmiZZK3PX7cO/P99sur++TPav1KbW3YhqjTe1ObXkgbDaQDj92XXuGpNJ5Si7poB8tGoP/vDBGtz71oqM+5pZITc8vUiXUy4LsdcplPkwBtJZP8ll4+wPPMmXJS/YsNd+eqeMUcx/++4qPDJ7A95eutOLYtkmnuD4z7p9uvcCcSun/Wk27ntnpe44u5a8ECBh7Zo1HLLLbkd9KwAgFGC2grtvL92JQy0dum06n7zaGH37sXn4xWvLrAuqXSfjLujZJQwg1ZJvdCjynHOE1B5OzI5vVj5Weq0PbLuLY3gBibyKEGI7/juny/95MhjKpfXklvY0A0rkwJvcGCQSHO8u22kq/PnOrpGzLZwQNQQuRaZGNIsVguxQ3xLFvA37tff/mLMB33psHj5Zo7ie9Naz8nrVrkY88skG3Xns+uTFb5hO5Hc3Ju/hlv0tAICaqrKMlvyOQ6246Zkv8MPnvtRt16VQOnR52rn7vSojAIC2jjj2NbVr12tw0Kt7bM4GLNh0EOFgZndN3b0z8ZOXliTLyDn+8P4a7b3cSIoRueSu8ZD9Te248on5WLmzAa8v3m7phhGI35KZZBgYBcr4w5tZMzqffBqRf3vpTny6dp/l52bnyJU18MKCLTj+/g/BOU9ryXdIPvlWyRX1w+e+xA1Pf4FXvtyuljNZ0Gl/mu1oyH22OLXeBMZel9Ml7txy7VMLcOmjn2uN63o1kLjjkGJBG0e8WlnQdt1KmshrYpb8bMm2Q9i4rxl7Gtq0bZtUke9REcnokxeCtvVAi7btH7M3YP7GA9p75ytD6T9bt6cJ33tqoRZkBYDeXcsAKCNe6+6dif+sVxpNJ667e2coPSMReDXezmXb6/HgB4qQ72tqxwsLkyma6/c24e+frNfeR81Eniz57Pnn3I14f/kurNrViNlr9uLsh+bglucXp7hhOOdYsaMh+T5NC2u07o0ib+aO0WXXWDx4nHPc9MwX+M7j8yyvLTDLuU2HHUFds7sRW/a3YMaSnWhuj+EnLy/F9kOtaI8l0vrk5cBri3RvZqguDWFFy2Kwv7kDB5o7sGpXA077/cc42KzvynvNwx+tw5VPWGfZrNvTpFmnMvn2vX+6dh/aY3Es2a7Ec5rbFTEQAVFR14wrQ1k1wmZpe6b7Gdw1cqNx3sNzcervP9bNB7NdbWzaDJ
lUZnVR1IlAINlA/ubtlXhwpmTlWoj8/qZ2NLXH1HMnMT5zv3xjGWau3I0Fm5INR3VXxZI3jnj9n7dX2XIryt9LWPLG2TLP/cuneGjWWl3jkjxe/17+LZoN36m+NYraO2bgo1V7MpYrW4pG5OMJRbTvfnMFrvv3Il0FBZIt6bo9jahvieKf/9mE6X+eo1USrQ4xpcLf/OwXOOIX7+D95bu0H0i+loyxUr27bBdmrVQCkZFQwNKSdzItgF1f61dbD2HR5oM4/Gfv4D/r0/cQznxwNk564CPc/OwXeOOr5Io6rR1xU3dNLJ7Asu31uilqzSp7KMBw56tLMfLn7+i2t8cS+MP7a7BhXzM+k1wTXiLEcd2eJsxeY51lc8YfP8FJD3yUst1K5O2I/9rdjai9Y4bOlw4ovbVxv3wv5V4t3VaP7zw+D797d7X29IsgoehBtHbE0dIRw9NS9gjn0IQwXTnTGQNWlrzcaMvPUIt6vZaOuC6tUNTFaDyh1RnxHYLqdzBOchcKsBTRFYbDsffOxPSH5qhlsm6wgmr2i1yWqjLFJ292b/Y0Znbf7WlM9lxCwfSB1131bSnbjHVE9ucLQ1F8j03qs/+HD1ZnLFe2FI3IP/35Zkz/8xztfYvB+u7TTenKnfHH2bjwb3OxVLWcNuxVusXiB2JQKvKMJTvRFk3gun8vShF5o1FgbFBueHoR/vLhOgBARThoaXWtdxAc1FloJp9Pf2gOznpwNs7/61zc+YqypursNaki39weM7Xy5YeoJRo3LfNdbyzHuX/5FFukbnhrR+p+4WAAz87bkvJgtkXjOKBa8AxKTrtVV7qhLWragOyqb8PcdeaNF+c8xY0hN8jvLtuJ2jtmpPhoOed4bv4W1LdGLcXcWAcem7MBk/5nlna91o44Lvzf/wAAXlu8Xbfvr95cjqb2GM79y6d4Uerei2Dm459u1Hp7Te0xLN1Wr7k67p2xEmPueg+N0vU5eEp5BFbGwAV/nWvqSgiHRAqlsvP6Pck6KT9Doj60dsQtg8BH/vJ9AECDKvIBVeTlhITJw3pjeE3XlIBmeyyhlUHUryelNFZjgyUCo7KgBwIMkWDA1DUr3Iq7G9pw6SOfYbfkiorGE9h+qBWT7/tQ2yYyc+QelHw/RM/G7BraedX4Tiye0DJ+xNNbFlakt7k9jiPvfg9//Whdyvm8omhEfqehZb3l+cW69/EEx+b9SgXesLdZs2BE5RU/AmMsxT3T1K7/8YxCYhR5mfJw0pKPJ7jmpnj4w7V4/NMNpsfMXLEb1/5zAVo74nh98XZwzg0Pb6rMr9jZgNW7GwEAm9Tv+fdP1uOP76/G9f9eiI37mvHUfzZh7C/fww+f+zLFEpEt99aOmO79/368Dq8v3o63VGv/zleTC3MbKzaQFC8jTe0xbWTis/O34Nl5W3TCI3PU3e/jChM31nkPf4pvPzbP9B5E4zylAW5si2pWo2h4jZk3S7fX46evLMXPXl2qGzF6sLlDC4Y2t8dxqKUDTe0x1N4xA/fOWIldDW1a/Xli7kZNcFqjCSzdVq/91sleZBNuf2mJ5ps2ixs0tcfw9Yc/xacWDRmgGBnGY3fWt2L6Q3OweldyUJB8KxZvPYT731mFG59ehGg8oblLjJb8hn3J4+UeqngmWjpihkE+auOwtxkd8QRW7GjQGu5AgKWIYe+uEXU0qf47tccSuvlnPlq9B5+tl3p7aiZX7R0z8LeP12s9NlmsA0wRTzln3vhdXlq0DfM2HsCjszdgT2MbVu5swI9eWIzj7/8w5RgguUzhCwu2aD0MANh+MLWOG3VAuMRapGdE6/mo9ayxLYbGthgeeC93Fn3OpzVgjE0D8BCAIIDHOOf35+I6wh9nRX1rFCc/8LFULuX/zJV7cNxhvXVi1WIQ9Uv+/h/d+3TuGqOboywU1H78/3l7JR7/dCMiwUCKn/43M1Zg4eaDePWm4/G9fy0EANz+8hK8+dUO9O1WnjZrwtj1LZd6D39WhW
3LgVas3KnEIN5ZtgvvLNulO0a23Fs64jqf/O/eta6AZiJvzPiQz7tfFb6yUBAAsFyKiwiEWC7YdBCA4gb528fr8dPpo7Vud2N7DN3Kle75W0t2YHhNVwzsWZFyrhueXoSvttbjv88apY2KvOCvc3X7iBjCW0t24uJjB2nbv/XYPOxQG8O56/bhwZlrcM6R/XXHrtjZgGfnbUF5OGkvbT3Qgq8//CmqykNY9POpKYL8zUc+w6b7z9EJlMCYdmhGgnOdBTtvw358tHovVuxswIqdqfdT5p1lu/DF5oOaL15kpIhGU7aC5XILF0yLhSUvmP7nObhy8lAAiugaxbMiHEQoyFJ83e2xZC8PAL775AIMq65ETVUZ9ja2gyNZR3/77iqcPa4fAL3bJMBYyr0OBxmi8WSCQE2V0qPfeqAFk+/70DIA3K1CqVtxrvQqf/LyUt3n26TGa8GmAzhyYHe0GoxD8cw263phye8LAPWtuY1NATkWecZYEMBfAUwFsA3AAsbYG5zzzMnoDjHr2stsMgTZDjYrlXn2mr2YvWYvrjn+MO2zW57Xp34JV8kTV9fhn//ZjAbpQfhgxW5dRdlr8P2FgkzzLb61RLGEzQKx/5izMeV7fKwGZRZsPGAIqCVf72tqx7cMs0GKzACZVbvSP/zyda/71yIM6dUl7f6CFgu3gRnN7THtITyoitnHq/diT0Mb+nQr18rxwYpkA8Q5xzcf+QwHW6KYcni1tn1/Uwfmrt2HG5/5Qts2sbZXyjU/36BYzfdYjH/YuK9ZZ5XJPcCVkmDOV2M3Mwz58hepLprhNcm5UYTQNrbFcNz/zDS97lVPzNfyumWenrfFdH8ZzvUDfC591Hw2UFFPjO65gy1Rzd0hRE9UYVkkZSs82UsxBl6hC34CwLwN+vcyFZEgAoylBF7bownsPKRv9HY3tGFYTVdF5LnegBKuoJ1SQ2mWCNWjSwR7G9s1Q0wMTNx6sNVU4Af3qsDWA60YM6Ab5qzdh3V7GnHU3e+n7CfXmUv+/hkAYOqYvrp9RLyguV225JVtonef69RcIPeW/EQA6zjnGwCAMfY8gPMBeC7y6VwmALBOdWUI5CALoHS3ASW6/9U289WQJg+rxtOfb9G6q/WtUXxftboFRussEgxolcksPdOIbGUKP+wfPliDAd3Lte1C4xMJjhcWbMWa3U26c5gFnjKlbs1cmRyxuquhDbtMrEwzZJHNxG/eTg7cWbT5oPb6T7PW4saTh2Nwry54Yu5GXc/h3hkrcVDtxv/8taQ19YvXlqW4NOZvshYXK079/ce4/qRh2vtMqbZWyPEVOdB+sMX8fJ9YBIXTBYsF3GDJZ8IYX/nZq0s1ka2pUuqVMERkq1Mn8pIlH5KyZjjnmsgJth1UDCqzYGdFJIiQOvmXzKxVe1Iyrpo74ujXvRxLt9enZBQJ//hOqYwBE5XvURFWRF4Vd/FMrd/TlLIvANx48uFYvPUgvnfCMDzyyQass9jvq22HUrYZR313pLXk85fFlWuf/EAA8lyf29RtGoyx6xhjCxljC/fudT/nSCaRN06Lu3pXo+l+mywyXv506QRURIKIBAOaP63BRBBW7tSfV+maqiJvI+V6lUW5dkjdUiHY98xYYerLa0uT/mjFsu3pLX0v2GySsggo882IdMcdBh/u458mg2/y90rnsz7MwYyDgLMsJzf061aeeac0BAz1JsGhC8RaIeqJaHQurRsMQElnFY1Z0pJXstO2SRZqvdRAieu1dsQtB+ZNG6u4UMSzZuzVAoq7xizj7J63VuBhk+CjuHcc+qk2hHX8xZZD2rZhNam/e88ualqlKvJCcK3Smmuru+B3F49HtwrF/rUaJ2El/jKaJS+5cZKT+qVe30qTsqXggVfO+aOc8zrOeV1NTY3r87R0xFDdNYJJw/Rd9vsvOhInjqhO2d9qLvQdJqlRANBXrWzhUEDztZlZfQsN1mQoIFvy3iAerCfnbrK1/+F9umZ9zSev/hrGD+
6R9XmsEELbEUugX7dybLxvOh694lhbx77/o5N07y86OmlHVISDGY9fsOkAThxRjSP6VTkosX3eu/Uky8/Sla8yEsRZY/tizk9O021ft6cJS00sSSMiKCoE7ajB3VNceeL6nCv+dDmV1iyDpKk9pnPRyXMSVZWHdLEJMyrCQZSHg7aX6KtWBzgpA/SSx8i9tqlj+uLZ7x+HKyYNTT2+KjkK9r63V+KvH5kH+gUizhNSUzSNDawV4wd1N5Q7oumEcIGFAkx7ds0amefmZ3bVuSHXIr8dwGDp/SB1m+e0dMTRrTyM66SuNwBcNnEIBtv0L1vBmNLCA4q/W7TCr3yR+lVkywJQrMqkJZ+sMd86bkjKsVdMGooPJMEKBhj+cWUdnr9ukm4/p/PJ1/a2/v53nTsGfVRrLh19u5Xjv047PON+NRnOddvUkbr3D3/raJSpQcCWjhjqW6PoXhEGYwynHtFH22+YwTr/1XljceKIavzzu1/DyL5VuPWMETh2aE8AQP8eFZh35+kAgLPH9cPt00Zpx714/WRcPnGw7lwHW6IY1LMLKsu88V5WRoK4ekotAEXUKsushbyuVi2z5I4TSQRjB3THI1fUYWAPfUD5359vxosLt9kuj7CAy0JBdK9IJigcNai7ZniYDd83irwoo2wgHZQCxZFQAN0rUuMMMhWRIMrDAdu9zfGDFfGUA69GhtVUYsrwalOru0ZtJBZsOoBHZpsnBMiI8gtxD5qovNm2ruX6ujOsuqvmbxci370irD27xkkLb5s6EnedOyZj+dyQa5FfAGAEY+wwxlgEwGUA3sjFhVo64qiIBHWBDGF5VmX58H5r4hD07648aGWqJd/SEdP8+ADwvROUwO2WAy3oql5vYI8KDOxRYTpPR3ko9cHvWh7CiL5VuH3aKLxzy4lY95uzMXVMX0wa1ht3nTsG35mkNAwJnjoOIB1mvkrRtY6EApY51/Jh1WrqmxnC2nrsyjrtu1tx0ynDde/PPWoA7r1gHADgyy2H8N7y3eiuBiTDwQB+PHUk/nDJeLz5wxMwSM2e2XT/ObhqSi3+fe1xOGWU0hDcesZIvHj9ZDx02QRcMGEA+nYrx+s3H4/7vnEkbjrlcMy783Tcf9GR+FptT81KkwkHmZZOCEAXA6kb2lPL5rDDsl+dhbvOHYMrJg3FKzdN0QbWAMDvLxmv23e6mq1z6hF9tPstMl6y7fpp7pq4IihloQB6qPf2hpOH46UbpkDcCjuDvUb376a9Fn55OXYTCQXQoyJ9llswwFAeCpomShgbs1tOH5FsNLj1fEq9K62vKQL6Iq1Y8PhVdThyYPeU/avKk26aADOPo5n9LOWhICYP6629P6y6UovdCbeu3AAaLfmRfat0I4S9JKcizzmPAfgBgPcArATwIud8udfXmbdhPz5ctQeVkRCOO6wXBveqwFs/PAGv33w8gOQPJxDWZjd1+z0XjMOrN01JOe9ZY5VouVwZwmr6oxz4uuTYQbhz+mjt4ewSUQS8rrYnAmqQyWh9V0SUWy8HsYRA3nTK4Rjdv5vOMrnmhMNwxaRaAEplH3PXewCAX58/Fu//6CQMr6nUpf/JGNPK3vjB8ehZmaxwRtdVMMCw6p5pmvgCQM9Ka5EXFl4kFNAswmHVlThv/ACdP3ruHafpBE+4VarV3+Pbjyl58fJVfnj6CHzj2EGoLAth5m0nY9mvzjItgyj3+RMGatcYP7iHlqrZt1s5Lps4BIwxbcg6AAxVezlThlejsT3pfps2ThHfV26agpdunIK/fcfaddS7MoILJRcRYwyBAMM9F4zThLFnlzCumDQ05Te66JiBuHLyUNx48nCtMe6jBkOFDx0A5tx+Kl66YbJlGcwQVU6bejcU0HorVeUhREIB7ZryoLZeFqI5un/SnRUKpuapR0IBrYGW3VDCCBBlKo+Yi/zcO07T7s9Rg7rjplOHa88AB9d6JPdfdCQurRuMCaoR16vSuvdY3TUCxlJdq6eP7ov/On
2E9v6/zxyJuqE9NXcNoLhszHo4cv0RlIeDeOqaifjRGSMxvKYSVeUhbXpwMfiuqiKc9MkbejKiN5sLcu6T55y/zTkfyTkfzjn/TS6uEUtwHD2kB74+vj96dIlgzu2nYZwkzEI8D+/TFZvuP0dLdxOVLxRgOHpIT7z1wxPw1DUT8ez3j8M36wZhgGpZHCFZMOFgANFYQpdL/+vzxyEQYFpLffnEIRjVtwo/OmOkJuIJrreMhSUv53ZnsoKFyMqiXRYKYGTfKsz68Sn4/SXjdUIgGjdZvADlIRYPt7Hx+fr4AXjnlhNRHg7qLNtwMGAp8sIPGw4GtHt6wynD8efLj8YfL01arkZL7Y+XTgCQ7FILjAPbktcJZrxHdpDv+Vlj+2HVPdMwbVw/nRBcc0ItvrrrTBwzpKe2bc7tp+r8/2eNVXzBi34xFQ+q38WKL+86E/eojeZXvzxT214WCuLX54/D4F5dtGkALp84BC/fOAXfkBqEwb264MhBqZan4BvHDMLvLj5Kt83o/y0LBdBVdR3JFiugH+9gltoJAL0ry/D18QOUc6snl4OrZcGku2Z4n6R7TY4DcEC15M17DmLMxxWThqIsFNTcJvKMp6P7d8NvLz5Kq5/pLPmq8jAqwkHtt60qC+GM0UrvTxhj4wZ2ww9OG4GXbpyis6YDAfOVoYyuGUAZhBUJBXDLGSMw68enIBQMaCtvNbTGUKlmFRnjJIJIDkW+KNZ4Pf7wahx/eGpwVdC1XO9nG9KrCz7fcADVXcuwYV+zVmHlhmHK8Gq0dMRw0sgazWIAlB8jGue6iLkx2DRhcA/8SPU9C2Fs6YjpMhfKVUtHbsEzCZhoMGQrqNwQuKur7YUPf3wyGGOo7hrBkXe/j2tPOAw/euErbZ/elWWayBtT2a47cRhG9lUsNmGxaP5Ji0wDcf+CAYbxg3pg0eaD2gAjM1eRkeE1XdGrMqINhsk05iFbrp5Si9cXb8eSbfXoXRnR7qHIJnni6joM6pkaxxCxHVHWv3/nWFezVHavCOOZ7x2XYr2JU1WVh7T4gkxZKKgN7jHyy/PGoFt5GLcbpr4FklZjJBRAZUSpY+K/+G3le96rMmI65UZ5OIizx/XDm1/t0E1zIAgEGHqoIj+iTxWWbW/A2eP64auth5In4Vz1ycdNJw0TlrN4boS7JMGBqHA7qc+bcIfIsZSHLpugG+vQtSyEinBQS2V959YTtd9W/O5W9TrImG7hD/mcexvbcdSg7liipluLGIwgEmRS4DWKbhVhMMDSkjfrHXhFwbNr8oEQTyE4w2q6IhhgqFHns7GayKlLJIRTR/XRbRPuGpHCddMpw1MedDn4KCrrzw0LI4iGQfb5jeibPgsm2WAkHyyzbt6wmq44rLoSVeVhbLr/HFx4dNIifPMHJ6AiEpRmOdQfWyY1WMLiFfuETAZZAUmLkTHg5lOH49RRNbhgwkBdmdNREQlimuTzfuyquozHZEMwwPDi9ZNx29SRuPr4Wm37lOGKoXDC4emzvF6/+Xj877ePyWoa4uMPr0adYfCWqJ/pArWyoAnrb87tp2puhi9+MRUv36h36/z4xcUAlEZCPAsiThQwteSTlvG/rpmovS4PB3QGj/G4BAeGq5lclWVBvHzjZDx46QTdferXvQLl4SBiCY4PTWZg7KFeW3xPplnyHO+qo7SFC25Enyq1vMmex/kTdBnamltKpGzqLWZ9g2IkGGCmsQpxD3969mi8eP1kbLr/HBw1qIdun1AwAM4VI6qhLYpu5WEEAslBYB3xOIIBphluZMlnieiaigp9xaSh+FptL3y8WqlkTlwAQlRF9+8MaZTbtHH98LeP16OfFLQTVoJxUq0y1YqQdUIObJkhRFYn8jZSBAHFYktwqXFRr2v0OcqNhnigI5pFnyzsw986Gj949kswlrQYGZQ5vZ/8blIY7FjyANBX9UMfM6RHygOTC8rDQZ1PFgAevHQCftLQlvGBG9yri2nGVrfykKNBSkaE1l
SEretjZSSkzfFyy+kj8K2JQ9BTclf0qoyguT2ZWw4k04LLJJ+8CLabWfKyyFdEkvWrIhzEgB4V+MMl47FubxP+9vF6/fgUzjFKTUPd3dCOY4fqG7F7zh+LM0b30caifM8wkBAAbp82CkN7d8HU0frRo41tMby2eIf2PQDg7vPG4tzx/TGir3Xqa1V5WCfiZcHk9xENytGSS04mGGCm7pqLjx2EpdvrMaR3lxQXpEBY5rsb2vDe8t2oG9oT5arbKBpPaKmcyrPFda5RrykJkRe+N5FJUFmmdIfHDeyGft3L8fWjBtg+l/AvitQx0e0FgP8+cxSumlyrCzSJClZZFtJWj5fPM6BHBW6bOhJbDrRk7LKJc8lzZNgN2ERCStqa6KJefOwgPDl3E04f3RezVu7Rpv4tk7J+wsEAnvv+JC2lT1x/QPdybQj3d6cchi+3KqNXzfRc63Zn0HqRz+zFUoluqYgEHQ+kkpn/szOyur5oELtErBtu2SAJB5lO4AXaveb60bdloQBuOnU4Nuxr0npawsrWibx0TrlOipTWbxw7CM/M25xyXIIDJxxeje9MGoKrpySnCRHlOXlkHzDG0ubSd4mE8N3jU4/dejA5kE7U+YpIECeOSN/rqioP6b6D3IAPr+mKN35wvKVxFQwEUiz5AAOunDwUFx4zUBekNSKe73tnKIP7V+9uxKRhvdEejesHBbLUcnlNSYi8cDcYfW9loSC+fVzqAIp0iAoj/LfyAxkMMJ0VL7YB+sagW3lIm/tiWHUlzhxrLz1PpP7J1pPRJ29FJKiIvHDHjB3QHZvuPwcA8PT3jsPwO99W9jNUtsnDk2lh4ruEggGUhYJYeveZqIyEsGJnA+55awXGDkgNDIp7Llv08+48PaVBE8FXN6N1/YLd38IKcYvSirwU9AuapIIq50lmpMhTOUdCAfTvXoFnvpccdyF+Fdm33kvKvBLbJwzuoft+ws2oF3mOcDCAey840vR7CbeoWe/TasCeuM7WA8l4lt3eK6CIvGzJG+t3ul5jMJA6t0xVuTKGI53AA0mdEEZTY1tMmzhQjoOJonXq7Bo/MKZ/N4zu3w13fT37wQaikhxSZ4/LNIAmacknK+aM/zoRFx4zCGP6d8M1JxxmdWjqudSnRZ661Czf3oyJhyldZ7m7aiwjkL6yJUVe+V+l+hnHDeyOF66fbCpyQofkBrZvt/KUND2RRmlcgKWU+Jrqo69II/JyfbMIkWjCzbk+E8uspyga3zbJ4pfdNSP7dkV5OICfnn2E/hrqRWSDw2qI3k/PHo2yUEAbNW4c5VvdNYL/u948PVTUHzNL3g4V4aDm9w4GmK0YkSDImG5ytx9PHYn/s5nGKq4j0rTvu+hIlIeUgLNoGEf06ardf7Lks6QiEsQ7t5zoybnEg3LQxJI3Q1Qw8WMeNai75s9922GZgupT3abzydurHA9ddjRW7WrU8pitsPMAOfEf2nXXCEs+3QLixc6fLz8aa/c0oSqNldhVMhaCFr9D0nLWi7zZFApCRI3ZNYLeXcuw6p6zU44TjXarwZI3Y/qR/bVBX0CyxyPiRM9fN9nU7QQkLXl5TqOQA6FmLCnsTv3ewSDT9Sx/aIjhpEOUUTSCl31tMFbsaEBbNK7ds1+dPxbff0qJS1B2jY+ISO6aYIBlFMWA9GMzBvz72uNcX9tYcQD7LgIRh8h4jTSVTQShnFTIgIm7xgwRx8jkYy1mKstCKdkrKftEZEs+s9gJd80DFx9l4b9PFetuGaYmUI5T/usseZuzbQiffIIDJ46oTju3ktDzxrYYelVGsPG+6bazmub/TJnawm0GS5CZZ9fYOlY8q9E4IqEAGFO0oi2a0BrU8nBQey7s/JZuIZF3iOyu6RIJZqxwooI1t8dw5MDuGef2SIdccQRe+fKMmSZmiKCoVSqlGVolzmB9VUSC+OT/nYL7Ljoy7X6ljuyusbJohfXLeXIpvjEDzIOLmrtGEms72WZBqV4P6F6Or48fgB
tOHp7hKAXZMCnL4G4Uz0t9axQV4czPm4wYORx0KfIBi+waO4SkXneZahQpPvmku6YiHMS/rp2IS44dpM16mQtKwl3jJZq7pjmqs6qs0B6Gjpgu68YNWhdZNxAru2Cf4LapI1MmDzPSS/XVCv++Hey6awBgaG/3mS2lgizAVnOdyIFOYclbBQrFKWRL3km9bo3GUV1Vhr9cfnTGYwQVOpFPL7xy7yOTa1Qwqm+VthQmkExYcGoQhQLMdbZXUEqSEI1LeTiAhNTwloeDGN2/m2UKp1eQyDskIuXJd0kzaEWQtHjitv3nVgQCyqRJmQZD5Yra6kq8d+tJulWQMmGWXUO4x54lryBb8sb5mwSaJS/5nu1YvMKibumIa2Mc7CKnUGaqv+GgMqnaoZaobZH/vxsnY7+UruzakpcCr1OkLDM7hAzuGiBpkIklHu1Mg+0F5K5xiJwnb8viYUlL3gurOxQI6DJQchmwMWNUv6q0fnsjIrCXown2Sg458Go5a6EUeN2tDu6ychMygyV/w8nD0bdb5h6nlunVHnNsvMguGjvCK3rAdp+fbuVh3XgH4TpxHHgNMC39+s7pox0dG5B63eI7lmkir/SuMs297xUk8g4RFaWlI27LshBWBOfeuFYCgeRKM/I86X5FfH8nqWuENV0i9n3ygDIn/MAeFZa+bGO++4+mjrDl9xZ62dwRz+hXNyKniNrpifY2zO7qFFH33LhrBE57oiHJnRUJ6t1FBzWRJ0vel8iWh51FJmRxq/Cg5Q4FkgsuOB3IVQjEw5HNPC9EElnoMgoP59h+sNVy6L1yDuX/vqYOVEaCtgVb/j2dWqSyuNmx5HurI67TjR9Ih9vsGuOMlE4Q6c6tHanumvrWDjCWP1cr+eQdIrtHnFjygDctt92BS34jlylipUS5VOcsLXl1czzBseVAC04/oo/pfkBSyPY0tOkm1rvk2EGmM3EKgsx9vS4PyT75zMcKV1O6OX3S4dYnL39Ht5a8LvAqMvNaoigPOcsUygYSeYfIIu8kCwHwRuTlBzuXkxp5RUKb7bDABSkS5GCdlQtMbH1ryU4caO7QLaNoRJxif3OHbiHsBwwrWBnR12uHbpBgQJ3Yzp7wisFhbt01yewaZ8cHde4aZ9eUZ4wNB/WW/MGWaN788QC5axwjW89OsmsAvQXjFnG+cJDlbLkwLxEWULWNdWSJzNgSefWeL9x8ENVdy7SlHtPtC8BRim9AV6+di6+oF3Z6o2JqALdxHXGcU3ePfD2nVre8xKQ8oRqg9Jq62DAQvYIseYfIFcXWoBGpcjiZWMkKzb/YCax4QJmn5jcXjsMZhqljCXdUROxb8gAwpFdFWmNAdkNkWoRdf1zytZseaoABcdiz5EXsy7jAjV3EM1OZhcg7ddeYHSt6IvubO3Bamt6V15DIO0RORbPTGsujQz3xyYt0sE7kj+8MAeLOgpwaaEd4xAL0Vshi3TvNWqlGdMaLi7qoZPVwWy4U4e6ImazSZAchuE6t52zcNbJbVSwiLl/fbBHxXNF5lMIn6HzyNtw1gSyyEMwQD1dnEnnCO8LBgDaBmNX0ErL29++efqCSnG7Z3cHQ+kCWsabk0n2ZjxWCabbsoR3EfXLsrski8Co3EGKxc7knMczBgMJsIaXIArPVgYzIvjkvFqF2mylAFA/csHSfEVm4xfS+VsinyDRDqUy2xovwvBgXcTdD1Hm37hrRAcjGXeM0EUY+VqQ8d5Gefye9pmwhpciCkWmWHRPI+bVWQ8udIBqNzuKTJ7xHW3M3w4hXIPNYDlmsMy2EISNXv2xiTXbiAMKYcjKdhoyYf6YiK3eNuxRKADh/grLynBw071npfqJCp5BPPgsGZOgKA0ZLPvsfNmnJ52e0HOE/RFqqVeA1LLlxMk+FnXztZIbUQBZ58jJ2RH7SsN548frJtqbKNkMsgeg0BdOrwOufLp2Qss24aE4uIZF3wdVTarFmd6Oj4d+AN+6aUCcMvBLeIuZttxJ5OZ
iZaV4ZnSXvVuSzqIt2GxYnM58aEQvROBX5HpL7ynngNXlPzHRCXn0r15DIu+Du88ba3ldeh9MLd402Dwe5a0oWnsGS14/NSC9s8imcWPK6kddZWPL5GPXZrlnyzp6/mq7JnrrTcgYzrLmQr3lrABL5nCNH6D2x5CnwWvIkMljyMpkseVm8XLtrXNTFT/7fKTjQ3JF5Rw8Qqzs5DbzKrqRsUigLDYl8jpFbdDsTmmUiHwv/Ev5G88nbsC4z5aG79a3Lvnw3VunQ3pV5WyRG+OSd9jj0Iu/eJ19oSORzjPwgeiHMwiffmSYnI7xFJBLasuQz1BO3UpTNBGX5RrhrnD4zOpF3KNpWlvyvzx/rOhXULSTyOaZaTJPq0YMgfPxkyZcumXzyMpkE2O2KXbLo+d3g6HAp8rL7yu0EZUaunFzr7EQeQCKfY0LBAJb/6izP/I+dbe4awnsy5slLZLTk1VP0dDAQCtBb8mGfi3xy0RBnhlY2i4aQu6bEqCwLeeKPB5I50GTJly7a9M0eBF7FuSYNc7aGqSx6YR8Jmhl//86xeOWLbRjcK/08PkbkKUxI5Im8IeawJ5EvXbgDSz5TCuXQ3pV49IpjceKIGkdlkAOvTtb8LQS11ZW47UznS2XKcwM59WqFnC4llUNI5DsZokdAIk94YckDwJlp5pu3QrZUrSZK6+yEA+4teR8Z8jR3TWdDLFRCg6EIO5Z8rmI3endNcdZFufFyKtp+WtO4OH+dIkYLHvmoEhGFwY51mStXinztYrXk9SLfeb8jiXwnQwS5YnF3CygQxUMhR1Xq3DV+8k14SFg3/0wBC5IlJPKdDJGuFsvzgArCf6TL4DiiX+ZpsLNBvrSfXBNeIsc83H5Hp6mpuYACr50MkdYlBngQpUs64Xn5xiloao/l7NqdYRH5QvPKTVMwqIeztM1ckJUlzxi7hDG2nDGWYIzVGT77KWNsHWNsNWPsrOyKSQgiQbEUGol8qfJfp4/IuE9lWSjjqlDZYGfenFLnmCE90SeHv4FdsrXklwG4CMAj8kbG2BgAlwEYC2AAgJmMsZGc83iW1yt5hCVPIl+63DZ1JG6bOrKgZfDTYB8iPVlZ8pzzlZzz1SYfnQ/gec55O+d8I4B1ACZmcy1CYfzgHgCAU0b1KWxBiJKGDPnOQ6588gMBfC6936ZuS4Exdh2A6wBgyJAhOSpO8TC6fzes/PU0xyvPE4SXkLum85BR5BljMwGYDYn7Gef89WwLwDl/FMCjAFBXV0cpIzYggScKDblrOg8ZRZ5zfoaL824HMFh6P0jdRhBEEVCsaZPFSK7y5N8AcBljrIwxdhiAEQDm5+haBEEQhAXZplBeyBjbBmAygBmMsfcAgHO+HMCLAFYAeBfAzZRZQxAEkX+yCrxyzl8F8KrFZ78B8Jtszk8QBEFkB01rQBAEUcSQyBMEQRQxJPIEQRBFDIk8QRBEEUMiTxAEUcSQyBMEQRQxJPIEQRBFDC0aQhAEYcFjV9Zh84GWQhcjK0jkCYJwxe8uPgq1vSsLXYyccsaYvoUuQtaQyBME4Ypv1g3OvBNRcMgnTxAEUcSQyBMEQRQxJPIEQRBFDIk8QRBEEUMiTxAEUcSQyBMEQRQxJPIEQRBFDIk8QRBEEcM454UugwZjrBHA6kKXIw3VAPYVuhAW+LlsgL/LR2Vzj5/L5+eyAd6WbyjnvMbsA7+NeF3NOa8rdCGsYIwt9Gv5/Fw2wN/lo7K5x8/l83PZgPyVj9w1BEEQRQyJPEEQRBHjN5F/tNAFyICfy+fnsgH+Lh+VzT1+Lp+fywbkqXy+CrwSBEEQ3uI3S54gCILwEBJ5giCIIibvIs8YG8wY+4gxtoIxtpwxdovJPqcwxuoZY4vVv7vyXMZNjLGl6rUXmnzOGGN/ZoytY4wtYYwdk6dyjZLuyWLGWANj7FbDPnm9d4yxJxhjexhjy6RtvRhjHzDG1qr/e1oce5W6z1rG2FV5KtsDjLFV6u/2KmOsh8Wxae
tAjsp2N2Nsu/TbTbc4dhpjbLVa/+7wumxpyveCVLZNjLHFFsfm+t6Zaogf6l2ashWu3nHO8/oHoD+AY9TXVQDWABhj2OcUAG/lu2zS9TcBqE7z+XQA7wBgACYBmFeAMgYB7IIyCKJg9w7ASQCOAbBM2vY7AHeor+8A8FuT43oB2KD+76m+7pmHsp0JIKS+/q1Z2ezUgRyV7W4A/23jd18PYBiACICvjM9Prspn+PwPAO4q0L0z1RA/1Ls0ZStYvcu7Jc8538k5/0J93QhgJYCB+S5HlpwP4F9c4XMAPRhj/fNchtMBrOecb87zdXVwzmcDOGDYfD6Ap9TXTwG4wOTQswB8wDk/wDk/COADANNyXTbO+fuc85j69nMAg7y8pl0s7psdJgJYxznfwDnvAPA8lPvtKenKxxhjAL4J4Dmvr2uHNBpS8HpnVbZC1ruC+uQZY7UAjgYwz+TjyYyxrxhj7zDGxua3ZOAA3meMLWKMXWfy+UAAW6X325D/huoyWD9khbx3ANCXc75Tfb0LgNlqyH64h9dA6ZGZkakO5IofqF36JyzcDX64bycC2M05X2vxed7unUFDfFXv0uhbXutdwaY1YIx1BfAygFs55w2Gj7+A4oZoUv2SrwEYkcfincA5384Y6wPgA8bYKtWy8QWMsQiA8wD81OTjQt87HZxzzhjzXZ4uY+xnAGIAnrHYpRB14G8A7oHyoN8DxSVyTY6v6YbLkd6Kz8u9M2qI0sFQKHS9s9K3QtS7gljyjLEwlBvwDOf8FePnnPMGznmT+vptAGHGWHW+ysc5367+3wPgVShdZJntAOSl6gep2/LF2QC+4JzvNn5Q6Hunslu4r9T/e0z2Kdg9ZIxdDeBcAN/mqiPUiI064Dmc892c8zjnPAHgHxbXLGjdY4yFAFwE4AWrffJx7yw0xBf1zkrfClXvCpFdwwA8DmAl5/yPFvv0U/cDY2wilHLuz1P5KhljVeI1lIDJMsNubwC4kilMAlAvdRPzgaUlVch7J/EGAJG1cBWA1032eQ/AmYyxnqpb4kx1W05hjE0DcDuA8zjnLRb72KkDuSibHNe50OKaCwCMYIwdpvboLoNyv/PFGQBWcc63mX2Yj3uXRkMKXu+sylbQeudlFNfOH4AToHRHlwBYrP5NB3ADgBvUfX4AYDmUzIHPAUzJY/mGqdf9Si3Dz9TtcvkYgL9CyXJYCqAuj+WrhCLa3aVtBbt3UBqbnQCiUPyb1wLoDWAWgLUAZgLope5bB+Ax6dhrAKxT/76bp7Ktg+KTFXXv7+q+AwC8na4O5KFs/1br0xIogtXfWDb1/XQoWRvrc1E2q/Kp2/8p6pq0b77vnZWGFLzepSlbweodTWtAEARRxNCIV4IgiCKGRJ4gCKKIIZEnCIIoYkjkCYIgihgSeYIgiCKGRJ4gCKKIIZEnCIIoYv4/oHaEsb7LLV8AAAAASUVORK5CYII=\n",
|
| 204 |
+
"text/plain": [
|
| 205 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 206 |
+
]
|
| 207 |
+
},
|
| 208 |
+
"metadata": {
|
| 209 |
+
"needs_background": "light"
|
| 210 |
+
},
|
| 211 |
+
"output_type": "display_data"
|
| 212 |
+
}
|
| 213 |
+
],
|
| 214 |
+
"source": [
|
| 215 |
+
"plt.plot(t_vec, eye_ratio_v)"
|
| 216 |
+
]
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"cell_type": "code",
|
| 220 |
+
"execution_count": 10,
|
| 221 |
+
"id": "d0c9ca0e",
|
| 222 |
+
"metadata": {},
|
| 223 |
+
"outputs": [],
|
| 224 |
+
"source": [
|
| 225 |
+
"blink_closing = (eye_ratio_v > th1)\n",
|
| 226 |
+
"blinking = blink_closing.copy()\n",
|
| 227 |
+
"n_smp = blinking.shape[0]\n",
|
| 228 |
+
"blink = np.zeros((n_smp,))\n",
|
| 229 |
+
"\n",
|
| 230 |
+
"befor_closing = round(n_smp_blink / 3) - 1\n",
|
| 231 |
+
"after_closing = round(2 * n_smp_blink / 3) - 1\n",
|
| 232 |
+
"\n",
|
| 233 |
+
"for i, bc in enumerate(blink_closing):\n",
|
| 234 |
+
" if bc and (i > befor_closing) and (i < n_smp-after_closing):\n",
|
| 235 |
+
" for j in range(1, befor_closing+1):\n",
|
| 236 |
+
" blinking[i-j] = True\n",
|
| 237 |
+
" for j in range(1, after_closing+1):\n",
|
| 238 |
+
" blinking[i+j] = True\n",
|
| 239 |
+
"\n",
|
| 240 |
+
"blink[blinking] = th1"
|
| 241 |
+
]
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"cell_type": "code",
|
| 245 |
+
"execution_count": 61,
|
| 246 |
+
"id": "fd3bb860",
|
| 247 |
+
"metadata": {},
|
| 248 |
+
"outputs": [
|
| 249 |
+
{
|
| 250 |
+
"name": "stdout",
|
| 251 |
+
"output_type": "stream",
|
| 252 |
+
"text": [
|
| 253 |
+
"2 3\n"
|
| 254 |
+
]
|
| 255 |
+
},
|
| 256 |
+
{
|
| 257 |
+
"ename": "NameError",
|
| 258 |
+
"evalue": "name 'a2' is not defined",
|
| 259 |
+
"output_type": "error",
|
| 260 |
+
"traceback": [
|
| 261 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
| 262 |
+
"\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)",
|
| 263 |
+
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_12744/4152700513.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[1;32mdel\u001b[0m \u001b[0ma1\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0ma2\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 6\u001b[1;33m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0ma2\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
|
| 264 |
+
"\u001b[1;31mNameError\u001b[0m: name 'a2' is not defined"
|
| 265 |
+
]
|
| 266 |
+
}
|
| 267 |
+
],
|
| 268 |
+
"source": [
|
| 269 |
+
"a1 = 2\n",
|
| 270 |
+
"a2 = 3\n",
|
| 271 |
+
"print(a1, a2)\n",
|
| 272 |
+
"del a1, a2\n",
|
| 273 |
+
"\n",
|
| 274 |
+
"print(a2)"
|
| 275 |
+
]
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"cell_type": "code",
|
| 279 |
+
"execution_count": 11,
|
| 280 |
+
"id": "a348192e",
|
| 281 |
+
"metadata": {},
|
| 282 |
+
"outputs": [
|
| 283 |
+
{
|
| 284 |
+
"data": {
|
| 285 |
+
"text/plain": [
|
| 286 |
+
"[<matplotlib.lines.Line2D at 0x1d8004f6a30>]"
|
| 287 |
+
]
|
| 288 |
+
},
|
| 289 |
+
"execution_count": 11,
|
| 290 |
+
"metadata": {},
|
| 291 |
+
"output_type": "execute_result"
|
| 292 |
+
},
|
| 293 |
+
{
|
| 294 |
+
"data": {
|
| 295 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD4CAYAAAAJmJb0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAABNzElEQVR4nO2dd5zcxPn/PyPt7t35zv3c29nGBhewwYdxoYPB9JIQSkIJJARIIU7yJUDyJSSQQEISQso3gQAJofOjg+nFGBww2ODeOz73dne+urua3x/SSCOtdlfa1Zbbe96vl1++1aqMtKNnnvnMM88wzjkIgiCI0kQpdAEIgiCI3EFGniAIooQhI08QBFHCkJEnCIIoYcjIEwRBlDChQhdAprq6mtfU1BS6GARBEB2KhQsX7uGc93H7rqiMfE1NDRYsWFDoYhAEQXQoGGObk31Hcg1BEEQJQ0aeIAiihMnayDPGyhljnzLGFjPGljPGfmlsH84Ym88YW8cYe5oxFsm+uARBEIQfgvDk2wCczDmfAGAigJmMsSkAfgvgXs75IQD2A7gmgGsRBEEQPsjayHOdg8bHsPGPAzgZwLPG9kcAnJ/ttQiCIAh/BKLJM8ZUxtgiALsAvA1gPYADnPOYsctWAIOSHHstY2wBY2zB7t27gygOQRAEYRCIkeecxznnEwEMBjAZwGE+jn2Ac17LOa/t08c1zJMgCILIkECjazjnBwC8D2AqgB6MMRGHPxhAXZDXIgjCnS17mzF3DfWKCZ0gomv6MMZ6GH9XAJgBYCV0Y/9VY7crAbyU7bUIgkjP8fe8jyse/rTQxSCKhCBmvA4A8AhjTIXeaDzDOX+VMbYCwFOMsTsBfAHgoQCuRRAEQfggayPPOV8C4EiX7Rug6/MEQRBEgaAZrwRBECUMGXmCIIgShow8QRBECUNGniAIooQhI08QBFHCkJEnCIIoYcjIEwRBlDBk5AmCIEoYMvIdiPaYhrZYvNDFIAiiA0FGvgNx0u/n4NCfv1HoYhAE0YEgI9+BqDvQUugiEATRwSAjTxAEUcKQkScIgsgxK7Y1oObm2Vi3qzHv1yYjTxAEkWNeWbINAPDm8p15vzYZeYIgiBKGjDxBEEQJQ0aeIAiihCEjTxAEkWM4L9y1ycgTBEEESFzjaI26z0xnLM+FARl5giCIQLn2Pwtw2P8Wz8x0MvIEQRAB8u6qXYUugg0y8gRBECUMGXmCIIgShow8QZQovJAhHUTRQEaeIEoUsvHFA0fhfgwy8gRRopCNLz4Y8h9DSUaeIEoUkmsIgIw8QRBESUNGniBKFPLjCYCMPEGULKTWEAAZeYIoWQoZ0UE4oARlBEEEDXnyxQclKCMIgigRiiW6iYw8QRBEDigSG09GniBKlXRGpj2m4ap/fYrl2+p9n/vTjfuweW9ThiXrHBSJjScjTxClSrqB12Xb6jFn9W7c+sIy3+f+2v0f44R75mRYMu9s3tuEZXX+G6FioFjkmlChC0AQRG4oEhuTFaIh2XT3WYUtSAZoRfL8yZMniE5KAQI9OhVyT6qQ9j5rI88YG8IYe58xtoIxtpwxdqOxvRdj7G3G2Frj/57ZF5cgCK8UiSPZaXHrSRWiYQ3Ck48B+DHnfCyAKQC+yxgbC+BmAO9yzkcBeNf4TBBEnigWTbizUiyPP2sjzznfzjn/3Pi7EcBKAIMAnAfgEWO3RwCcn+21OhovflGH217yP6hFEEFQJDam06IViZUPVJNnjNUAOBLAfAD9OOfbja92AOiX5JhrGWMLGGMLdu/eHWRxCs4Pn16E/3y8udDFIDopRWJjOi3F8vgDM/KMsSoAzwH4Iee8Qf6O6/1G13vmnD/AOa/lnNf26dMnqOIQBEEUlGKRywIx8oyxMHQD/zjn/Hlj807G2ADj+wEAdgVxLYIgPJLGxhSHCSpd5BDKQhr8IKJrGICHAKzknP9R+uplAFcaf18J4KVsr0UQhHe8ZqGkUMoc4R
ZdU4CHHcRkqOkALgewlDG2yNh2K4C7ATzDGLsGwGYAXwvgWgRBeKRI1IJOS7EMvGZt5DnnHyG5M3BKtucnCCIzisPEdF6K5fnTjFeCKFGKZeCvs1IsnjwZeYIgiBxQJDaejDxBlCpebYxfW0Q9BG8Uy/KLZOQJokRJZ4sztdXFkl2x2OHc/e98Q0beBwea27Gvqb3QxSAIT6TzJDP1yItFay523BOU5T+GkvLJ+2Dir94G0DFzWxOdkDS2WHjkfs0OGXlvFMtzIk+eIDopmRqhIrFdRU+xPCYy8gRRoqQzMpka+TiJ8p7QiuQ5kZEniBIldwOvxWG8CG+Qkc8DFHJGFIJ0A6+ZGusicVCLHlt0TeGKQUY+H5CNJwpBunqXqbEudaelqS2Gnz67BPUt0azO49aIFiJBGRn5PEDdW6IYIU/enSc/3YKnF3yJv7y7NqvzFMtjIiOfB4J+KUrdkyKCIV0toTh5dxTD3Y5l+eIWy3MiI58Hgp7eXCR1hyhy0hlxTdP/9yshFEvUSK4IqcLIa1mdp1jeUzLyeSDoH7tI6g5R5KTX5Lmn/RKPy7BAAPYcbMPeg22ZnyAPqIpu5LMNFS2WHjfNeM0DQXfb9MpD6/kQ2ZGpDcumPtfe+Q6A4p41HlZ03zcaz9LIB1GYACBPPg8ErskHezqi00KavBvBefLuf+cbMvJ5IHhPPtDTESVK7kIoMzuuo2Bp8jTw2mnxO/AUvCZfHJWHyC/1LVGs3tHoef9cTYYq9bQGIUOuicVp4LXT4reFD3oAplgqD5FfLr7/Y5z+p7me98+VJ18sHmquEHJNLjz5Qjw6MvIZ4NeTKXHHh8gTq3x48YD3OHnfIZQlXp8NG5+1J+9GIXrhZOQzIOozfpY0eaIYyTzVcGlXQHF35Ml3YuI+Q6tIkycKgdfJUH4pdU9ePLdAo2uMd7YQj46MfAb49eRJkycKQa7yyZe6Ji9seywHcfLkyXcQCq3Jl/YrRgRFrvLJl3p0jWjEsk1r4CrXkCbfMfDbwgfh+ci9gVLXRInUeP/9cxNCWerVz/TkA5RrUm3LNWTkM8Dvjx+MkZf+zvpsREfGa/WjEMrMEI1o1nJNkTwnMvIZEPetyWd/Tc3myWd/PqLjEpSR7UiafD4NphbUwKvbtgI8OzLyGeA3cVEQvytP+oHobHitT2nj5DO8fiEk+XzaRuHD5SLVMMk1HQT/A69ByzVk5TszXutT+oHXDuTJ5/FaZgrmgM4DWL8FhVB2EKI+Z8IFYuSRWGGIzol3Tz5dnHyGRr4Arnw+GxbzUllekjz5DkwhQihp4JUQeO3J5W7gNbPjsiGvck1AnrxbT4lCKDsI/hcTCFiuIVe+UxOUke1IaQ3yaRzjxv1l23ugyVAdmIJ48rJck/3piA5McJp8ptfP7LhsyK8nH8w13SdD5R8y8hngd9Q98IFXsvKdGu6x+uUqn3zQk/uKDW7KNcFNhjLvl0IoOwa+Z7wGkLHUNlJPvnynptCafDwAQ+W3N5zfEEoeyDW5y9/kyXcQ/HoyQRhlipMnBEFr8n6XhA/CCy/ErHHv19L/D1KuMQdzO6omzxh7mDG2izG2TNrWizH2NmNsrfF/zyCuVQz4NvIUXUMESFAGL+M4+YB7pl4oSJx8gK68FSffceWafwOY6dh2M4B3OeejALxrfC4J/HpSgbyUpMkTBkENvBYyd02hl9BMfS3j/yzPY5dY7efOJ4EYec75XAD7HJvPA/CI8fcjAM4P4lqFpB/2YSSrQ8WBdUB7U9r9B2IPRrI6hOo3Z/3rapyb11fqN2d1LqJj0hf7MZLVQd2zBoi1J92vGvUYyeoQrt+Yst5xLYqRrA6DYluApr2ey6FpGkawbRjJ6oDGnb7uQbwTfG/qsgn6GPeMA1t8XScblFgzRrI6DNO2Avs3+T6+Fxp0O1G/HtDiAAA13oaRrA69WjYC0ZaAS5waFlQLyRirAfAq53y88fkA57yH8TcDsF98TkZtbS1fsG
BBIOUJnL3rgb8cZX2uOQ646tXk+29bBDxwgvX5q/8Cxl+Y8eUP1K1Bj38ebW24ajZQc2zG5yM6GAe+hHbv4VCY8b5OuAy44O+J+7U2oO2u4ShjMf3zBfcDEy5xPeXCB67HpG1P6B/KugM/3QgoatqirHj+Loxdcrf+ganAT9YClb3T38O2L4AHTrQ+X/oUcOgZyfdv3IH47w+DKu75ipeBESck3z8gtv7pVAw+8Jm14evPAaNO9XZwtBXNdw5FF9amfz7xFuDEm7HiD2dibOM8fdthZwOXPB5omRljCznntW7f5WXglestiWtrwhi7ljG2gDG2YPfu3fkoTmY0657O32LnYl+Pw83PyfffAwC4P3aW7fhMYcb5XoxP0zc07cnqfEQHo2UfFMbxQOwsxLoNTV6f2hpQxmJ4Lm44ACnqXZf2fdjNu2Fu2QlAWz2gxTwVJdy6D3HO8EhsBsDjQOsBb/dg1Nl/xU5PWzb9+31QGcer8WOMz/mp8+XR/VikjcAflG8a1/Xx7kab0YW14dn48YipXcxjK2MHsFIbgt0VI7K2BX7JpZHfyRgbAADG/7vcduKcP8A5r+Wc1/bp0yeHxckS4wX4rzYOzRUDzW5Y8v310amPtbH6Z6/BzUngxvWWaCNt5SE6CcbvP187DFpFz+T1ydhvsVlPUtRTruEgr8AGdXj6fW3HxRFDCAu10eZ5vB2n7/eJNsbb9Yz9l2uifAGM+HqAcQ3beDXmMsMx5h6fC2CWeYk2HJpaZt6jAg07eS8cDFd7f84BkUsj/zKAK42/rwTwUg6vlXuMHyYOFRpT0v/wxvdRhGzHZ4ow8u3ifFk2GkQHw/i941DAoSDpsKCz3qWop4xr0KAgLsyAR2PGeBxxKNDEcV7rtua9bPL37TAkpDw5NozHoUFBjPu8P2lfDQo4U817EM8sztS8O2hBhVA+CeBjAIcyxrYyxq4BcDeAGYyxtQBONT53XIwfJs6Nyp3uhzJeyhj3WKHTXl8/n2nkyZPvXBj1STesLGkjz+N6vYh6cQacxtqr46AZBstn4yD2iwmjne56pmMT9nedLGGcZ3Z/0r5xKOBMMe+RQW9QOTw4iAETCuIknPNLk3x1ShDnLwqkH09javquo+m1eKzQ6S4vvBpuVHgy8p0KrsXAIDxEljQyhWtxMADtwrlIUU8ZNN2YcWM6lGe5hkMD8904iLJZvds0xxnnjebZsWGIQwNDTDwXP++u3ONiinmPjGuSJ186ck1poUlGHqrnrmZQco2oLIGdj+hQxONW/UvpyfuQRIQsEc/AWGtQoMFf42BKjl57t87981TnhYwVE48jA7mGgxmevKXJa2C6hENGvkjRrK6m3kJ7lGtMTz5bTV6/Hsk1nZN4zNB6ueEhJtHkhVzjxXCb3qVPj9XUl33KGaJsnuWaoB0ljzCuIc4VtGfkyRuNMVfAoVpyjTn+0UE1+U6B0OSh6pXbY3RNu9euaRpMucbUJ2ngtTMRN+qf7j0nl2tk3Vt3Rnx48j6iazQoegAC4Lluc7NsHo12QrBBPj15hqjmU8YCzGcRF7KaiK4R4x9egjYChoy8V2RN3odcE5Qnn1DhyZPvVMRjloeua/Kp5RpndIcbwphZnrxXI6/3ADTu05N3Oj5p36FCavKKZeQzGHhNiK4xxj+4l6CNgCEj7xG5G6xJAypJ95c0fJ5CQ/VMgj5JRr4zoWnO6JpkA6+SXCNFd7hhyjV+o2u40Jf9R+UAHUGu0aNrolkMvGrO6BqugYN5C9oIGDLyHuG2gdf0rbHYn/NgBlssL0hE19DAa2ciHo8C0OUaPQwvtSfvR67RfEbXMC2ujw34lHk056Bw2gg1o87nOaJMfy4sqzh53bmz3nsFmvfw64AhI+8VmyafXq6xefJB6HCaI/6ZjHynQgy8mnJNsslQcXmAVk3tyRuGJ+bbk5f0ZeOzp8P8RteYnnwwYcheUYyYditENBO5htlkNUUOoSRNvjjREiZDpTPy1g
AMggibIk2+U5Mo1yTx5LlwRph3uYb7M/LiOM2cierPyHuVhwoVUZYgY/mRV+Qev9TIMrnhIE++SImLbrDx46eTa7iIlxWefLYLEDgmWeTZGyAKiybLNanqk/DkzUE+D3KNz3h3JKRD8Fi3nb3bNNfTnIPI+dLkTYOciSavP4u44/mLgVcNpMkXLdwMYRPdOJ66cpsVmgVilOUBNc5C5Ml3MuTJUBxI4clLg5tp6h0Dd8zs9J67Rh8b8HecFfnD0kb+6DtKvZc8yhwiph3iHjOQa7jDGVOMwWoKoSxmpJcnztJ3U7nm8LwDGnjVuAIo+e/yEYWFSwYvVYIyuzOQRpM3tHX/mrxmRY8AvuUaK4eLjwi1PNZ53es2GjDFZw8iYSzO0ORBk6GKHhFCaRuQSfFj2eWa7L0Qxp2ePE2G6kxo8mSoFHHy9iyI6eQaw/D4zV1jJCjjzN8ckIRghLTjWpbnjzzWecX05OG/ByEnKJPlGjMZHKU1KFqs0DRv07ltHpUHryUtpifHDO+CPPnOhGaTa1InKANkuSa9J+93MpSlL/uPygESY8iT7m7z5PNU5zk3vW4AaZ9h4vFWj1t+7xVwaY4NefLFSYImj9QtcsByTbKZdETnwPRqeWojD6funcaTzySKxJzYk2GCMrMXkHbRECkaJ191XgQ4GBFH3MPERxvS89eY7MmLZ6YC4HntiZOR94jsIcU9pCowJ0OZA6/BrAxlvSDkyXcmEjT5NHJNuvQHgJS7xu/MTu6Ua7xnrzTL5mEWuK1hy1ed51KPGchOrrFp8iKpW0BpTnxARt4rUlqDuAcPxj7wGoAOJy8aoVB0TWfDnq4A8D7wmrzeKYbsEvOZgyaxB+Bdy9dL7rE3KsfV+x0AzRRpTAOAIRP5GXi13lM5x5UqJC6WQdKzLCEj7xGuxQyPh3mr3AmafDCToaxGo2MMvM5esh0/f3FpoYvR4fEcmSLqCVfTOxdGWF/Mp+zCNGcPwKNcwzXzGE8Dr7Y8MOmN7S9fWY4fPvWFp7Ikv6hjwpbfkEfu6Elpccvwc3kCWf6cNDLyXjEiCgB4k2u4ZuQECUausVWeDjTw+t0nPsdjn2wpdDE6PJrZyIvcNUk8eZuOnbreKVyDxv3PeAX0um32aH0uG6ifwfvAq9dxqH/N24QXF23zVpbkFwVgGXnfQROS3GPmk5e2kVxTxHAtZv5A5kuRytBqcVPXSzfz0BOyJkuToToddk0eyQ2PFOqbzkAlLP/nV67xmcBLrCgFwJvjIzVseYsok8bSAG89DrfjzXz7XLM5aL4XPw8AMvJe4XEzRaoXuYbLXksQkQEJAzoUXdOpkKK7Uq0MZU3aSx/VpRgDr+aMV6/RNUKu8TnjVfbkPTk+fqNxgsDpyaeZUJZ4vGNwWYvbDH/c5wSyICAj75V4zPTMPSVYkiZU+A7Dcj2dI5yMslB2KjR5IB9IWvcSorBSyTUJA6/+omuiflPxctnx8SnX5C1O3m7kkaEnb0tQZjP8pMkXLZxrlifvoXJzSa7RAlg0RIvHLI2/A2nyRDDY8r6k0uTl6BCWOu+KlU3S72QoPeeNtTKUtwRlNrnGSzCCjwaLZ5sA0HFNm1yTQYIybv5Ocbsm73OWcBCQkfeKFjMrqCXXpDC03DnIlN2PqsldXRbKiyf/wZrdWL6tPufXITzgCO1LZni0mJ6t0ovEIWZ2xvwuGsKzl2s8pevmcoOV+l6a263vNC0Lg++IrvEtjTr1d4dc43eBliAgI+8VzdLkrYROqUIoub1RyDZBWVwetMqPJ3/lw5/irD9/lPPrEOnhkoxgZkF1wUp/oKb1QnUjz3zLNaIH4HflJM651bv1NPDqDKFMXucbWqPm362xLN41N00+U7lG9OClc1rJDUmuKT60mN0LAVLLNTwmyTVBePIxu3dRQLmGc46/vrcWG/c0+TqGyBy7XJM8rUE87j1nkqXJ+081bF8b1qsnH3PINWk0ee6Ua5Jfp7HVeh9a2rN415
xyjd93V/Q+xIpbXLMPvOZ5lSuAjLx3tLipxXsacJLj6nkAaQ3iklyjhPJaSZwcaI7i92+twdf/+YnnY7LpQROQjE/qdAVa3DHj1YNc43vmKud2mcdHimI/cg2TG7Y0M08PNMuefBbvhmhYuBwZ56Pyyj0ubtwjefIdBMmT9uTBcFmuYVlH12hy3H2Bc9eIKt/kw2OKk5XPCs41Sx5JmYXSmpntRa6xZ5P09hsx6HXR94pS0sCr18lQ4l7SRbnsOdhm/i08+b0H2/DMZ196K5t5UWtlJwBWGKRXpIYpbso1hdXkQ3m7UkdHi0uToTz8UPJALc9eruE2uUYFtLY0R+SOmDlN27vh9rMvkYg9Wiu5gYzLPT6mALw96TlDyCy6RnFq8n5CL4WH7GXFJe4MNkh+L3slI98a1c/7w6cX4cO1ezB5eC/UVFd6K6PIGqkogAZPM21tRdb04WhNPB8etxt+8uSLGO6QX4xtSXeXuqaBDLzaomty78nH4slf3GjcCBMz7HZbLJ7WU8+XJ3/rC0tx1b8+zcu18gnT5PBDhmQDr/YB+hT1TsqnkkquufSBTzD1rnftZeHxtMe5X9MRJ+9hMpQm75/kfatvjmLtroPmZ2Hk9xzUG4Wmdh/viriGohtjvzNe5XkKcdELkCQgzYsKEDBk5D3C45YnHfUyocHMXQNduwwg1TDPYwhlewojLxoA4Z0f+vM3cN1jC1OeL54nT/6J+VswZ/XujI/nnOPQn7+Of8/bGGCpskeTnIZUaXq1eFTaL0VsuVi5jClSvHvivh9v2Ivt9a3gnOO+d/TBdmYsgGGtgerdk7fkmvQzSe2OTfJUHhN+9Rb+8/Fm83OLYeQjIf3YNj8avVEmZnjcXmQlt+M1KIhrQq6xJCBz4JVCKPNPXOP4y7trUd8Sdf3+0w27pUGjRLnmjWU77DHlkhcS5z4XA3bBPvCa+7QGbdH0nrwswby9YmfK8+V7nLix1f13TEdM42iLabj9lRUAgAPN7bjrtZWIpmj08oIt70vqlaFMQ5JiMpQItWSK6kmu2by3Gfe+swY3PvWFuSi1cQLvdVE6ToQXzlu3B49+sjnF/vLMU2+/QatRdyOqfq2DrT48eeOdVlTJk/cp1wAwsnvao2s4KHdNQXl/1S784e01uPPVFa7fh2C9PFGXqILrHltojym3Za0MQK7h+ZVrUnk/wuBx7n3iSb48ecGG3d7DO2Wcxvy3b6zC/XM34LWl24MolmfiGsd/1+2xNnBp4N3oGc7801zc9fpK23EJOZOSGMZoXG8EmaImlV1kyW5bfQsAIKQwY+DVOCZJ1MtrS7fjQLNDQ9cSo2u+/uB8/O+Ly9wfgtQocA9rKPTsEgaQ6Mk3+jHywqDLnryfoAkp7DPGHXKNLZKJNPm8I+SJZPqdiEQAgBj3KNeY+2cv10Cz5J/8GPnkjVJM0uTlxkDTON5Ytt3V8Oc7ukaOtvBDNGYvp4jUEL2XXFHfHMX8DXvNz//8cAMue3A+PlhjSE+SgeRM1+RX7WjE/R9ssJ1HlhVTRdfEonr90T15d7lmZ6P1DLfsbQYA9OlaZg686idIHJjcdqAFNzz+Ob7/pCO3uyTXxL3IIM65IWk86l6VEQBAa3scew62mZ2dBh+9uhe/2ApAfy7prlt75zv46bNLzM+cc3y4Wu/RWgOvlief0dyCAChZI7/3YBuuePhTrNzegJcW1SWVYQSiQjDRDZWIaxwh5hhIBaxumJuXKnleMZ7ck39t6XZ8tHaP63e20yUMvAZfSZ7+bAum3/0eOOcpPfl2SZMXXhMAfP/JL3DdY5/j+S/q9HJKz2Xmn+amHMwNGl/em4RzLIKxxPqQC6555DNc/MAnZuO63hhI3HZA96BlA8nBzBmwTuQoLKSYyBOLuck1+jmXbD2AjXuasKuh1dx/k2Hke1REYFvoWknsLYgUA1/uaza3/XPuBtQ3tTrGFdINvGpJNfl1uw7iW48sMAdZAaB3VRkAfcZr7Z
3v4L/r9UbTj3T3yEfr9dtSpLED6V1bVlePe99eA0B3JJ5eYIVort99EAs26u+ynooZ+j2aIZRSPnny5DPn3/M24q3lO7BqRyPmrtmNM+77EDc+tShBhuGcY8W2ButzstSt0L17BZoZVeOcIeg6SKlpkiYPV6+Fc44bHv8c33hofvobs0UapB949WJQ1+xsxJa9zZi9ZDua2mL46XNLUXegBW0xLaUmLw+8Nks9n9mGpCG86Jjkve9tase+pnas2tGAk38/B/ubkofDBcFf31+HKx5OHmWzbtdB0zuVybf2/tHaPWiLxbGkTh/PaWrTf1dVMbRr0VDaBi1TafKSJJIiMiSWQq4596/zcNLv59jywdQZjU1rLA4mG3mXiVmiTiiK1UD++rWV2LKnMW2qj70H23CwLZZ4z457+cXLy/DOyp34bNM+c1t1le7JO2e8/ua1VZ5kxbimq+YAoKh6dLkzueDZf/kI97271ta4WMcDRo5Qy5OXJkPJM16bWttRc/NsvL9qV9pyZUvJGPm4phvt219ZgWsfXWiroIDlXazb1Yj65ij+/d9NOPPPH5qVxHxnmG58v/vE5zjsf1/HW8t3oKkthhCk3DWOOHlnpXpj2Q7saWxBHAoiISMlq4vX4i8tgCP+OYknsPjLA1i4eT8O+dnr+O/61D2E0+6di+PveR/ffeJzvLzYWlGnpT3uKtfE4hqW1dVLA69wrewhheHWF5Zi9M9ft21vi2n4w1trsGFPEz6WpIkgEcZx3a6DmLsmeZTNqX/8AMff837C9mRG3ovxX7uzETU3z7Zr6dB7a+N/8WbCs1q6tR7feGg+fvfGajMiUgwSih5ES3scze0xrN/ZKHnoqT15W46jZPH0MSHXhGxyjdxoy+9Qs2F4m9vjrnJNNK6ZdUbcg2rcgwhIEJJnSGEJs8CF4zDpzndw5n0fGsWR88/b70U1PO2YJKN1LdM1ebORkNjVmF6+29XYahrpdHLNjvrWhG3RuAaVWRknTZlWShMtnvWuet3B+MPbq9OWK1tKxsg/9slmnPnnD83PzQ5tvW83vSt36h/n4oK/z8NSw3PasFvvFouXmEGvyLOXbEdrVMO1jy5EU1sMquS9RB1hUM4G5brHFmLznkZoYKgIq9YAjIP1fgYHneFkjop35n0f4vR75+K8v83Drc/ra6rOXZNo5JvaYq5evuwYNkfjrnLNbS8vx9l/+QhbpG54S3vifmFVwRPztyQ4m63ROPYZHjyDHtOerCvd0Bp1bUB21Ldi3jr3xotzniCdyWMBbyzbjpqbZydotJxzPPnpFtS3RJMa8yaH4Xjwww2Y8pt3zeu1tMdxwf/9FwDw4qI6276/fGU5DrbFcPZfPsIzUvdeDGY+9NFGszd4sC2GpVvrTanjztkrMfa2N/VU02Y+lRRjPFrMHDOSJZHz/zYP//hgvblbVBh51R67vX6XVSfld0jUh5b2uF2uMaTDmX+ai8N/8RYAoMEw8oph5EVAggoNleURjOxTlTAZqi2mmc9S1K/1O+utBsuRXjtkNOayQVcUhoiquEqzQlbc2dCKi+//GDslKSoa11B3oAVT73rPNNKqKdco0ntuXUv0bJzX0Hv8+ixd3RnkiMXazXOJdzis6Odsaovj8NvfxN/eX5dwvqAoGSO/3dGy3vjUItvnuMaxea9egTfsbkJEtcfQirArxljC4OvBtjhUaGb2Sc0xGcpp5AGYjUJ5WEFU072BuMZNmeKv763FQx9tSDgOAN5ZsRPX/PsztLTH8dKiOr3y27quiQOvK7Y3YPXORgDAJuM+//HBevzxrdX4zqMLsHFPEx757yaM+8Wb+P6TXyR4IrLn3tIes33+vznr8NKiOrxqePu3vmAtzN3iYoiF8XJysC1mzkx84tMteGL+FpvhkTni9rdwuYuMde5fP8LXH5zvOg4SjfOEHDmNrVGzq/6X9/QXyRl5s7SuHrc8vxQ/e2Ep2qWB1/1N7eZgaFNbHAea23GwLY
aam2fjztkrsaOh1aw/D8/baBqclqiGpVvrzd/a6kUexE3PLsGnG/cZZUv0OA+2xXDOXz/CR46GTGGaWe/c5Jrt9S04874PcbCl3TF2o5dv0ZcHcPfrq3D9YwsRjWuIGyGUikOu2bDHmlQk91DFO9EspEuh4yu6J79+dxPa4xpWbGswG25FYTZjqDAOVVWhKsyQa6yGqi2m2fLPvL96F/Y0tlrBBlDAtThqbp6Nv89Zb/bYZGOtMKAsrNhi5p338uzCrZi/cR8emLsBuxpbsXJ7A2Y9vQjT735PP4ch18QdcfJPf7bF7GEAQN3+xDre3K7bCXM+jVH2tna9zsuefNx4vxpbY2hsjeGeN3Pn0ec8rQFjbCaA+wCoAB7knN+di+sIPS4Z9S1RnHDPHKlc+v/vrNyFY4b3thmr5ja74broH//Fy6pLCKWLXCOMo5gwUhZSEYsC4By/eW0lHvpoIyKqkqDj/3r2CizYvB8v3DAd3/rPAgDATc8twSuLt6Fft3J015xyjT2qRaY8rJrG58+GYduyrwUrt+tjEK8v24HXl+2wHSN77s3tcZsm/7s3kldANyPvjPiQz7vXMHxlIf1ZLpfGRQTCWH62aT8AXQb5+5z1uOXMMWa3u7Ethm7levf81SXbMLJPFQb1rEg413WPLcTiL+vxk9MPNWdFnv+3ebZ9xBjCq0u246uTBpvbL3twPrYZjeG8dXtw7ztrcNbhA2zHrtjegCfmb0F52PKXvtzXjHP++hG6loew8OczEoz51+7/GJvuPstmoAQJYYcGijkBKdGTn79hL95fvRsrtjegLnwQVSx5dM3ry3bg8837UdmqX7uiLGKTa2QvWC63kGCa2+PGoiFWpA+TrnHmnz/EFVOH6WVmMI2nfg8amKIipLKE6Jq2mNXLA4Bv/uszPFSlWTNPpRDK376xCmeM7w/ALpsojCU867DKEI1bAQJ9uuo9+i/3NWPqXe8lRH0JuaaiTLcn+qSmOH763FLbflulxuuzTftw+KDuaGmPQQE3Jy0KWbe1tQ2VELHzhuQb1Z9zfUtux6aAHBt5pk8b+xuAGQC2AviMMfYy59w9GD0L3Lr2Mpscg2z7m/SHPHfNbsxdsxtXTx9ufnfjU/bQr2icQ1U1HDmsN45X+iB20OiOazG8vWKnraLsNoyQauTqNiu0FserS3RP2G2g9p8fbky4jznGoMxnG/fhVNtygpYnv+dgGy5zZIMMq4kRIat2JBpTGfm61/5nIYb26pJyf0Gzi/6ZjKa2mPkS7jeM2ZzVu7GroRV9u5Wb5Xh7hdUAcc7xtfs/xv7mKKYdUm1u33uwHfPW7sH1j39ubptc0yvhmp9s0L3mO5LMf9i4p8nmlck9QNEoAsCnxtjNbEe8/IWGRDOyj5UbZYVxXGNrDMf85h3X61758KdmXLfMY/O3uO4v6hOgGx65J3PxA9bvH4I8M1bvQTrluf3NUUSbdeNYWR6xRdfIRlL2ws1eSrtdugRTsbvB7tXO37APbqiGkVcY0z10Wa6Jath+wN7otUej5gAoZ4o50QiwpKDtUkPpFgjVo0sEuxvbTEeszajnX+5vcQ3r7dc1BLQBNdVVwBagsV3Dhv2J745cZy76x8cAgBlj++FoOdRaMzz5Nt0maLDyBImU0LkOzQVy78lPBrCOc74BABhjTwE4D0CwRn7XShy5/p+YFdqfdJfwLoaTQ9YD7b+9HGNCUqWaD8wKAYO3VWDr/hac7Hgy1awe3bp2QaiFod1ooaMrXsGK1XMA6McCgPLBJ5gV2ophyk7U8WpEVGNSRGs9vh1/Gk2h1Ebx2d+/jFkho3WPAwgB2vtAt9Bm7EA3AIZcE2sFf+/X2Lh5P87auxdnSeUNtTPEQv4qT5+FZZgVMganmvV/0z3UjjVPP2veezq2vfgSZoUMT7EOON44bunjb6N2WE90rwhj6aZ92LJ+r3nODx94B1e1HwBCwM6XnsMs476+eOQ1fLm/2X7trd7KLPPyn5
7FpKE9rboTRWZvxX7HceLv9iTnMzo7Cc9uAzDJZf/D2BbLsAJQtBhmhZ5N2G8U24pW6F4oZwrQ1gjtvd9gVsiSxbY89wK6cl3aq6oot+SaDXMwMbbDfBYjNlSiZ0iXtiKtCtpDGipbRcSYZeT3rJmPWSHLAIcPMERDHF0OqDhN2j6Q7UWL0lsfeIUCNO0272Hf7DnoEtUwK2Q1EIfwLVKUiwoWbTb3r9nVBaNDzei/tRyjjfd4wvYe6B06YHsevVgE+0LtGPDFPGBrFQ7btA+zQnuh7mGY6fKOnNerBdgOnDNxCG77fB8a2jQMZvUJz7rnugiGhhxe+BrgaGV1glxTseIZ85kJT77nptcwK9TNfvz6CmDkyQllypZcG/lBAORcn1sBHCPvwBi7FsC1ADB06NDMrrJ7FabXPZT+BZe/b4b73Tcm2Q4A/cchskXB7ngl0HUgwuvfxo3OfRfB3PZu/EiEVIa1fAgQfQ/fwjPpn3hb8ut/oI0FADR1OwQAB5v7OxwN4Gi3/f2OtrQkv25gJDOgO41/QOL9bLcaAwDWfaX6nfyyDTi2A+RjnR2fDADYXzkCHMCNoedd93s+fiwAoKHrKCDagsi8e+z11LBtrTwM9BoBgCHa+zCEN8/DdMyz3qMDSGy4NH1ltPV8IAAg3mcMxjS8hTGKowdi7Ov8jRZWjUQECjapwzDt4BzrHoxUQdMc+y+tPBloBJp6jAJ43Nq/3ji3/B7vAE50/o6ikTXcyqTvi2A7gLLu6NJnCIB92BIajulsbuKzTtZ4A1igjTZuaRCghNFz4yto4BXYxnujp9IDqOyLPnXvJNiOvUv7o3cHNPJp4Zw/AOABAKitrc2s7zLuAty0cgTeW7ULh/StMrvoAHD3hYdj9tLt+NDDhKNUPPntKZg6sjfCT36BJi0M/GQlltXV4+y/2JfHO3/iQLy4yApHnKgo+I92Bm654z5Mu+tdU+PNhm01F2Dcmdeh5ubZnvY/pG8V1klZ+jLhX1cdjT+9uxaLvzyQ1XlSsenus3DTs4sxd80efHzLyXh7xU5c+2jqxGcA8Nas43HavXPNzz+eMRp/MCasVIRV13EDmZ5dwhg/qDt2N7Zh1Y7G7G7ChcW3nYYJv3rL9btU5auMqDh2VDVuO2ecTdt+K3QSXhk/B88s2JryuhtGXIoJX/kf7GxoxTG/eRe/vmA8bn95uU0i+H3fCQAWY8fX38dxv7OHlA7qUeEaRVIZUdEkIk6+8oR5bxdNGoxXlmwzgxjcuGX0YSjfuA9PlF2Ey37y57R1eNb40cCONdg+4iIoR12BGdLvLJgxth++Ob0GU0f0xvBbXrN9d+bh/fHa0h343VeOwPrdB3H/XPfxIsGr3z8W4wd1R0jjABbjpW6X4pY9p6c8BgAmDO6OxVut3FXVVRHMxSTgtj14c/kOfOfRhQgpDCOVCuB/1uLlxdvwA8eM4KuUGtye9kr+yXV0TR2AIdLnwca2wGluj6NbeRjXHj/Ctv2SyUMxxKO+nAzGgJpq/RxhlZmDlM9/nngrn285YPs8vLrS1P7k2ZOXHZPYa7l8yjC8Pet487OqMPzzilo8de0U235+l9Kr6Z38/m87eyz6GoNRqejXrRw/OPmQtPv1SXOuH80Ybfv818uORJmRY6S5PYb6lii6V4TBGMNJh/U19xvhyAf+y3PH4bhR1fj3N4/G6H5d8cNTR2HSsJ4AgAE9KjD/1lMAAGeM74+bZh5qHvfMd6bi0slDbOfa3xzF4J5dUFkWjM9TGVFx1bQaALoRryxTk+5bW2OUuXu5uU0EEYwb2B33X16LQT3sA8qPfrI5rYGXEYPoZSEV3SusAIUjBnc353e75ft3GnhRRnmxmP3SQHEkpKB7ReI4g0xFREV5WEnZEMhMGNIdgN75SDYLe0SfSkwbWe06O7mPMQv2s0370hp4AGb5xTwuVUk8p9u2qnJ73RlRXWU2pmKco3tF2Hx32xwN+49mjMZtZ49NW75MyL
WR/wzAKMbYcMZYBMAlAF7OxYWa2+OoiKg2L2XCkB4AgK5ZvryXTR6KAd31F60spCAa1yeNPCylo/3WsfrA7ZZ9zagyrjeoRwUG9ahwTc5VHkp88avKQxjVrytumnkoXr/xOKz79RmYMbYfpozojdvOHotvTNEbBo0nzgNIheJS+WeO06MTIiElIQZcIB9WXRVxrdz6d/qL9OAVtea9J+OGE0faPp99xEDcef54AMAXWw7gzeU70d0YkAyriu6VXzQBr3z/WAw2omc23X0WrpxWg0evOQYnHqo3BD88dTSe+c5U3HfJRJw/cSD6dSvHS9+djru+cjhuOPEQzL/1FNx94eE4uqYnQkpitQ+rzAyrBYCBktGtHdbTjObwwrJfno7bzh6Ly6cMw/M3TENIOu/vL5pg2/dMI1rnpMP6ms9b5GBxybDhC1Ht2o1wybKQgh7Gs73uhJF49rppEI/Cy2SvMQMsDVnEqe+QBj4jIQU9KlJHuakKQ3lIdQ2UcDZmN54yymo0ePJ8Sr0rk19TDOiLsGLBQ1fW4vBB3RP272oYa8YYFOae5sTtZykPqZg6orf5eXh1pbm4ToMRsSQ3gM7gi9H9utpmCAdJTo085zwG4HsA3gSwEsAznPPlQV9n/oa9eG/VLlRGQjhmeC8M6VWBV79/LF767nQA1g8nEN5mN2P7HeePxws3TEs47+nj+gGArTKEjfBHeRLGRZMG49Yzx5gvZ5eIbsBra3pCURjiWuIknYqI/uhD0g8rDOQNJx6CMQO62TyTq48djsun1ADQK/vY294EAPzqvHF4a9bxGNmn0hb+J+MMK3v5e9PRs9KqcM5l/FSFYdUdM03jCwA9K5MbeeHhRUKK6RGOqK7EuRMGon83y1jOu/lkm8G78MhBAIBq4/f4+oN6XLx8le+fMgpfmTQYlWUhvPOjE7Dsl8m7zqrCcN7EQeY1JgzpYYZq9utWjksmDwVjDGGpDMOMXs60kdVobLPCB2eO143v8zdMw7PXT8PfvzEp6XV7V0ZwgXEvgGEgFIY7zh9vGsaeXcK4fMqwhN/owqMG4Yqpw3D9CSPNxrhvV/2ZXVxr9Tg+vOkkPHvd1KRlcENUOTP1bkgxeytdy0OIhBTzmvKktl5JjOaYAV3Nv0NqYpx6JKSYDXRF2HJihBMgylQecTfy824+2Xw+RwzujhtOGmm+Axzc7JHcfeHhuLh2CCYaTlyvyuS9x+qqCBhDwgSpU8b0ww9OGWV+/slpo1E7rKcZlgsAIUVx7eHI9UdQHlbxyNWTMevU0RjZpxJdy0OIGj0PMfmua0XYWmjH0ZMRvdlckPPJUJzz1zjnoznnIznnv87FNWIax5FDe+CcCQPQo0sEH950MsZLhlkYz0P6VmHT3WeZ4W6i8oUUhiOH9sSr3z8Wj1w9GU98+xh8rXYwBhqexWGSBxNWFURjmi2W/lfnjYeiMLOlvnTyUBzarytmnTraNOIat3vGwpOXY7vTecHCyMpGuyykYHS/rnj3xyfi9xdNsBkC0bjJxgvQX2Lxcjsbn3MmDMTrNx6H8rBq82zDqpLUyIsY8bCqmM/0uhNH4s+XHok/Xmx5rk5P7Y8XTwRgdakFzolt1nXUtM/IC/IzP31cf6y6YyZmju9vMwRXH1uDxbedhqOG9jS3fXjTSXhLktNOH9cPT3z7GCz83xm417iXZHxx22m4w2g0F//iNHN7WUjFr84bjyG9uphpAC6dPBTPXT8NX5EahCG9uuDwwYmep+ArRw3G7756hG2b+GWF11gWUlBlSEeyxwrY5zu4hXYCQO/KMpwzQR9wFdVmt5QuoEy15JqRfS15TQ7p5YDhybv3HMScj8unDENZSDVlEznj6ZgB3fDbrx5h1s9UnnzX8jAqwqr523YtC+HUMXrvTzhj4wd1w/dOHoVnr59m86YVxZ42QeCUZgB9ElYkpODGU0fh3R+fiJCqIGrcS0NLDJURFSGFmTmynJ58JIdGvuADr0Ew/ZBqTJ
diqJ1Uldt1tqG9uuCTDftQXVWGDXuazAorNwzTRlajuT2G40f3MT0GQP8xonFumxUrT4QBgIlDemCWoT0Lw9jcHsNWKba23PB05BY8nQETDYbsBZWH7bJPbU0vvPfjE8AYQ3VVBIff/hauOXY4Zj292Nynd2WZaeSdscLXHjcCo/vpHpvwWEx9MklGRvH8VIVhwuAeWLh5vznByE0qcjKyTxV6VUbMyTDp5jxky1XTavDSojos2VqP3pUR8xnWGzMuH76qFoN7Jo5jiLEdUdZ/fGNSRlkqu1eE8fi3jknw3sSpupaHzPEFmbKQak7ucfKLc8eiW3kYNzlS3wKW1xgJKaiM6HVM/C9+W/mZ96qMuKbcKA+rOGN8f7yyeJstzYFAURh6GEZ+VN+uWFbXgDPG97cP1nNuaPJx16RhwnMW742QSzQORIXsFBZx6HoZ5LGU+y6ZaJvrUFUWQkVYxX7jt339h8eZv6343ZPVa5UxRF1yyVeVhbC7sQ1HDO6OJcZgqxiDEURUZkpgja1RdKsI64s2JvHk3XoHQVEyaQ1SIYynMDgj+lRBVRj6GPlskmWg7BIJ4aRD+9q2CblGZAu84cSRCS+6PPgoKuvPHQsjiIZB1vxG9atKeR9Wg2G9WG7dvBF9qjC8uhJdy8PYdPdZuOBIyyN85XvHoiKiSlkO7ceWSQ2W8HjFPiGXSVaA5TEyBnz3pJE46dA+OH/iIFuZU1ERUTFT0rwfvLI27THZoCoMz3xnKn40YzSuml5jbp82UncUjj2kT8rjX/rudPzf14/KKg3x9EOqUeuYvCXqZ6qBWtmgCe/vw5tOMmWGz/93Bp673i7r/PiZRQD0RkK8C2KcSHH15C3P+D9XTzb/Lg8rNofHeZzGgZF9q8x7eO76qbj34om259S/ewXKwypiGsd7LhkYexjXFvfJTE+e4w1jlraQ4Eb17WqU1+p5nDfRks0AS5Zqj1kNnYW9QXGiKsx1rEI8w1vOGINnvjMVm+4+C0cM7mHbJ6Qq4Fx3ohpao+hWHoaiMDMza3s8DlVhpuNGnnyWiK6pqNCXTxmGo2t6Yc5qvZL5kQCEURXdv1PH9jO/mzm+P/4+Zz36S4N2wktwJtUqM7wI2U7IA1tuCCNrM/Lh5AZBRmH6S2g2LiIzg0OukRsN8UJHTI/eKuxfLzsS33viCz3TrHEOBj2n97++aRkGL548APQzdOijhvZIeGFyQXlYtWmyAHDvxRPx04bWtC/ckF5dXCO2upWHXDMgekXYmopw8vpYGQmZOV5uPGUULps8FD0luaJXZQRNbfqzFL+sCNstkzR5Mdju5snLRr4iYtWvirCKgT0q8IeLJmDd7oP4+5z19rxNnOPQ/rrh3dnQhknD7I3YHeeNw6lj+mKTkX1VpO+QuWnmoRjWuwtmjOln297YGjNDk0Udvf3ccTh7wgCM6tc14TyCruVhmxEvU637EQ3KkZIkJ6MqzFWu+eqkwVhaV4+hvbskSJAC4ZnvbGjFm8t3onZYT5QbslE0ruFv7+uT0/R3i9uk0aDpFEZeaG8ikqCyTO8Ojx/UDf27l+OcIwZ6PpfQF0XomOj2AsBPTjsUV06tsQ00iQpWWRYyV4+XzzOwRwV+NGM0tuxrTttlE+dqabdr8l6IhPSwNdFF/eqkwfjXvE04ZUw/vLtyl5n6t0yK+gmrCp789hQzpE9cf2D3cswwGrdvThuOL77UZ0i62XOz253G1ld31a+RagHxXFMRUTHcEarph09/dmpW1xcNYpdI8oZbdkjCKrMZeIH5rDlMDxbQ68oNJ43Ehj0HzZ6W8LJtRl46p1wnRUjrVyYNxuPzNyccp3Hg2EOq8Y0pQ3HVNCtNiCjPCaP7gjGWIG/KdImE8M3picd+ud9KSyLqfEVExXGjUve6upaHbPcgN+Aj+1Th5e9NT+pcqYqS4MkrDLhi6jBccNQg2yCtE/F+3zlbn4W1emcjpozojbZoHJvlFCsssVxB0ymMvJAbnN
pbWUjF148Z5utcosII/VZ+IVWF2bx4sQ2wNwbdykNm7osR1ZU4bZy38DwR+id7T05NPhkRVTfyQo4ZN7A7Nt19FgDgsW8dg5G36pNInJVt6kgrLEzcS0jVE68tvf00VEZCWLG9AXe8ugLjBiYODIpnLnv08289JaFBE4OvXuOnixGvv0UyxCNKaeSlQT/VJRRUP48VkSKnco6EFAzoXoHHv2XNuxC/iqyt95Iir8T2iUN62O5PyIx2I88RVhXcef7hrvclZFG33uchfd2lSnGdL/dZ41lee6+AbuRlT95Zv1P1GlUlMbdM13J9DkcqAw9YdkI4TY2tMTNxoDwOJorWoaNrioGxA7phzIBuuO2c7CcbiEpywMgel24CjeXJWxVz9g+OwwVHDcbYAd1w9bHDkx2aeC7jbWmWB15d4u3dmDxc7zrL3VVnGYHUlc0y8vr/XQ2dcfyg7nj6O1NdjZywQ3ID269beUKYngijdC7A0pk42tDoK1IYebm+JRkiMQ035/ZILLeeomh8WyWPX5ZrRverQnlYwS1nHGa/hnER2eFINkXvljPGoCykoJ8RTlvhqCfVVRH8v++4h4eK+uPmyXuhIqyaureqME9jRAKVMVtytx/PGI3/5zGMVVxHhGnfdeHhKA/pA86iYRzVt8p8/uTJZ0lFRMXrNx4XyLnEi7LfxZN3Q1Qw8WMeMbi7qee+5rNMqvFWt9o0eW+V475LjsSqHY1mHHMyvLxAfvRDr3KN8ORTLSBe6vz50iOxdtdBdE3hJVZJzoKa5HewPGe7kXcaV8Ayos7oGkHvqjKsuuOMhONEo93i8OTdOPPwAeakL8Dq8YhxoqeuneoqOwGWJ79Nmn0b8mGoGbMMu1/dW1WZrWf5fccYTipEGUUjeMnRQ7BiWwNao3Hzmf3yvHH49iP6uARF1xQREUmuURWW1igq0o/NGPDoNcek3D8VzooDeJcIxDhE2mukqGxiEMpPhVRc5Bo3xDhGOo21lKksCyVEryTsE5E9+fTGTsg193z1iCT6faKx7pYmNYF+nP6/zZP3mG1DaPIaB44bVZ1UqgEsOaOxNYZelRFsvOtMz1FNn/5MT22RaQSLytyjazwdK97VaByRkALGdFvRGtXMBrU8rJrvhZffMlPIyPtElmu6RNS0FU5UsKa2GA4f1D1tbo9UyBVHEJSW54w0cUMMiiYLpXTDrMRpvK+KiIoP/udE3HXh4Sn36+zIck0yj1Z4v5xbS/GNHeg+uGjKNZKx9hJtpkr1emD3cpwzYSCuO2FkmqN0ZMekLI3cKN6X+pYoKsLp3zcZMXNYzdDIK0mia7wQknrdZYZTpGvyllxTEVbxn2sm46JJg9GtIneiSqeQa4LElGuaojavKhnmy9Aes0XdZILZRbZNxMpusE/woxmjE5KHOellaLVC3/eCV7kGAIb1zjyypbMgG+BkuU7kgU7hyScbKBSnkD15P/W6JRpHddcy/OXSI9MeI6iwGfnUhlfufaSTRgWH9utqLoUJWAELfh2ikMIyjvZSpSAJ0biUhxVoUsNbHlYxZkC3pCGcQUFG3icRKU6+S4pJKwLL44l71s+ToSh60qR0k6FyRU11Jd784fG2VZDS4RZdQ2SON09eR/bknfmbBKYnL2nPXjxe4VE3t8fNOQ5ekUMo09XfsKonVTvQHPVs5P/f9VOxVwpXztiTlwZep0lRZl4IOeQawHLIxBKPbmMkuYDkGp/IcfKePB5mefJBeN0hRbFFoORywMaNQ/t3TanbOxEDezlKsNfpkAdek2YtlAZedxqTu5LJhMzhyV93wkj065a+x2lGerXFfDsvskTjxfCKHrDX96dbedg230FIJ74HXhVmhl/feuYYX8cqUq9b3GOZaeT13lWq+QJBQkbeJ6KiNLfHPXkWwovgPBhpRVFg5s2R86QXK+L+/YSuEcnpEvGuyQN6TvhBPSqSatnOePdZM0Z50r2FvWxqj6fV1Z3IIaJeeqK9Hdld/SLqXi
ZyjcBvTzQkyVkR1S4X7TeNPHnyRYnseXhZZEI2bhUBtNwhxVpwwe9ErkIgXo5s8rwQFrKhS2t4OEfd/pakU+/1c+j/7znYjsqI6tlgy7+nX49UNm5ePPnexozrVPMHUpFpdI0zI6UfRLhzS3uiXFPf0g7G8ie1kibvE1ke8ePJA8G03F4nLhUbuQwR60yUS3UuqSdvbI5rHFv2NeOUw/q67gdYhmxXQ6stsd5Fkwa7ZuIUqCzzel0ekjX59McKqSlVTp9UZKrJy/eYqSdvG3gVkXnNUZSH/EUKZQMZeZ/IRt5PFAIQjJGXX+xcJjUKCs3MdljggpQI8mBdMglMbH11yXbsa2q3LaPoRJxib1M7RkgD6vc4VrByYq/XPmUQVTES23kzvGJyWKZyjRVd4+941SbX+LumnDE2rNo9+f3N0bzp8QDJNb6RvWc/0TWA3YPJFHG+sMpytlxYkAgPqNrDOrJEejwZeeOZL9i8H9VVZeZSj6n2BeArxFex1Wv/xlfUCy+9UZEaINNxHXGcX7lHvp5fr1teYlJOqAbovaYuHhzEoCBP3idyRfE0aUSqHH4SKyXD1Bc7gBcP6Hlqfn3BeJzqSB1LZEZFxLsnDwBDe1WkdAZkGSLdIuz246y/M+mhKgyIw5snL8a+nAvceEW8M5VZGHm/co3bsaInsrepHSen6F0FDRl5n8ihaF5aY3l2aCCavAgH60B6fEcYIO4oyKGBXgyPWIA+GbKx7p1irVQnNuclg7qoR/VwTxKKkDtiLqs0eUEYXL/eczZyjSyrikXE5eu7LSKeKzqOpSgSbJq8B7lGySIKwQ3xcnUkI08ER1hVzARiydJLyLZ/QPfUE5XkcMvuPqbWK1mONVlL96U/VhhMt2UPvSCek2+5JouBV7mBEIudyz2JET4mFGYLWYoscFsdyImszQWxCHWmkQJE6cAdS/c5kQ23SO+bDPkU6TKUymTrvAjlxbmIuxuizmcq14gOQDZyjd9AGPlYEfLcRXr//fSasoUsRRaMTrHsmECOr002tdwPotHoKJo8ETzmmrtpZrwC6edyyMY63UIYMnL1y2asycs4gHCm/KTTkBH5ZyqykmsyC6EEgPMm6ivPyYPmPSszT1ToF9Lks2Bgmq4w4PTks/9hLU8+P7PliOJDhKUmG3gNSzJO+lTY1t9+MqQqWcTJy3gx8lNG9MYz35nqKVW2G2IJRL8hmEENvP7p4okJ25yL5uQSMvIZcNW0GqzZ2ehr+jcQjFwT6oADr0SwiLztyYy8PJiZLq+MzZPP1MhnURe9Nix+Mp86EQvR+DXyPST5yv/Aq/VM3OyEvPpWriEjnwG3nzvO877yOpxByDVmHg6SazotPI0nb5+bkdqwyafw48nbZl5n4cnnY9Znm+nJ+3v/+lRZPXW/5VTTrLmQr7w1ABn5nCOP0AfiydPAa6dHS+PJy6Tz5GXjlbFck0Fd/OB/TsS+pvb0OwaAWN3J78CrLCVlE0JZaMjI5xi5RfeS0Cwd+Vj4lyhuTE3eg3eZLg49U21d1vIz8UqH9a7M2yIxQpP32+OwG/nMNflCQ0Y+x8gvYhCGWWjyHSk5GREsIpDQkyefpp5kaoqySVCWb4Rc4/edsRl5n0Y7mSf/q/PGZRwKmilk5HNMtUiTGtCLIDR+8uQ7L+k0eZl0BjjTFbtko1fsDkd7hkZelq8yTVDm5IqpNf5OFABk5HNMSFWw/JenB6Y/drTcNUTwpI2Tl0jryRun6OljIhRg9+TDRW7krUVD/Dla2SwaQnJNJ6OyLBSIHg9YMdDkyXdezPTNAQy8inNNGeFvDVPZ6IWLyKC58Y9vTMLzn2/FkF6p8/g4kVOYkJEn8obIYU9GvvPCfXjy6UIoh/WuxAOXT8Jxo/r4KoM88Opnzd9CUFNdiR+d5n+pTDk3kF9VK+R3KakcQka+gyF6BGTkiSA8eQA4LUW++WTInmqyRGkdnbCSuSdfRI485a7paIiFSmgyFOHFk8/V2I
1drinNuig3Xn6NdjGtaVyav04JYw4eFVElIgqDF+8yV1KKfO1S9eTtRr7j3iMZ+Q6GGOSKxTNbQIEoHQo5q9Im1xSTNhEgYVv+mQIWJEvIyHcwRLhaLM8TKojiI1UEx2H906fBzgb50sUkTQSJPOaR6T36DU3NBTTw2sEQYV1iggfReUlleJ67fhoOtsVydu2OsIh8oXn+hmkY3MNf2GYuyMqTZ4xdxBhbzhjTGGO1ju9uYYytY4ytZoydnl0xCUFEFUuhkZHvrPzglFFp96ksC6VdFSobvOTN6ewcNbQn+ubwN/BKtp78MgAXArhf3sgYGwvgEgDjAAwE8A5jbDTnPJ7l9To9wpMnI995+dGM0fjRjNEFLUMxTfYhUpOVJ885X8k5X+3y1XkAnuKct3HONwJYB2ByNtcidCYM6QEAOPHQvoUtCNGpIUe+45ArTX4QgE+kz1uNbQkwxq4FcC0ADB06NEfFKR3GDOiGlb+a6XvleYIIEpJrOg5pjTxj7B0AblPifsY5fynbAnDOHwDwAADU1tZSyIgHyMAThYbkmo5DWiPPOT81g/PWARgifR5sbCMIogQo1bDJUiRXcfIvA7iEMVbGGBsOYBSAT3N0LYIgCCIJ2YZQXsAY2wpgKoDZjLE3AYBzvhzAMwBWAHgDwHcpsoYgCCL/ZDXwyjl/AcALSb77NYBfZ3N+giAIIjsorQFBEEQJQ0aeIAiihCEjTxAEUcKQkScIgihhyMgTBEGUMGTkCYIgShgy8gRBECUMLRpCEASRhAevqMXmfc2FLkZWkJEnCCIjfvfVI1DTu7LQxcgpp47tV+giZA0ZeYIgMuJrtUPS70QUHNLkCYIgShgy8gRBECUMGXmCIIgShow8QRBECUNGniAIooQhI08QBFHCkJEnCIIoYcjIEwRBlDCMc17oMpgwxhoBrC50OVJQDWBPoQuRhGIuG1Dc5aOyZU4xl6+YywYEW75hnPM+bl8U24zX1Zzz2kIXIhmMsQXFWr5iLhtQ3OWjsmVOMZevmMsG5K98JNcQBEGUMGTkCYIgSphiM/IPFLoAaSjm8hVz2YDiLh+VLXOKuXzFXDYgT+UrqoFXgiAIIliKzZMnCIIgAoSMPEEQRAmTdyPPGBvCGHufMbaCMbacMXajyz4nMsbqGWOLjH+35bmMmxhjS41rL3D5njHG/swYW8cYW8IYOypP5TpUeiaLGGMNjLEfOvbJ67NjjD3MGNvFGFsmbevFGHubMbbW+L9nkmOvNPZZyxi7Mk9lu4cxtsr43V5gjPVIcmzKOpCjst3OGKuTfrszkxw7kzG22qh/NwddthTle1oq2ybG2KIkx+b62bnakGKodynKVrh6xznP6z8AAwAcZfzdFcAaAGMd+5wI4NV8l026/iYA1Sm+PxPA6wAYgCkA5hegjCqAHdAnQRTs2QE4HsBRAJZJ234H4Gbj75sB/NbluF4ANhj/9zT+7pmHsp0GIGT8/Vu3snmpAzkq2+0AfuLhd18PYASACIDFzvcnV+VzfP8HALcV6Nm52pBiqHcpylawepd3T55zvp1z/rnxdyOAlQAG5bscWXIegP9wnU8A9GCMDchzGU4BsJ5zvjnP17XBOZ8LYJ9j83kAHjH+fgTA+S6Hng7gbc75Ps75fgBvA5iZ67Jxzt/inMeMj58AGBzkNb2S5Ll5YTKAdZzzDZzzdgBPQX/egZKqfIwxBuBrAJ4M+rpeSGFDCl7vkpWtkPWuoJo8Y6wGwJEA5rt8PZUxtpgx9jpjbFx+SwYO4C3G2ELG2LUu3w8C8KX0eSvy31BdguQvWSGfHQD045xvN/7eAcBtNeRieIZXQ++RuZGuDuSK7xld+oeTyA3F8NyOA7CTc742yfd5e3YOG1JU9S6FfctrvStYWgPGWBWA5wD8kHPe4Pj6c+gyxEFDl3wRwKg8Fu9YznkdY6wvgLcZY6sMz6YoYIxFAJwL4BaXrwv97GxwzjljrOjidBljPwMQA/B4kl0KUQ
f+DuAO6C/6HdAlkatzfM1MuBSpvfi8PDunDdE7GDqFrnfJ7Fsh6l1BPHnGWBj6A3icc/6883vOeQPn/KDx92sAwoyx6nyVj3NeZ/y/C8AL0LvIMnUA5KXqBxvb8sUZAD7nnO90flHoZ2ewU8hXxv+7XPYp2DNkjF0F4GwAX+eGEOrEQx0IHM75Ts55nHOuAfhnkmsWtO4xxkIALgTwdLJ98vHsktiQoqh3yexboepdIaJrGICHAKzknP8xyT79jf3AGJsMvZx781S+SsZYV/E39AGTZY7dXgZwBdOZAqBe6ibmg6SeVCGfncTLAETUwpUAXnLZ500ApzHGehqyxGnGtpzCGJsJ4CYA53LOm5Ps46UO5KJs8rjOBUmu+RmAUYyx4UaP7hLozztfnApgFed8q9uX+Xh2KWxIwetdsrIVtN4FOYrr5R+AY6F3R5cAWGT8OxPAdQCuM/b5HoDl0CMHPgEwLY/lG2Fcd7FRhp8Z2+XyMQB/gx7lsBRAbR7LVwndaHeXthXs2UFvbLYDiELXN68B0BvAuwDWAngHQC9j31oAD0rHXg1gnfHvm3kq2zromqyoe/8w9h0I4LVUdSAPZXvUqE9LoBusAc6yGZ/PhB61sT4XZUtWPmP7v0Vdk/bN97NLZkMKXu9SlK1g9Y7SGhAEQZQwNOOVIAiihCEjTxAEUcKQkScIgihhyMgTBEGUMGTkCYIgShgy8gRBECUMGXmCIIgS5v8DDR3eHuy/3BMAAAAASUVORK5CYII=\n",
|
| 296 |
+
"text/plain": [
|
| 297 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 298 |
+
]
|
| 299 |
+
},
|
| 300 |
+
"metadata": {
|
| 301 |
+
"needs_background": "light"
|
| 302 |
+
},
|
| 303 |
+
"output_type": "display_data"
|
| 304 |
+
}
|
| 305 |
+
],
|
| 306 |
+
"source": [
|
| 307 |
+
"plt.plot(t_vec, eye_ratio_v)\n",
|
| 308 |
+
"plt.plot(t_vec, blink)"
|
| 309 |
+
]
|
| 310 |
+
}
|
| 311 |
+
],
|
| 312 |
+
"metadata": {
|
| 313 |
+
"kernelspec": {
|
| 314 |
+
"display_name": "venv",
|
| 315 |
+
"language": "python",
|
| 316 |
+
"name": "venv"
|
| 317 |
+
},
|
| 318 |
+
"language_info": {
|
| 319 |
+
"codemirror_mode": {
|
| 320 |
+
"name": "ipython",
|
| 321 |
+
"version": 3
|
| 322 |
+
},
|
| 323 |
+
"file_extension": ".py",
|
| 324 |
+
"mimetype": "text/x-python",
|
| 325 |
+
"name": "python",
|
| 326 |
+
"nbconvert_exporter": "python",
|
| 327 |
+
"pygments_lexer": "ipython3",
|
| 328 |
+
"version": "3.9.6"
|
| 329 |
+
}
|
| 330 |
+
},
|
| 331 |
+
"nbformat": 4,
|
| 332 |
+
"nbformat_minor": 5
|
| 333 |
+
}
|
codes/jupyter_notebook/fixations_in_AOIs.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
codes/jupyter_notebook/rtn_2mdl_et.ipynb
ADDED
|
@@ -0,0 +1,791 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {
|
| 6 |
+
"id": "s4mSPpqO8oEz"
|
| 7 |
+
},
|
| 8 |
+
"source": [
|
| 9 |
+
"## Retraining 'eye_tracking' model for subject and predicting eye track (pixel coordinate)."
|
| 10 |
+
]
|
| 11 |
+
},
|
| 12 |
+
{
|
| 13 |
+
"cell_type": "code",
|
| 14 |
+
"execution_count": 1,
|
| 15 |
+
"metadata": {
|
| 16 |
+
"id": "Ks_AbdKIb-xZ"
|
| 17 |
+
},
|
| 18 |
+
"outputs": [],
|
| 19 |
+
"source": [
|
| 20 |
+
"from tensorflow.keras.callbacks import EarlyStopping\n",
|
| 21 |
+
"from tensorflow.keras.models import load_model\n",
|
| 22 |
+
"import numpy as np\n",
|
| 23 |
+
"import pickle\n",
|
| 24 |
+
"from sklearn.utils import shuffle\n",
|
| 25 |
+
"from joblib import load as j_load\n",
|
| 26 |
+
"from joblib import dump as j_dump\n",
|
| 27 |
+
"import time\n",
|
| 28 |
+
"import os"
|
| 29 |
+
]
|
| 30 |
+
},
|
| 31 |
+
{
|
| 32 |
+
"cell_type": "code",
|
| 33 |
+
"execution_count": 2,
|
| 34 |
+
"metadata": {
|
| 35 |
+
"id": "T5XUNQBvcGf6"
|
| 36 |
+
},
|
| 37 |
+
"outputs": [],
|
| 38 |
+
"source": [
|
| 39 |
+
"# Parameters\n",
|
| 40 |
+
"path2root = \"../../\"\n",
|
| 41 |
+
"models_fol = \"models/\"\n",
|
| 42 |
+
"models_et_fol = \"et/\"\n",
|
| 43 |
+
"trained_fol = \"trained/\"\n",
|
| 44 |
+
"subjects_dir = \"subjects/\"\n",
|
| 45 |
+
"data_et_fol = \"data-et-clb/\"\n",
|
| 46 |
+
"sbj_scalers_boi_fol = \"scalers-boi.bin\"\n",
|
| 47 |
+
"sbj_model_boi_fol = \"model-boi\"\n",
|
| 48 |
+
"sbj_num = 24\n",
|
| 49 |
+
"sbj_num_test = 25\n",
|
| 50 |
+
"model_num = 5\n",
|
| 51 |
+
"r_train = 0.85\n",
|
| 52 |
+
"n_epochs = 40\n",
|
| 53 |
+
"patience = 5\n",
|
| 54 |
+
"trainable_layers = 2\n",
|
| 55 |
+
"chosen_inputs = [0, 1, 2, 6, 7, 8, 9]\n",
|
| 56 |
+
"y_scale = 1000.0"
|
| 57 |
+
]
|
| 58 |
+
},
|
| 59 |
+
{
|
| 60 |
+
"cell_type": "code",
|
| 61 |
+
"execution_count": 3,
|
| 62 |
+
"metadata": {},
|
| 63 |
+
"outputs": [],
|
| 64 |
+
"source": [
|
| 65 |
+
"sbj_dir = path2root + subjects_dir + f\"{sbj_num}/\"\n",
|
| 66 |
+
"trained_dir = path2root + models_fol + models_et_fol + trained_fol"
|
| 67 |
+
]
|
| 68 |
+
},
|
| 69 |
+
{
|
| 70 |
+
"cell_type": "markdown",
|
| 71 |
+
"metadata": {
|
| 72 |
+
"id": "wGy7E9iA8oFA"
|
| 73 |
+
},
|
| 74 |
+
"source": [
|
| 75 |
+
"### Retraining 'eye_tracking' model with subject calibration data"
|
| 76 |
+
]
|
| 77 |
+
},
|
| 78 |
+
{
|
| 79 |
+
"cell_type": "code",
|
| 80 |
+
"execution_count": 4,
|
| 81 |
+
"metadata": {
|
| 82 |
+
"colab": {
|
| 83 |
+
"base_uri": "https://localhost:8080/"
|
| 84 |
+
},
|
| 85 |
+
"id": "bvzEN-j4cGFQ",
|
| 86 |
+
"outputId": "8ae84f7e-2cca-4f92-d03d-43e5e5f431a4"
|
| 87 |
+
},
|
| 88 |
+
"outputs": [
|
| 89 |
+
{
|
| 90 |
+
"name": "stdout",
|
| 91 |
+
"output_type": "stream",
|
| 92 |
+
"text": [
|
| 93 |
+
"\n",
|
| 94 |
+
"Loading subject data in ../../subjects/24/data-et-clb/\n",
|
| 95 |
+
"Samples number: 4200\n"
|
| 96 |
+
]
|
| 97 |
+
}
|
| 98 |
+
],
|
| 99 |
+
"source": [
|
| 100 |
+
"data_et_dir = sbj_dir + data_et_fol\n",
|
| 101 |
+
"print(f\"\\nLoading subject data in {data_et_dir}\")\n",
|
| 102 |
+
"with open(data_et_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 103 |
+
" x1_load = pickle.load(f)\n",
|
| 104 |
+
"with open(data_et_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 105 |
+
" x2_load = pickle.load(f)\n",
|
| 106 |
+
"with open(data_et_dir + \"y.pickle\", \"rb\") as f:\n",
|
| 107 |
+
" y_load = pickle.load(f)\n",
|
| 108 |
+
"n_smp, frame_h, frame_w = x1_load.shape[:-1]\n",
|
| 109 |
+
"print(f\"Samples number: {n_smp}\")"
|
| 110 |
+
]
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"cell_type": "code",
|
| 114 |
+
"execution_count": 5,
|
| 115 |
+
"metadata": {
|
| 116 |
+
"id": "ygdBBcjud--4"
|
| 117 |
+
},
|
| 118 |
+
"outputs": [],
|
| 119 |
+
"source": [
|
| 120 |
+
"import matplotlib.pyplot as plt"
|
| 121 |
+
]
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"cell_type": "code",
|
| 125 |
+
"execution_count": 6,
|
| 126 |
+
"metadata": {
|
| 127 |
+
"colab": {
|
| 128 |
+
"base_uri": "https://localhost:8080/",
|
| 129 |
+
"height": 336
|
| 130 |
+
},
|
| 131 |
+
"id": "x4GOLYKnAWR-",
|
| 132 |
+
"outputId": "2352588f-265d-49a7-d627-2ea272a4c1f2"
|
| 133 |
+
},
|
| 134 |
+
"outputs": [
|
| 135 |
+
{
|
| 136 |
+
"name": "stdout",
|
| 137 |
+
"output_type": "stream",
|
| 138 |
+
"text": [
|
| 139 |
+
"[ 2.92193524 0.12354425 -0.86240532 -1.57755936 5.01365275 70.57027415\n",
|
| 140 |
+
" 0.51105064 0.5061323 0.46361178 0.46345159]\n",
|
| 141 |
+
"[0.00530988 0.016 ]\n"
|
| 142 |
+
]
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"data": {
|
| 146 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD7CAYAAACscuKmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAABP0UlEQVR4nO29baht23ke9oz9dc69V7IkS0WokqhviUkxhtTh4jq4FGMl1HVD1B/GOAlBSVXunzZR0pRYSn+khhRsCHH0o6SIuMEUUzlWTGXUkNRV7R/9o1qKTSNLUazain2FZKkgO7q+55z9Nftj73fdZz37ed8x5lp777WOz3xgs/acc8wx3vHxfo6P2aZpwoIFC/7w42DXBCxYsOB+sDD7ggXPCBZmX7DgGcHC7AsWPCNYmH3BgmcEC7MvWPCMYCtmb639QGvtC621L7bWPnhbRC1YsOD20TadZ2+tHQL4VwD+FIBXAPwKgD87TdPnbo+8BQsW3BaOtnj3uwF8cZqm3wSA1tpHAbwXQMrsb3zjG6e3ve1tq+sQNCxwpmkaun95ebl2DwAuLi7Wyru8vFy7zvLWNO7/6zqmaUeh71TX7lmvTPdc6e7d72HkPU0zSsMoTVm6qH/veYWRsTHn3bntvGmbAMDp6SnOzs7sC9sw+zsB/A5dvwLgP9BErbWXAbwMAG9961vxYz/2Y6tn5+fnuLi4WA3ii4sLnJ2drf6/vLxcPT8/P8fl5eXqncePH+PRo0dr77766qsrBp+mCWdnZzg/P18r7/T0FADW0ul1CBMVFj1mDwHUS6PlxDW3RZTPNET7cN5aj95ga62t/g4ODm7U6eDgpmeXDT795Ty0jEhzdHRk0wYODw8tDVn+DBaGGW3n5+elQAVyJcH33T33btDbqw+ncdeuLnHNaT/72c/m5ZRU3AKmafrINE0vTdP00hvf+Ma7Lm7BggUJttHsXwbwbrp+1/W9rdBa28g8jnfd/9W9beEkeS+N0+pz6pxZD067R3tWWlDTVvc4P/6NZ5x/aw2Xl5drmknzUnovLy/LfuJ3tLxo04ODg43H0Eh/3gW4PtEGmSWp97mNK2zD7L8C4Ntbay/iisl/BMCf673kTGMm/uLiAgcHB6sKhB+e+elVXg6Rtw4Uvj/id4XZzXECNUmnaVq5DZnZzmndc26vcGv4HXUdKjfCmfNax9vwx52Zz2Z9dg+4asPDw8O1vNSM5fLimY6ruD/CBD1EeUFXtDvTEeWroHGmvsIxccbwzoWJsdvDxsw+TdN5a+2/AvDPABwC+J+mafr1zjtl0IsrwczHTO78YpdfSMoqMMSNrAyvtGneAG4wI4C1gRq0RjqmnYWY/nIcQpndDaRRZs/aoboewaiAcMx+eHhY+uwuDZD7sUoTC+AofxMB4KxGHissdFTgb2ItxDhygo2vgz+4vAzbaHZM0/RPAPyTbfJYsGDB/WBZQbdgwTOCrTT7XLAZlwV1Dg8PV357PO+ZapmPnfmkmX/em8bSaTr1oS8uLtam+qZpwpMnTwBgdd+Z6+5+5D9ilo8ENUfnjkcDm+oC9d5xwbzMhWCzVP15fs/5rnqfp/puw38P+oIWXduh/rMrs2fWZ3TGe1G/uGbXscK9MjuwXhH14YPZDw4OcHZ2tmJMF01m6ACIvLS8i4uLNB9ldL6ntMb98/PzNeZ2c+Mxr5sxtvt15bvnI1F3TZfVL/JwDOiYOQv8Vffcc+0T9dk54BUMdnx8vMqD/fn4P/zyg4MDHB8fr+Wv8+wOPT87aNJ+c0G1EUHo0BunQYcKnDvz2W8bm0QY9w0uWLIJo28ydTSi4av8qym028acfHWqz4Gj7y6YxumqceWUhMIJSQ7w7isWn33BgmcE9+6zV352pOFfNuPZrB8paxTOfHamr1samU17qOnO9zhtNW2W3eulqfx61ZBqjt/GVFxGg+apbaTTSEoX085z3my681g5PDxcc7Miva6NUJrmaOc51t
QcqBXjrC6OHYxgZz67C4a11lYBlaOjo1WamG/mTo9O1DlUzdcFimINOqfj5/HLgbi4p8EznQd3a995UOu11kFpqXzj8Ne0DTW9zgW7erv8s/apzOlKeCmTAzc3L6nA4aBuXB8dHa3usxLg8RF/R0dHZbD36OjIBvFG/Wodz/qeixNpnaNNqns6LjnQPSqcduaz8+o4hkZaDw8PV/5Q/PWWVHIZVZDLNbAyuNPcLpiXlRHX2Z/CMQTn4xhW61Rp50ywuLTZAOyl6b2bCdqMftffytS6Yi4L7jLzA1cBOxYUvBIu8qmggtSt5NMxpPEAtxAngxuXHI2v3l189gULnhHsVTResY0vdBcRZGD+Rom78OcU2/iN6iZU+cwp4zZ92bvqyz8MmLN24N6ZvVrHy/6H+rRhfrHPHiZ+FqhwG2fU13br7t11NofuNqYw/ZXZ3vPRFSPz2dW0kwsY9vLL3ImMFmbyylWo8nB5ZnBBR36WbShhuP0N1ZqMCm68MVyeMY4raGyI3RaOOVQ03yuz9wazC4oBr1eI32fmrhbeuKCaK4eDgfHLzB2r5fRdt/CGfVIWGnEP8Ezl0IuOOx/c+X/ut8fsvb7Kyh6hpRJKozMCFaPHcy0zEwqMKlrfs1hcHEoxIuTc86gPK70IMI7s8lt89gULnhEszL5gwTOCnQXoMpNXr8MsYp857o2Ywtn0FCPbAMP3o1w14TVfV4dtoAG0kQBhz+/PzN547v7fJG1mWqsprHXswR2AwXPt/MzdczRl9cs2tWRtuMlmm5HDLdgdDJ9d71V1AXYcoFNfVoNjzGBx4OTZ2dnqHleYfZqA+uecb+a7Rzr+P3azPXnyZC2dEzZuU0IvwDT6rBJmPbjAmQ7MucGxkTKZTm7veLbJWvJg4Nj4oszMf3oIhDsIY2QXmmP8YPie0HPz6txvbqGZUzj8bgQQNZC4V8xeQZk97vGzOXmN3JuTZjQqXSHSb6vxN8ljbjS/ujenzCw4tk0baHDQBQuztPcNV+cRq7S365HvD1lE3RQLFiz4Q4G90Ow9Ce/87pG8ttEc2bTUiGl9X9h24cq2mq5XNtNX+cXqs/fWDvAfP6/M+bvCXeQdbcJLbXuuwsg42Cmzu40jGuCKNLGAJf7cAhg97dXNg6v/nplPjh4dONnuuE1QzSVzmhF/U+noMc8IHVUZGUOP+K+aRn1qx8jhQ+tCKnfyqm5y2ZT5dRzw75x97BxHCITfrQd1MCL/w8NDu/tyBDvX7Fk0np8zY7p07n7VAMz0PWQroG5Lo/eYvGJG56u5aHeVz1xmzyybXowkK08Z1n0RxjGX/q9BR5d2NDCpdeEAsIIFT6TtwQk4DjS7NCrUVCGOYPHZFyx4RrAw+4KnBruMj/xhwM7NeCA34dU843nN+It7sdc9/KGDg4P0cEE1u6opjRHa+f/R6ZIMbopKn4/mM/JOL+A4YrZnZrCmcQtORsrTfKKPdXOUfgDCfRDCzbOP0qTz7FmQcNMgLtPaM803Cc7ulNkzHxxY/yKMO4UkwPd0E7/61xzhZL9n06h2b2BWqAbcnGcj87UZlClHBqmuf8j6hftV+2EuzW4h0+Hh4eqP+z8Ccs5nj/tzmMqhilO4gzbm+PEuEs/PXYR+dNXeU2vG3+V0yoIFI3jaxuBTy+wLFiyYh51uhHH3RuaN457z5/h/Z/KGGcfmFvtk/P/Ikt27moJzU2gj02QjCy5cbGSkPODmV0ozM17zyNKM0MvuWZxtEPGZ8Ms1/2xOXX327LRivuemX7N+38XZ8Xs5z67MzJtS3K6dat6d/3jBATNwdArnwYE+twBHd7NF/tkuN0Vv8FaDXX3ekXereeuof7ZGIQuKVte9uIQLWvH/+rxX3uHh4eqrLtG/x8fHaweQss/OAVpgfVGNy98hW7Ph7rk+Y+EUnzLL4hWbYFOBshfR+E3hNPucxuTjeOcEqar7rvN7AzrTiC4qP6LNXYRY6dH/R4OCvbQ9GipBUOWlml
sFhtPk2ZnqI1970XycJejorALNGQ33ZQksPvuCBc8I9l6zj/rE2RRehU3WGPfy7/l/zi9WbZm5CpkGrPKf2yYj6Jn+mrayHnquidPmvfPWKnp601ShiV0MYyT/24Qz17OpuRHcO7M7MzkLymW+ZbzDn0HunSTjfH1OF/47+7iuPDbznD9bffgi0mSMrfe03jGPzObk2dlZ6sqMBL+ydA4jcQRXTtTZ+etcH+DmRxzCZ4//1WTnhTM6785+PvvT1ZdxnHDkuJL65b1FWSMMmW16iWd83fs8cyXMdr6oZtM0yhAcROP7mgen1w7sdY7m6VZozdGiVUAuC04yw3C9Oc9N/ONRwZDFNKr3M+3NjBtwzO6YHFj3yTlNlJFF6XXxVXX6UNxjTX95eWk3r7j3XJv1wNZFXHOeVVyitGpmUbFgwYKnFl1mb629u7X2S621z7XWfr219oHr+9/aWvvF1tpvXP++5e7JXbBgwaYYMePPAfz1aZr+eWvtjQA+01r7RQB/EcAnp2n68dbaBwF8EMCPVhlN07T2CV13uAR/iIH96/C1Yl17vM/+Ovvs6pur6Z7Rp8iCRfosQ2XCVvk4F4TnkeO94+NjW5/KhHT+9Mg8cPjYnM6Z1+5/9a+BsY0q6oe7uqhLoB9RYGiMIMrQPF27xXw5/7m4Df8C/QNGXIzgLtBl9mmavgLgK9f/f7O19nkA7wTwXgDfd53spwH8MgaY/fT0NH3OQTA+LpoDLSwg+GSaOAGWA2QcdOMy4l7lHzN0YFdwEVw38IHaF3PgRSRcr8yPdkFJV+csUq70A1fMwswXPnAWOMsWvVQCNws4Rl5aX47OBzMHDbEgR9MyMh+eg3Gu7S4vL1dfKnLtrW0XOD8/v1GPrD2csuC6jIybVfqhVK8X9G0AvgvApwC8/VoQAMBXAbw9eefl1tqnW2uffvXVV7tlVBKvSvs0w1kP7k/Tunc4zZyBMJfeKu/qrHZm3LlnrM+pyzADbHDOe1bWXbT1bWK4pq21NwD4xwD+6jRN/4afTVdcZzlvmqaPTNP00jRNL73hDW/YitgFCxZsjqGpt9baMa4Y/Wemafr569u/21p7xzRNX2mtvQPA10byUh+dtTP73WyeX9MAADg7O8Pp6enqYxFsUrGZzfPt8VtZAs58y9IFnEk5B+wvOx+78uX5/8qMV2h5+kyvnX8bZm+Y7zFdFn9hSscvm9m9s9o0jqNw58fx3vbw0fUwE36/0uZR32pKjfPKFuBU7awY3fy1LbrM3q6o/SkAn5+m6e/So18A8D4AP379+/FeXpeXl3j8+PHqOvxzZtjw6YORIw378hcXFzg9PV3l5UzDaZrw2muv4ezsrBvA4nwZuqDBzWlr/Zy/XnV4NlD4GadVGnuM7gbcqH8O3JzHffjwIR4+fLjG2CcnJ6v/Dw8P8fDhQxwdHeHhw4c4Pj5e0eI2EylOT09vLJY6Oztbq5POqbOwiQU4GhjM6qMBW113wXRnG4p0PT1fV4KL+515IOjkejD02W1+svl7AfwFAP+itfZr1/f+Jq6Y/B+11t4P4F8D+OGBvG40rGrnaFD+7JM7RtqtZnMN04tsasfNwajkDvTSOk3fSz8HI3n2tDsH5FibBqMdHh7i5OTkBrPrSsWsPir89Nhw4PWAGlsOMehVk7strXytGjxboqp+Obd9dqrMCJSWuZgT+xiJxv9fADLq3zODrgULFuwQ975cViVjZmKyxOZtglXUeY65ymk2jVpvIsHnmNCbovLZbyv/0L4cL2ENnu05qDR7pNNprk0xspR0zpi5rYMpsnychVCZ8HNxr8zeWsODBw9W1zwvHh3Mn2gOn11NeDbv1d/ngFyUxSYhuw0XFxdriyx4zlT9sOiIKsDofGr9iqj6i1mn9YJzkcb57Gr2OtNTy3BCz/mD5+fneO211wBgzW/ngFgcLnFycrL6gkn0ZfjfIcR5PLh6ZoOdFQEH6CI/7tMK0zStpYlFW9x+ul6DF3bx+A
Bw40TjiCEwOC4Qc/UaUFZkPrxu7NnWZ781uMGjB0jE87iOwRE+OmuMeOYCbK21lb8YDRsdEQ0aA1H9JtVUwbAabFJmdx3FPqMy+iaoGJa1Ig+ozHJxwkYHjmpH185cRxeNjzY9OzvDkydP1tI9//zz9hNNAd7pl2k+FjK8qEaDc1kbZjMcvHiLrUumg8dv0MvtE/XMyuYFO6poHNx46u2EW9EylGrBggVPPRZmX7DgGcG9m/Hsw4T5pXON/H+Y2mwe6TRd+O9PnjxZ80+Pjo5u+NDxxUz+cqYLGob/HSZb0MHmW6xxrpB9gaQy4zMf200R6nw6m69ZENTdd1OQ6o8CN9cSRFvG/601nJ6e3pgSivaMdte2DDx48OCGnxv5XlxcrPpB97IfHR3h+Ph4zWdnMz7rJzbXo/664YX7ntcAcIxG5+s5fxcEVHeB+5z7LhsDWnfOO8O9MrsGZNTvdZHYaNwIzqm/Ev7648eP8ejRozUfShs/GFQ3VHC+IVQ4UMg+mvpVXIaLsroAkevILGqs/qTmryu4+GQW54tG/u5ZtmhIffbRiLSbNVEfOoJ98fxNb3qTFULx/PLyEicnJ2txl1jY8/DhQ7zwwgvd8l0MQscg15GDwRz30fUeEWNSIcfQvucxnzE458+7RiM4d3JycqPODnthxmfTHyqZNWLMnaedyHn1ptZ0EFZTNI7WXv4B1TKuPpvk7wa1i6xnAqVCJjBG33XWiHvumMyhtyLNtan+Zu3Dvy5Qye/z/9XYGS2vwsiimZExuBfMvmDBgrvHzhbV8P/q1zhNoNIx8zErLZnRMYf2KC98Up2GclKerQ6ewsmQbRbR/HmKUE1zR3M2xaTPGJWbtUkbjlgHPB40vds+G+Z9nGnAdLk98NV0nJv+GrVoRrRr9KFzSbkv3Xvcz5GPLhHeG58duNnQma+kTKVpomMjSBJBtPBfpmlazelyZ4VPHkGQrOGz55oma1yOASijK036TqC3iIfXHXAeyqC6KKRidKXBbRAaxYip6to1+kfTxQCPfo41+FHnx48fr5g90p6cnJTui9sYw4u1dFxywDYWZem45fx01WBrr+8pmBO7UWXH+wF4YVhl8u/0dNkRzaAVdoM/GitrQEW1XLE3sJ1PlpURz6Nj4i/zTZ3W1nuangWSo0e1fk9L6cAfEUpz4gm9vAK8Sk3BcRpd1BIBrOz0V1dedixVFkeI9ozxUlmUmZbOrrNYQ9Q7/qKcOdH4xWdfsOAZwcLsd4RsznXU/xvdtrgp5kbW3bvZ9JGbRhqJyFdwGqt6JyvXXTstPqevRq2mbbDNBpjAvS+qcWampgHWg1TaGdlgcsEknkNl/ynyqYJgvNPOmffqDrC/F+C12lwvZ7qp6armJS/qiPd4vpnjF67NNF4wwmCZ0KqgAbwwO3nTRtDG7eXiHPxOtEeYznEwSebPHhwc3Jj3dnEWdV10LQG3J8dJpun1vRtcJ24fN8aqaeL4VTeF25Dru7eLaoB1jZX5x45gZVLH8ArulLiO9JXkdpHzaHgNhjFDnp2drT7HFL5yBHM4T+7ArL7c4dwGOgiz+EOmbXUgap114Ct6AsMJ7/BvXTCMhbFGmjMG5o1QHDhz6bUN3eef3EIlrSuPpSg/hG+msOJdbWc+TUffYSbWdojgJLdjBOhc2YrFjF+w4BnBwuwLFjwj2ItPNquZk00XsRmVzU0ydLorfp1Jm02XsCkf5pXSw2n5zDU3VQLcPNBCnzOyaZ+svupXZu2r0DbXZ9o23Ka99M6kjnd1XTcvEon2dnu4eVot3CV2i9j9Yncx3CqmVdvYjbuqPXtTj730zoXS9lKznn+BsYDuzpnd+X7O59FTbTSdgzInb2LgQFek5YEUAyL+dwOAf4Gb/hPDra2O+xWjO2ZXhmO/PfxJ3mHm1uS7dlfmdXENpdX9r3lqgI7peO6559bef/jw4Y0AnQal2G+OnXCx5oD7CvBxIW1rrWO1lkPHKZ9u5PLP2s8xdk
DXEbAPH6cCcT68qGavAnSMTaYqekG16l6lFUfBefSETaBidM0nC7htg0wDb4pMM+k1l+vSuIHPaXvlbIrRMdALPgZ6mn0uKmuhaq8eFp99wYJnBDtfLqv+9ByJBtzUHpmGn2P6a57Z1NDImvFsem1kkcSmWiF+VUOqxt3UysliLHNo0mcuLb9zm5bJiOXEZnvVTiPjao6Pz1Drx7XNnMU2O9/1pj6WNjKfvBl+Gf+xwIhTTeP94+Pj1bx33Ds5OVnz7bQzw8+N8oD1deqcl+s4nYcf9c+5PA4K6smnjKCV/fY4eDH+zwJujkFHzEM3V6+CI+qogTldY6AbVdj/5EGsH4PgNuzNmffgmDkbj/rMvaP9G/nz4idd5JPtk+CTeqPucc31H2X6nWp2t6NM4TpZG5AHVMWMnEbfVwnNZSnTx3ON7LKg0Hpynno/axuFMijnq7MC/I775boydKBqmjnWQKaZGCwAXL00+ryNz+ro7zE7X7u6syLK4LbLKsM6hnf15/aKtlhOl12wYMEaFmZfMIxtZzIYt+mD3yZdTws2ab97N+N7CxXUX4q109UBAJeXVx8gePz48ZrJrWY9gLV7BwcHa6eFMpzboHWIdAHnI48EZDTw4tornqu5GIcoqBvD72cm/AiTbJKGP/yoc8X6SeWYNw6a+DrSZQE+5670fPaR6bSRe9l6ejWpNe7i6qj9Hr56+OhsxusXbNUN2iufnRtJP8fkwM/Df46VahFsi5NNomGAm0JDA2fBGBw3YCHAm0xcng6OsbUjXB7xjh47rYtq3EA6Pz9fBTHdgRdcdzer0EOvzplwC0Z2C2QiXTA/18tt7HC+ugakRoOOvej6SB3nBAG1PzKGZYHGY1mDkxzg4/sjWMx4wW2alwsW7BMWZl+w4BnBvR9e8fDhw9cLJ/PTgc13PWMtTEL2zdl/ifT6ldgKm5h4Lg8G181Nz/QsCX5HD7dQVyPaIytf/feRumocoDdNyj6pruPWde66yYX9+wCnd22iuIs+nmO2a/7qmrl5cTXHeQ+B2/BSTb3tjc9+dHSEt73tbatr3TUG3AzUKfHxDn/COfxW/mxQpOOTQhkj88fRyKOdzSfh8Dx/dJh+cqo3gPWe80cjrxCIIUCjzhzgjLbgfKrB4fxBpdvNpcfCD/XZnQ//4MGDtXq6TxBzmdomPcYdWeW4iYDPxicfxhHPeWefLi6K9lIGV7/ebYSJ/EZdz3vX7L2AVcbs8cuWAAekRqLet73BJIMu7NH/FZWmGkG0I2vhqi0qWtx9x9x67TQUv1tt6x3JP97ZB1TCIXs22q7V4iFNW+XtMNx6rbXD1tqvttY+cX39YmvtU621L7bWfra1dtLLY8GCBbvDHM3+AQCfB/At19c/AeAnp2n6aGvtfwTwfgB/v8ogTJaATokxKjNezX/nU95FVN1ZIlW6TaTvXHrc1KGmYa1RLU+eSyPnC9w8903vq/k6imjLu7DM5vrwm8QEnCs053rUcu1hiNlba+8C8J8C+O8B/NftqtTvB/DnrpP8NID/Dh1mPzk5wYsvvri61krw4hk9SDB+Hz16hEePHuEb3/gGvvrVr64xv24aGWkcFg4xqJygcULELaKJwZyZyr2Oq4JhTYIxHJTkNQLRDurHh6Dl4GXFQG7g6jx4zJWzqR3XvKgm/FAW9urWuTo7hXCbzKlw7cEnJAXYhw5BFIu7tA+fe+65G+6L+t3RPioYe+Nljkk/qtn/HoC/AeCN19dvBfB70zTFcS+vAHhnQszLAF4GgHe/+91405vetCJKI60cUOIVSZz2m9/8Jg4PD/EHf/AHa9F64OZqolEf7+DgYG0HmZOsGVNw2pHy5kjokUUivIpQr7ksjnNwm3F+Ln6iAS4OHkV5buGHY3YNoroytX3cbM1dxl6ccOGgMKeLOsY78RurMlt7PSibxTniWn11vb6NuEX3zdbanwbwtWmaPrNJAdM0fWSappemaXqJI/ELFiy4X4xo9u8F8G
daaz8I4CGufPYPA3hza+3oWru/C8CX5xY+umRzwYJ9w9M4brvMPk3ThwB8CABaa98H4L+ZpunPt9Z+DsAPAfgogPcB+Hgvr7OzM7zyyiura52mcWZ8zNnGXORrr72Gx48fr8z4MPMuLi5wenq6+kJHtVgnygoTshck5PjBNv4jv6OmauTPGDHZeGFR5O38S/bZwxx1m4B668B1rlwDcUo391FmImudlZE2/YrspsiCdhpc1PEQPjzXXw+bAF6fhlT/XM12Z+5vg23m2X8UwEdba38bwK8C+KneC0+ePMGXvvSl9HkwbQyM1l4/bfTBgwc4OjrC6ekpLi4u8OjRo7Wdc+fn53jy5AlOT08BvC441Keey6xZAA4YO7m0ynfbzouBoIFJZXhddcf/V8yXxRd40M+JTkcAKysv8u61yybtlglOJ+h1rYcLlPHpw0wTb8ZqbX3XWtzT/Dgw5/z1TeusmMXs0zT9MoBfvv7/NwF899wCewsSHDNyAKSXx0g5zzJY4MXAUgZ37zzt0KXGu8Iuzf/d137BggX3gntdLhtr1TOobxdSMA7iPz8/x9nZGS4uLvD48WM8evRoZa6fnp4Of0hiU60/arZuki/gTcpqaiyeszuhG394+kfzqRY1MbIDEau6Zu/cBnptPEd7Kk28QEn7W+fKXVnO3Ncv2mSbYXT+/bZxr8x+eXmJV199NX3Og48HMJuasQHm9PR0xezhsz969GhNmPBCGw7kxbXLnzu4mgN2gyGrc5aXIvOtGVk+cd/NBbv8q0UaI6h281UCxDHH3PKrNs/WSmRlZGsqeDy4L6oqY0Y9NCB3cnKytraAFz3pJhf98o2LmTg/vgqwMu5ds/ci5KqZ2V8P7R6f69XPOTm/Pjp/JDjnBsg2Czh6dc0GZG/Lb/bMpYu6a76bLFu9C+y6fMAL2WwhFTN6XFfPVBg460CfZ4FRpWMuFp99wYJnBDs7cLL3TDU0L43NNFyl+TZFJeE3gVocnFc2E8FwU1V8X6e6sm2vGp2ufNQ5qKyZKLOKQ+yDph9xz9w7zj1gTc/3szzuEvfK7BcXF/j93//99LnzlTlgp4toYtNBmPcxBx95sJmv+XJnsJmvi1M4bTXnru/oPV6bruvTs7leN4+vp5L0gl5uWo2DR70BlgWMVHAo7fw7klbpqp5n6D137exO9wnwLkGeG9f/+ff4+Hjlk/M1t71euy/VMg29Oo8Kk3sP0D169Gj2OzHwWasHw7PPrtH40WizMrzOP2eS3gWmHLNrmqBdg2nK/HrqCW8q0XtBJ/9miIE5EmR089O9d3ttkD3j+EKF29SOWeyEx0RcuyCdMr87QVf7S5naBeSymEpP8FZYfPYFC54RLMy+YMEzgr0y450/pz57di98eM5Dp/ECGkjJfGcOfmV+e2Vmse+pi1CyjTqcxh1euMm8tKLyoUemHzmuMVpOlvc+ws29q8kd99xe/mwRjcuP89Rpucwty4J/Pdw7s3/zm99cu6cDzzG1+3NBrgjIBWLuPVAFe9wGkGBQ9d8z+vXkmsvLy7VNKgBuHMvVY4DK/+WNQNU71fuM0YEz8l4WaKoi8Q4uwj0nUj5nwU9WdrY7jT9nFQzuAnSVsOD8tSxHvxMSe3mUNHCT4XrRa73vouu3Pd12W8im7QJOgzDYogD8iji3YKhiqCogd3m5/gnqu0YvOLhv2NQyuQuLZpM8F599wYJnBPc+z85r41U6ZeZ5/KpJrte6Lp6n4jgPzl+n9NRNiLxceTp1FvllyEzyTEo7rafLOedMMfbyb61Z+ntxAt1ow++FqTmXvsql6LXbnHxHpim5XI2pOH+c6Yt3qliO2y/vltwqzW5evhp/98rsjx49wmc/+1nbWdUCkx5GN5Do4hb+ZcZ2fr5zH9w8+Zz81A3JFqO4Dt9kbXs1gOJ5zy/uzXG7HWHZOvIerb301aIg/SiF3s/or+6Hsjg4OFj1fRzG4fqKmb3KN4tLVG2UCcMqAH7vAbo4Ti
rQ89tGBnM1SF20XbUyv+e0tXtfGdtdu3c034r53WILrXOvfTiNChFlwmp2YKQfeDUa513tkMvy1XzcO5tq+NH0PdpGEWOqEqRZnCWLyPfKcrj3AB2wvgIrrrdB1fjZYHFBLW7kypVgl0LTZQzMyAaptkfVLjo7UGkP1Rw6pTja/lnAkVfkZXS7ZbcundYjcxGyPCNfrVvlFmTLTVXoOvoqqKAbtZaUP24LO2F2YD/mXNX8dzQxU1fr29Xnj3vxO6KBR++rUHL/8/vOTHTmYs9E5zrzOyNR/LmCfYSpqjXt/L8yndLCVoSzDOfQxNjEFdo2TfXezphdobu3nCSeM58MbB7AqRbyxNdQK2Z3NM4Z7I4h+ZfL03Kq/OJ/1nqZMHB5MMPr+/qFWhYAmQaOZ0D+JZYenLXB+wXYdw9mZlqYtkp4OEGwS2wytveG2cN0yTZ1RGBkkwDeplBfnE+5UVOe02fIzLNMQiuDKV29cp3pqUw64jow3RmzO9fA1XWuz+7A9dFZE/4NIQTcPHI7nruNPkpf1Knna/dordq2Kn+knBHsDbPrMVSBLKiUrWhT9ISDPs980lgNN03TmrbggeD8fkej09QO7Av3gnk9QcPgAVSZ+E4YOD+WNSb3XXxjLs5S5+OddMlttGm0/9HR0WpMaJ9n1hP3nTIxMz3XJ9P22l6Zeb8tE/ZM/Sr/yipy2BtmZ0RH9TQgd5jzrappOH3ea3T2SVnDMzNmebln2a++xwOtF82v8hvx57m+Pe3P91nTR98dHBzg/Pz8hiaNj0xEe2qkOnB+fr627JTrkR1dFowb1+qLt9Zwfn6eCimuQyBzL2+D2as4QiV8+J2whkcYfi+ZPfPxeozBGGX0zB9W0xnom1wZbSPvVXlVjFwJqU1oZS2n5joLHx2UMfccz5npgdf3LpycnKC1dkOTa5/3FiepKZ8hW5OvJryjw5n5WtYcpq/6Q9uW27iKJajCqsq4d2avpm+AeT5Qhh6jO83oIu1ZXpkfNuJ7zWF6Z75ndLhyemVy3lk9MuuJnwVTKMPEH3/xhDeOcD5s4msZc7Sns8SyugPrU3sxNpUGXbyjq+S4zo4WLq9Hk1pySksWrxgpYy81e4asMqPBnCyPzBfM8ttESweqQM+I5TLHL+9pkvjd1Apwz9mtcaY+a9VIz/fdYqHRoB4Ln02YKzOrLy4ubsRpHHOrAHCCVNOO0Nh7NprXvTJ7dGK1KiqDVmhOcMKZ+NlyWZ1S4zzCPHWYK8XVVHNMN+LXa9mKkcHiBrl7n+lT/53TZn6t8/lZ46uG57yzpb6ZRTg6ntz3553WVmEUbebckcqS6iETECP92KvzU6XZ50679bTwnACegzJsvOs0i+u8bTSqalItJzP/ld5R9MxF5286s5SDSlwHXnOu02QaNMvWvd8GetNzSpe2p5vzz+IL1ZTfKOZYmTtbLjsKx3iqETPGqoI8LtASmjt2zykTq9ZnDcXvO7r02pntLj3XtWfqMyorRAeo+1/LcVOdmaDpXTsNH/TyJhOn3cOsZg3rmEa/ssp1yNpEhRKDd6fxcz6cxFkZ1XLfEcbfxmVU3Duzb+O/cFpucG20TRuI83ONn2k0/t+V74RRlsYxvPq1labumf49Zs3KycrjMlzfqvCoEAwfdLCprNearjLnq37jgLFbvenGGNOk1klA14qooLsL9Np4rz4SMTcPl9eoqa+DgzVLSOsRUzyzMno+Vs8s5nSV6Z0xXubCKL09jPrCzKgh8HRRUEYHsL43QX/ZQmNr4OTkxC5lHXGj+H4wfM89YOsmaHJrCdQScVaCpnXm/yie6kU1czDXj4+OdRouGpjTVP5poKdNe88q9OIOWdpNYgGZeV+Z/S7PytpQy0FXILr8tb8ijWr8gDKKmt7qFjHDK/S+0/4qaLIgnjL2bRwBNmf87z2zZ5W5rTXy2vGsldwgqsrNhAM/c4JiE2tHTefMUhjVcBXUXB2hK7uu0mfuQ+Ye6By5y7Myr5
3W1TQZqoUuLFgODw9vLPvVpcKOBidUtsXeMHvld951eXyP/VaX3jF89o7Lf64Z7dAz+7eFcw16mn2bcqsYhKZRH1kDiAxnTldLsZ2vHu+7pdVujDCTRrCRPxse6XS836UvH9gJs88JWPXuVfk7AeLm0bkj+ZNSjk5mbJbUMSDcDIAOzAxuAPJf1KPyo7lucwfQNgNOBRkjmyN3Qi8Twlm66sw8NaeBm6fpME1V/Znhua6qteM+pw2/nseOjoXM4sg0vFo0t7Y2vrX2ZgD/AMB3ApgA/OcAvgDgZwF8G4AvAfjhaZq+MZKfIhsk2UYVlsK96RRu3Grpq34jjp9pw7JfBmC1uePi4sL6lz06GcoMPOXjVvptwqC96LAy12gZKpziT5fPbitU4rfXpxylZ2bp+dSaj2OsypJTayPGBpv2bGW4sqpgoSqdzO1UjGr2DwP4p9M0/VBr7QTA8wD+JoBPTtP04621DwL4IIAfHcwPQD/YodBnmTDghnOMzgzt7ocWVl+vmrceMaHnuCfqGoz8jtIR6Z0vvqmLkTFwj+EzrV3FHbQe+j/nHQwVDBpRd6ZJt7/26sj5VT41a39diKMMqppdFaCbDp7bR11mb629CcB/BOAvXhdwCuC0tfZeAN93neynAfwyZjJ7IDogM62vy73xjj6r/DdOwya1vpMJh9Damld0ZryTMejcwBWwbs4xIzP9ut0zno1o0BFfPMAaSBlT3wsm4l/+U+Z3glT7S9vAWWBxT013pvPg4GrrLW9u4TlztdoyzaqB3FEtH+XxPXdQZ7ybMbwrrxfMG9HsLwL4OoB/2Fr7YwA+A+ADAN4+TdNXrtN8FcDb3cuttZcBvAwAJycnJUMGKkbvLXGtkDHcppF97YjbQubXujrOtRaq67iX5VWZlI6GzKznZ/xdcs5DV+w59yjT/lk7MQ2xLNcJ5yyIB3jffZThe1YMWx5Bg3Ol5mr0wAizHwH44wD+8jRNn2qtfRhXJjsXPrXWLAXTNH0EwEcA4IUXXpicRq6YpsfoygSVVnfPR5h0hEmyMh0DcBrHAJx2tGMzRuNy+H5lwmu5ztfuab7sL7R7MJpuH3WMpxaNHg/m3q2i82oSq8DJjrAKOPNbhVHmbgE3t9WqRcH7A5jGKGdTjEzivQLglWmaPnV9/TFcMf/vttbeAQDXv1+bU/Bc6TR3ZRyj8icz0+c25zejLMdwPTM605qMEY3KeTlT2tHD71T5VcKvR4srzwkFdgE4eJWVnZn7PTeA01XuXtATyKLlKgDUTXTCTN1GfXdTdDX7NE1fba39Tmvtj07T9AUA7wHwueu/9wH48evfjw/ktfaJphURR+MzgHMqy8s1Q/Lq4IjgCfvAzBT8G6gW+uhUkOblBp6mrcxBZxK6esXRT66M4+PjVbu4AVSZ3Y5ebuOoqzJoXMdW1jiXTqfJtL7Rrlz/ON337Ozsxuk4zCxMN9MZeXHwLqCalq0iF0WvPsvNbaL/c73iXabXCTtOp/9rmzmMctlfBvAz7SoS/5sA/hKurIJ/1Fp7P4B/DeCHRzJypvu20LzcvK5bJstpOViSmbdZec5UG6lfDJxMC/D/jiErF4GZlQcVD6ZMw3FejuFdPdw7PVOe+0YHMbcL9xH707wuXRm3aht+rn46MzL/H+3d61dnqbnPhHHZYbrzu1nQTttI71UYYvZpmn4NwEvm0XtG3leMMIJ2Fg+AOX6sk6KOFud79TS6+obceZnACJoijWoDzqe65zRt7x0u35mojt6MSRzc1FC27iDyrg6r0AEdZfMpvxcXFzd8+Ii2V2avCjrnw6uWZ7pU8GXrPsLaUzouLy9xenp64+OQyvDPPffcmiXk3Agn1B12vsV1xBRhxqgGjzNxgdrfrwZ3pmkDOhBVg7r/uT5sEjrGy8qu8u69G3RnWiDT4qMCdq7FljE6WwFMAwvKSBvMHwKM3UKeFmVk/roT9Mzo2WKXqt4avQ+cnZ2tTrt18/BHR0c4Pj5e0/
I8pvk8vxGrY6dr47UjnbaZo8kd5kYvXYP1GLAXOGHt4OZ/49e5OJUQ2MQVUkGY0bKtm8VaXRlE8+drnf/m/ACsCcfIm6P0cf/k5GStrfXT3BnNTHvQxoymC3DmCMFMQGRWFNPixo0Kvr1ldpZkQL0abnTgzdHoc8EDwZndOi/M7zB9au7NMcfnxgR6qHx0rccm5XH+vVkP1eaq1QNsnke76Vdf9D6Ph2qcOT+eaeRNLdUpSApniY1E7xVqOToBWOHeD5x0c5fcca6hddDcJhP3oD6u0jcq2VW4cTCoF0/gcpjxsrJ7z92zyNcNtkoYqcDILLPoexd1ZwGTRfK5PJc3cJOZ9bjqaZpwdnbWHT9aJ54m049MjLhUyuDR7xx74LpGHTl/J3A5WFm5ZoGdnBsfUG3Z6wQ27yrtrQzF5UZjVmXFezx4WFMo/VVemTZmsz4E2uggrJi4N+Di/UqDZAzvaHE0ars8ePDAWgycrzI4M0AW12EtH30T2jem5eJd/hjn2dlZKkgzq1K1KE+9KTNzndx6++jvOGknNlJlrh7TxgIhmJyFXTU29mY/O3BTazsrgK/naNU5Zmg02pyFPNmKKh4I2aBVE441l/6pBnauQsZQWq6714s/KJwfHu+7CDa/p8ys2p0FgLaPCsmIzHN8gI8XC5p0bzk/z1bjOZNZ+1JXvI2Y1RHwGxmf1fhioVZhp1+EcRXMpiGq/ID8pE7XCGyaBZxGcs80n4D7aoj+ZhYJz7GqmcpaztV7rq+v9/mZCwQp1Lzm68ysPT4+vpFO/6KOfK2Mr+Mipt3UQuIxEW0eJvPR0RHOz89X1gD/6n3VpvE8+iTuKc3cVpVr5Ji85x66uXcOWO69Zmci5zA6v9uTbCoUMnMpm6Lp0ZCVp9qKn4/46tlAmWOpBHrm/0i+zr92f9m7/OvuqTXkmFxNf52OZdpYkAbzA1jNxYc1UB35HFCFwJbFwcHB2px5KJSe1p7bj5Xv3sO9B+gAr42zSmeVqObbAdwQACoUwufTr4jylIqW0QsOKoPzPefPcZlO2+qg1vpxvlkb8G+WhmnN1p07upW2TOipFlIGVssg0+QKx9Ss4XQRFtMZzMnvAK9bN5VLo2Y/9yMrk2D4rB6Z25qNbe5PZfCRgPFOFtVo4zsGcn5alScjC0BxXsz8lVvh6BqhN6Nt5JmaY8pgzs9XcB17NLCm1h1gGZNzGVX+OvgrAebyqtqI+5M1fNClWj/S8Q6yEPgslLSPR4KRnG7OeHRtkI0xHRdzsRdfhNGDK7J0+g6w3hEV0yrz8EBQIQBgZd6phnd0Oo2uGxiymMLh4eFqcxD7nlwPHogafc6YLRMO/I760IeHh6tNMmqRRGArotqs/appSQ18cXnOR3cMoPVSQVT5rC5CzfVWAaFLb4P+bC5b66z9zdtlR61XbQNt28xt7QmCnS+XBeqpIMdklVnE6LkJKvnnmOxapg5UNyC108PMYxodTXGfp1v4nZ5FkmnpCIrF79HR0er76cyUAFZrs09PT1cMr+vSgf7KQmZwpknr4gZzT6hxGVF+Zi0wXeyzHx0drbWvKgQnUNjkV2HNTKq06Hhz77g69pRZhp18EWaOqRtwna8+S4/51edhJuIGzqLSbtVUNVXoOo6f6zWXrUyjEeI5+fJBEVGPaZrw4MEDPHjwAMfHx3jw4AEODw9XTB1bUSM/1ijK7NpmStPJyclavlFOMFf8AlibeaiYQ6+5rXmMRV/znHzEa+IdNu2nacLx8fGNesUf+/lqVXI5KqAZGptwml1dTMXIzIliZ0dJj5o0jKqzR8waZnK1GHigaiNyo1e0s8DhuU/XmS5/LoOlujK5MnvPZA9Nenx8vBqwwNVil+eeew7PP/88Hj58uGLGg4ODlYaPPE9PT1eLUc7Pz9fMeZ6y0nqxBXF0dHSDydXK6EWvHdRyUWbX9fPBlGxFqWuiwjfq6dyC6Aett17rp6JC8GV1GtHWGoTduwCdQxbgyoIeFV
zQBLjpS/I9vc/X7Lu5evRozN7TSHWvPs4s1jrqIFETk9NfXFysGPfs7GyllTSYFmldWcDNXVfO3w/tyRqe82GNXkX3q/YBXhd+2k5Mm4vXBA3VltN4l5WG064qiCMNuwhOKcypJ1+PBu12vhEGmD9vPHcQVAytaatrDcIwLRwgGnVL5mowR5uWp4EqpluZPRj99PR0NRUVJ9wwncDrpj/fv7y8vOHj8jusrSNfjhMwLRygc4yW1Z+v2fTN3mWzmf1r9tnjWTAqa3kO0rm+drRFGarZ2bpwDO+UU9UWvfG0s7XxlS82F1nn6yBUDefy4YZ1Wl07mvPcpB5zBNccza7WgLoJwOsbO87OzlZMeXR0tLYyzcUelAYeqMzIYarHdezN1ukvZ004hmcLamTwO6hAUbqZORWxCo8X4GQBu/hlgeH+j7KrMRlw43kOdrrrLeCmp/idCs5kzzR5JSlZ+6lA0OCYSvhMEKjJxqakC37dBtS046CS1vX09HTF8E+ePFn51ADWAmah8XRQaiBNz5fjc+aC6WNqLxjr9PT0Rh0qMz7rN+6XuJ+5czwG+QxCFWoamIt81Md39HHfa3yILYKIJ+hS20jnxnQlHCp+2ZvDK+J6boRRoYw+mnY0D2bQDD0BxYPSDZbM53ao6qi+ow46DagF40fEHaiZHVgXXJH2wYMHa6Y8+9GZZuX8ON+srvE/06Ru1EiQy9VJlVLkGdo/gm3xvrMW1ZXKXLy4p7ECxghPjLjC98rsMWidCc+dV5n4rF3VrFPNXVVe07ppI2YyNkfnSNVgbGXu0La8GYOtCsfg1bVqdG6XKEstjbOzs5UJn/nJfJ/zcEIwfPLnnnsOR0dHeOGFF3B8fAwAq+BcTMPF9k6n0dgKCPqjLaNsbR+2mjQgqlpb39XZEA00qgk+TdOaZRK/7MrxuGZXxblATsCNxn3mYKeaXf3BuOd+N0GP4Stz3mmEEU0xh97KDBzBCD2cr2q9GJy6P8DRwoM3EyzB7OHbPnny5AadITDjV4U7m7RubNyWu5NBrYQIXLJ7wG2nm2hUUalAVg3P5bEP31NUVUAvw06m3tyiiVHmZl+HzdDIt2eabzN41KLQ/ICbZ+CzL6mmIXc0B4fYmmAmc/Q6rei0i6NV65Axtz7L8omyY/CHS8D+PwuYqC9DmQvADUtHrTF2V5yFl9WX8+dfro+2u26Z5XpppD7q49wyth7cF2KifKZFaXbz+RV2suut+j+QEc6mUhXYq7Cpdsii+/G/Bpa04ys/Vf1adVEyn8+B/eIeo2fYJEbAtEXb8CYTDhZyfV1dmXnUbM8Y2Ll47vmcemVuDf/PgtyNEafh41f7PerN2jvre41N7JXPHnBaXBlFGUbT8WBgZglJyXk4Sa8M5spSGjWNDi7nfwYt/BUWpiloVn+YAz8jgUttt8iT6cvM9TmoBJbGJmJlna4SY+2rNPGg5bbOBK3+jc5NVxq/qmPQwpodWJ+W07auxiGXpz47m/XaNjrWRvp0L+bZXWBiVIMBecS89y4LhirvjNl62kQR+Wg65/dFx/YWiTCtfB2uQRbs24ThezSoWamBtQAzZVwrNADL6VRgjjD6qIWTMbxTTMz0sbgo6OA+dRZPvF9ZM1EuM73T4KOzWDvx2bUB3FQUd3AvIJFVlOdGdYDwfR5Y6isGra78iBZHXjxPzWkArKLP0WF6ZhnPb5+fn6993MDVw1kYjBgkzny9DdN+RKtk02dc72revhdz0CW8zuR3tEcZyjiZwmFmcv9fXl6uzqmP/uUTcMLCUVp4rGudnU+u49zRVGEvFtUEMu3pnqk0j3sugObSZRqd6dOB6jYtqAmWXfMuKwc+HTW0si5RrZh3hIEzDdNDZhU4Judn2t+VC+QW7LAb4pAF7uJZBo2D6Fy9G6Pat9wPvMQ4NHwIbK6DBvC4TOe+OiUX7/P4zdpYsfN59sDcIJsDm72Vv5a9O2rqZmZnb8okK4efabCH24vfzfJxGmvUdO
2lCcbgd5QmNueZOeJ5rwzVbJW574TcNuNok0VdVT9oe+hKRjXLR91PLm9kzAV2tsWVMccfnQtnivWYrWLIyn9WSTzi/yk9OkXHA8aV7cpw5akfqO9XDK9Mpe9zGyv9cT9W4KnmjPplArSiUe/PdUscMsuzEp5KP8dM2AKIdOzLZ8ovo43HgPvuXJXXvCU4twD1q+aYk5oPUM8tzl1hxKj8UEeL1mGuhlAmcUyT0ZiZ05XUZwHQ+9M6Zr8jVk0PrsweXZtgWyUyWob64mq9lcw5MLbn1GMvjpLeFs78Un9MB0XPL4vrDNV8aq8DuKPU92KfjRdc6KDplVn5vCPmtP6vmt2VHdrMza64e0zPiJauzHhgXMBmbcZtP5oPn1PIQk8X33B7KR1KuzJyNVbnMP1TyexzOtitqnLvuWmNOSbWKLIVYwFlCP5lZndMx+kZjnnj/8y6yt5xdDpfPQa8CgCtv8YjKhdK6bkNsz2gU17b5MOzODENxzNHepDpNL2+1j4wMu7mWq47m3obRTVQez6di9Y6jQyszwnr9MocZNrCmdXKpFEWL50NfzemeOJP66DCgbWNCjiu6yjjqLAJ2ng9fCwc0nPmjo+PV/+zQHDxCabFmfNct2zVmoPzs921G1PMnI5OjalwnVTz8//6RRoW+k6hqfnvFFaFe/XZ5zI6YxuJOzoYnNbP3nVBpuy5ImuHTJvrPSc0XFqtH9dzDqNrGarJWXMzk8e1ni+ndai0vcNtzNxk5alZ3KOF4QKUkScfLc5tVVk9PXrnYkizt9b+GoD/AsAE4F8A+EsA3gHgowDeCuAzAP7CNE03TyK4JWhDcIerNov/+V0ndR0qgTTXfKx8rqw8/p/Pdjs/P1/T8NU00Rz6eq6K+tx80gz/xYKiuA4NH1H4qvzKQhsxrXWmJROkmd+s6ViDZ2U7K7JK46xLdb2qOEvPNx8RFN0UrbV3AvgrAF6apuk7ARwC+BEAPwHgJ6dp+iMAvgHg/d3StkAMMJaMGXqdmf31JOw2lkkF1QisAdi0d+m2RdUeamGo+a2mPGtuPlxSfXlH+6Z+eCZQ3WyGq4+2g+bl+ryKb7j6OJdKwX3q2ljp5zqOjodRM/4IwHOttSMAzwP4CoDvB/Cx6+c/DeA/G8wrJ0YqqINEzaAK7jTUQDWwNaiUmWZBL99zg6RHq9KgX0YJplFBx23D0IGVDa6MpoqxVZuzVtfjovWAyQwaJKyChgE3BkbcLa33qGDReIHSXaUNS4yZXmnhtuZrp4iqfuuha8ZP0/Tl1trfAfDbAB4B+N9xZbb/3jRN59fJXgHwzoSIlwG8DFydU57BSSfV0BExHaD5xvtsyvM9p/nd/xEwUxOvMqe1fiMdxaadDoJ4roPE1XnUD+/dyzQNW1osmFyaqi4V3ZUZ3UvD7aJ9NJLvHDdpxIR3pjnTyQqkWrjF9FVj16HL7K21twB4L4AXAfwegJ8D8AO99wLTNH0EwEcA4Fu+5VumStLKezeuNaikA0Sn2RRcdqZt2G9iRJ58aEak45VMKoHjt1pOq0IkfHU978zlEwKQl9ZWwTcnIBzU0uFvrIcGj9/Y5MMRdxUEzsrp0ZEJgoy54m/ERVCt7AK0ca+3oapHY9yr3EPOX99lOnvLhyuMBOj+JIDfmqbp69eE/TyA7wXw5tba0bV2fxeALw/kdYMJVKtlBMchCDxfqSZrthzWDTAeeKMaLjpdzzzP6jMaVAladA80n8l+eXm5tjFGmZ3bJODaITRdz7xXtyKOgY76s08ez2KHX/yyua27ATM4SyVjsCywV7WBS5vlo1NhlSLpCQGum47Bqhw31eraYAQjPfDbAL6ntfY8rsz49wD4NIBfAvBDuIrIvw/Ax0cLzZiMO3rE1Kryr0y7igYHlsxOoKi2zPLkueHQwFV5anqqL1+Z7C7iy+1aCTjWwi6GEW0R2tx9zYUtnJ7PXt13DOaYmX9H2jbT4o6mTRm9B7erTRHtNzJD0MOIz/6p1t
rHAPxzAOcAfhVXZvn/BuCjrbW/fX3vp3p5aRCCf6/LWqtMz8yMvGJJaVzzyiROD4xpW5a2msfh4eGaWc0DWpdMxmopzscti+yZY5F35HV+fr76bhvX9+BgfXkt77KqBKhqGva1OfDGc8VsxvPnnVhAjARStc49U330Nxs7TnBkzFwJHH0/e+7qr2PE3deFPJxf9ONcph+yraZp+lsA/pbc/k0A3z2rtGtUml21T0+SuTQc5NhUCgI3tQXnp1K58s1VE2naSjM7qIZlxq78Uq2bmuzOv2bTXRnZrYMfZW6mYzRNZZ5r+jkLqXqmfuUnz4VaWiNQ7c4YDRADe3JSjT5XLeQaxpk3nE6PehoFM3HG2Hpfy2Ca3If7WHiMdBTXNfJif17bS0/e1UHLwbzIP8pwAbjwueNXI+06XehMeI01xD3t10zzch7bMLpqf8fIerDJqBZ1ddRnfF25UgoOHHO0fpryz1UpdrYRpvJtGZUEZPNT1y3r/UCmTfk9F9XXtdjsGwZTx6CI+9FBmUTOymfooAwGik5mIRCmfryTuQlxL2P2CL4FnazlWSA4RneCb3QwjjJpVqfqPb3HbVr56vFbrUPnIHEVE+nBWbpubGfjvdcm935SjRKZmfRxT30UNtFZO7pAS+ThGBfI96azBuRffp/v8dc5OW8e6DolNyIAqimY2EkV5+DxgAOw+jijo5cHZrRR5BkanM/M4zQReWcTP55rHVXwxb2qjlUarl/PIspM7yzK7fKulAz388HBQfnNOn1XYySuHL6vbRf9rXXoCZh71+xqcgI1kTpwgJuSjSWgk476TsBpHe7oLGDjBkM2LRQ0sNbN6lsFgdyZY1wnFTbq1rA146yaYF4121W7sPmuFpK6Z2zW9pgzM5Urs7vKi38d4zut3tP0UZesD7UttG/c2KzcWX6XxzZblKMWIrDjLa497e40dGXK6IDXfcM9813fySLpPO2VDT7XwfrFUFdPhbMu+H2NazDNkUbPONf8WSM7850HlZqvWr/KQlOMmOYjmr+yBPT5SLCtYujKp86CthUDMg3VOMgCcXtrxjNGzfi5DB/3gZvmutP8GV1ublNdiGCuTAM4cMzAuRjZO5qmOumWGTTTmK5t2QdXk9zNMHB/VPWoZidGNL6+0xO0I+a+a5NsPPUQY8KdCefAZvnoeOTxp7SO5rXzDzvy7+g73FG9qYcRxmSwQHBCQq2FbTASZBoNRFXmYM9UDFTz4ny/ink4OI0/qqUZmVvlrucwOXBToM/BJu/06GGLbQSZ78+493PjdfDNqQxwU4I531+fq7/D0AZSBleNHhgx5UcwEoHOUJmU/NwNhMy3rvLWe8r0o4LJ5TEHvUDeprSMMEwm4EYF323QEOlUyOy1Zgf6c6ZVGhUeWYDHBacycy1jcMfc22iDgPrAjhZnIlaBmYpRexrVtSUPQCdcsw9uzMHoAK/SOktsLtM7JtL8M7oUI1ak5j8qbObEjwI7ZfYeM1eozNPIU31WDtxUHaR+ebyr1gE/63VU5derm6HSWlepBUYisZmAcmmcGTwyAKs69jDClM7dyJ7x802ZnoXrSNrq2ZwxzgE+p4T4f46TaKwmw841O7DeKD0zm9/ZJH93PfKuC9j18s0GWiYcNjXbs/d7wjCjaZNYxNy69N4PcFszrYAXRMwI21hco6jG7SbCby7mBBV3MvWmv9qZmX/u4KK0Wl7k4xqmisoqU2dznk7KujpzeSO+Omt1ly9Pm2m+2YIhV2ee3lOGV8E0IggqX3oOdNYiwLRpek5TWVMZRmIbLr3rVx2/Sp+a5D1k2n4k8Azc8+mygZ7pM5Kul+a2pGoW2HPgzuRlpVWU+zbg6soD6K61GzDP0nrWsUlb9d4ZyXPnml3/z9JnyKRtFbWfi8xsdzQ6X0+1YhWU03SjA8Plo/5/di8rX2nt0Z/ROleDxTs9K8mV0evn2xwXSk9lFThf3LXnNnyxlz77HMk2d6Cwr5T5epvAuRv6PK
Mn68BN/UoWHnPaZQS3oaE3jYSP5juK+7BoGFkkPwu69fK6bexMswN5cMNJumxdOe82U/+dBx1H4of8G9NpTAvDRbH5HY20u7iFIgtUOt88e9+t4Z8DNwugsQBeJqp9qwHNnm/J0WjehOPq1qvXSEykF9DsXWe0zZ1z30T5bZLPXkTjGdlOIWZeNY96K6tGn/Vo6UU9M02tQULNc7SzmdF12u82NUE2yJ2G4sBlZsFosNPB5TPXiuK8epgz1TeKihF7VsYcAVG1y16Z8U6z8iDY5MAJJ6FVOOhGBSfdR/xZoA5+ZYNoGyGjZesaaf519Kjl4SLbI9CNHtn7bLHNnfKc46dX93VabtMZgkzgO2yyzHYTbKrM9oLZXZq52g7wU3YuSOK06Rzfd2QDxih6pqpzcbiM2LjC7zka3VLS27IGeoEkt0ahyqNH1xwmqVZKAvPWNrD1kZVTIbNaFXc1P7+zwyvuKn9gfUBVEn0uc7q4gP5fzf2OYnS6b4757tYYVNHialD35rO3mWdX5nL1m7sRp1eO4raF4dxltiMW0Eg+ip1o9jnY1D/rTa/MpSMYnc3gjMEZ2RLIudpNmXXucs7MdK8W7WTQFWx3FX2PPDPanFuh6Gniu2boTcvYNHK/dz57dn+TCoYU7PnKalWMaOXqeTaH7abadFDODSC5KbbM/Iv33I4ovVdtrhmxLpjp52rvXhuMTFdto9lHaHY0jlpujgGr6P99TI3uRTS+2mEUWicL3DhGj/QxLdcLWmU0RFr91TPlnTDIBusm2q8Xxc7gNtawj6/BvU0xMsU3Zxqrx+i3EbnOZk4c5i5nzawyR4t71gugZouaeu2yF8yuqNZ1u/tZkA7IO2p0bbqCNWTWqbc5FRaMPmr5OKbK6B3Jb040epSmkbJucyrRIdq0Wk/h6Mrq37OEqsU2GW2j/TPizgA7YPaReXS+B/jpI9cBvGgjNLtO5WXBu5GorBuMc6eS5gxiZ1WolTMqAJwbkQXjqoGbDSq2eLJ35tJ923B9MU3TjbMGGSokq6BaD9nYd9OmLniq740GcgN7t1y2morQ+W2VllmwyEnJTcxpFhSj0veuglfVMuI5QanMZJ7LjD3BMHdg3jYygc/948aIi33c1dQYY0RTz7EAgB0dSzWCaNDqFJQRk3JEEo82WBVgydJsgxEa58zd6sCdOyXUwygtI+9tisz1Ysyx6ICbgnWOxdOLtWTT0b2xnR1uuVfR+BFk5vscQbHp1IXSsAu4wamaUS2GEQujWhSSlb0NdsHou7Icovzqvo7l+x5jO9sIowztvokGvH6MMm+EySSri2jqirOgofq6aqTZJcMzXMBohOnvYzpH83GCqaJjmwUyTquNCu4R7Z5tG9ayY9POXOuwChKOKKy5Su3eD69wvlsVOR+Z1skGipu+y/7cLqtZDTmgMUe0bxUJjuvMVO2ZdFnd5yLaqgoczQ3GVfv6szK0vF76bS24OVOLVToXXHMB1Ko892wvp956WpMb4vj4eHW/F9V1jVR955ppiV89rdOVVdHvLJdN4ebBez7gHO3Sy4vBae9y+m0ueoHYKv1cVL56L1/njup4cluARwTeqFWxEzPeTbMB60RHI8bniDhdJaGZUeOroy5y7zbFOMZ3ZWSLINz685HBVQ2inrbW/OeYds7k1ny3jX3MXV23Cea08ZwY0BzXoloHUpVXjekedKlzz1LbuwBdDEA3pdYza5w7oPdc/pp2Lu7Sv1fmnVvOyCDK0rhNL3eF3gKTEVT09fz2u8KIsN6G0TWvqk57w+y9teMjARWn5bL7czRvRqtbBOHmZbdFxfBzfONq2Sajtbb2ued4t0q/KUam60bo5f9d+9yGv91zb3r5Z9bTCMO7eNLcdt8Zs2dSaBtp6/ZwM/M5gXEX0v02TFd1OXp0Vh3PA2VT2rIlmc4t2xab5udiKrueURk10zcVJHOwE2bfZKrDYSTApJtWtslX71eLLUbP8h7diLGJZtXZCE4/Mh
0GrPvubm03u1g9OkZod9pubmS/stwqv1lp2Bbb+ONBh7NyXL4jNO/k3Pj7xhxGv03cpY87By6AM5e2u2yzUVr2pT3nYJ9obvdp5rTWvg7gDwD8f/dW6HZ4G54eWoGni96niVbg6aH335mm6d9yD+6V2QGgtfbpaZpeutdCN8TTRCvwdNH7NNEKPH30OjwTZvyCBQsWZl+w4JnBLpj9Izsoc1M8TbQCTxe9TxOtwNNH7w3cu8++YMGC3WAx4xcseEawMPuCBc8I7o3ZW2s/0Fr7Qmvti621D95XuaNorb27tfZLrbXPtdZ+vbX2gev739pa+8XW2m9c/75l17QGWmuHrbVfba194vr6xdbap67b+Gdbaye7pjHQWntza+1jrbV/2Vr7fGvtT+xr27bW/tr1GPhsa+1/aa093Oe2HcW9MHtr7RDA/wDgPwHwHQD+bGvtO+6j7Bk4B/DXp2n6DgDfA+C/vKbxgwA+OU3TtwP45PX1vuADAD5P1z8B4CenafojAL4B4P07ocrjwwD+6TRN/x6AP4YruveubVtr7wTwVwC8NE3TdwI4BPAj2O+2HUNsSLnLPwB/AsA/o+sPAfjQfZS9Bc0fB/CnAHwBwDuu770DwBd2Tds1Le/CFYN8P4BPAGi4WuF15Np8x7S+CcBv4TogTPf3rm0BvBPA7wD4VlztHfkEgP94X9t2zt99mfHRgIFXru/tJVpr3wbguwB8CsDbp2n6yvWjrwJ4+67oEvw9AH8DQOxOeSuA35um6fz6ep/a+EUAXwfwD6/djn/QWnsBe9i20zR9GcDfAfDbAL4C4PcBfAb727bDWAJ0gtbaGwD8YwB/dZqmf8PPpiuxvvO5ytbanwbwtWmaPrNrWgZxBOCPA/j70zR9F672R6yZ7HvUtm8B8F5cCah/G8ALAH5gp0TdEu6L2b8M4N10/a7re3uF1toxrhj9Z6Zp+vnr27/bWnvH9fN3APjarugjfC+AP9Na+xKAj+LKlP8wgDe31mLb8j618SsAXpmm6VPX1x/DFfPvY9v+SQC/NU3T16dpOgPw87hq731t22HcF7P/CoBvv45onuAq4PEL91T2ENrVXsSfAvD5aZr+Lj36BQDvu/7/fbjy5XeKaZo+NE3Tu6Zp+jZcteX/OU3TnwfwSwB+6DrZXtAKANM0fRXA77TW/uj1rfcA+Bz2sG1xZb5/T2vt+esxEbTuZdvOwj0GPn4QwL8C8P8C+G93Haww9P2HuDIj/x8Av3b994O48oU/CeA3APwfAL5117QK3d8H4BPX//+7AP5vAF8E8HMAHuyaPqLz3wfw6ev2/V8BvGVf2xbAjwH4lwA+C+B/BvBgn9t29G9ZLrtgwTOCJUC3YMEzgoXZFyx4RrAw+4IFzwgWZl+w4BnBwuwLFjwjWJh9wYJnBAuzL1jwjOD/B0BzG+XtkItYAAAAAElFTkSuQmCC\n",
|
| 147 |
+
"text/plain": [
|
| 148 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 149 |
+
]
|
| 150 |
+
},
|
| 151 |
+
"metadata": {
|
| 152 |
+
"needs_background": "light"
|
| 153 |
+
},
|
| 154 |
+
"output_type": "display_data"
|
| 155 |
+
}
|
| 156 |
+
],
|
| 157 |
+
"source": [
|
| 158 |
+
"# Displaying data\n",
|
| 159 |
+
"smp_num = 2\n",
|
| 160 |
+
"print(x2_load[smp_num])\n",
|
| 161 |
+
"print(y_load[smp_num])\n",
|
| 162 |
+
"plt.imshow(x1_load[smp_num].reshape((frame_h, frame_w)),\n",
|
| 163 |
+
" cmap=\"gray\", vmin=0, vmax=255)\n",
|
| 164 |
+
"plt.show()"
|
| 165 |
+
]
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"cell_type": "markdown",
|
| 169 |
+
"metadata": {
|
| 170 |
+
"id": "8AzfSjqn8oFD"
|
| 171 |
+
},
|
| 172 |
+
"source": [
|
| 173 |
+
"#### Getting those data that looking 'in' screen"
|
| 174 |
+
]
|
| 175 |
+
},
|
| 176 |
+
{
|
| 177 |
+
"cell_type": "code",
|
| 178 |
+
"execution_count": 7,
|
| 179 |
+
"metadata": {
|
| 180 |
+
"colab": {
|
| 181 |
+
"base_uri": "https://localhost:8080/"
|
| 182 |
+
},
|
| 183 |
+
"id": "DWA57YKb8oFE",
|
| 184 |
+
"outputId": "bcf52386-ae85-4ff9-af25-1f9415bebd56"
|
| 185 |
+
},
|
| 186 |
+
"outputs": [],
|
| 187 |
+
"source": [
|
| 188 |
+
"# print(\"\\nNormalizing data...\")\n",
|
| 189 |
+
"# sbj_scalers_boi_dir = sbj_dir + sbj_scalers_boi_fol\n",
|
| 190 |
+
"# x2_chs_inp = x2_load[:, chosen_inputs]\n",
|
| 191 |
+
"# x1_scaler_boi, x2_scaler_boi = j_load(sbj_scalers_boi_dir)\n",
|
| 192 |
+
"# x1_boi = x1_load / x1_scaler_boi\n",
|
| 193 |
+
"# x2_boi = x2_scaler_boi.transform(x2_chs_inp)"
|
| 194 |
+
]
|
| 195 |
+
},
|
| 196 |
+
{
|
| 197 |
+
"cell_type": "code",
|
| 198 |
+
"execution_count": 8,
|
| 199 |
+
"metadata": {
|
| 200 |
+
"colab": {
|
| 201 |
+
"base_uri": "https://localhost:8080/"
|
| 202 |
+
},
|
| 203 |
+
"id": "SJ-ojxa18oFG",
|
| 204 |
+
"outputId": "92885191-edcc-4664-fa23-d57e7c2b770e"
|
| 205 |
+
},
|
| 206 |
+
"outputs": [],
|
| 207 |
+
"source": [
|
| 208 |
+
"# print(\"\\nLoading in_blink_out model...\")\n",
|
| 209 |
+
"# sbj_model_boi_dir = sbj_dir + sbj_model_boi_fol\n",
|
| 210 |
+
"# model_boi = load_model(sbj_model_boi_dir)\n",
|
| 211 |
+
"# print(model_boi.summary())"
|
| 212 |
+
]
|
| 213 |
+
},
|
| 214 |
+
{
|
| 215 |
+
"cell_type": "code",
|
| 216 |
+
"execution_count": 9,
|
| 217 |
+
"metadata": {
|
| 218 |
+
"colab": {
|
| 219 |
+
"base_uri": "https://localhost:8080/"
|
| 220 |
+
},
|
| 221 |
+
"id": "KGqOn_458oFH",
|
| 222 |
+
"outputId": "9080e5e9-11ac-4e42-b39f-7c5aa5181ffb"
|
| 223 |
+
},
|
| 224 |
+
"outputs": [],
|
| 225 |
+
"source": [
|
| 226 |
+
"# print(\"\\nPredicting those data that looking 'in' screen.\")\n",
|
| 227 |
+
"# yhat_boi = model_boi.predict([x1_boi, x2_boi]).argmax(1)"
|
| 228 |
+
]
|
| 229 |
+
},
|
| 230 |
+
{
|
| 231 |
+
"cell_type": "code",
|
| 232 |
+
"execution_count": 10,
|
| 233 |
+
"metadata": {
|
| 234 |
+
"colab": {
|
| 235 |
+
"base_uri": "https://localhost:8080/"
|
| 236 |
+
},
|
| 237 |
+
"id": "ySFqmNyd8oFI",
|
| 238 |
+
"outputId": "75cea127-1407-47f9-a7a9-ab1be5bcdf09"
|
| 239 |
+
},
|
| 240 |
+
"outputs": [],
|
| 241 |
+
"source": [
|
| 242 |
+
"# # Choosing those data\n",
|
| 243 |
+
"# x1_new = []\n",
|
| 244 |
+
"# x2_new = []\n",
|
| 245 |
+
"# y_new = []\n",
|
| 246 |
+
"# for (x10, x20, y0, yht0) in zip(x1_load, x2_load, y_load, yhat_boi):\n",
|
| 247 |
+
"# if True: # yht0 != 1:\n",
|
| 248 |
+
"# x1_new.append(x10)\n",
|
| 249 |
+
"# x2_new.append(x20)\n",
|
| 250 |
+
"# y_new.append(y0)\n",
|
| 251 |
+
"\n",
|
| 252 |
+
"# x1_new = np.array(x1_new)\n",
|
| 253 |
+
"# x2_new = np.array(x2_new)\n",
|
| 254 |
+
"# y_new = np.array(y_new)\n",
|
| 255 |
+
"# n_smp_new = x1_new.shape[0]\n",
|
| 256 |
+
"# print(f\"New samples: {n_smp_new}\")"
|
| 257 |
+
]
|
| 258 |
+
},
|
| 259 |
+
{
|
| 260 |
+
"cell_type": "markdown",
|
| 261 |
+
"metadata": {
|
| 262 |
+
"id": "tVhzAjUV8oFJ"
|
| 263 |
+
},
|
| 264 |
+
"source": [
|
| 265 |
+
"### Preparing modified calibration data to feeding in eye_tracking model"
|
| 266 |
+
]
|
| 267 |
+
},
|
| 268 |
+
{
|
| 269 |
+
"cell_type": "code",
|
| 270 |
+
"execution_count": 11,
|
| 271 |
+
"metadata": {
|
| 272 |
+
"colab": {
|
| 273 |
+
"base_uri": "https://localhost:8080/"
|
| 274 |
+
},
|
| 275 |
+
"id": "o2yHN0I9cF6o",
|
| 276 |
+
"outputId": "adfd1167-179f-48b5-c016-91e9249c46eb"
|
| 277 |
+
},
|
| 278 |
+
"outputs": [
|
| 279 |
+
{
|
| 280 |
+
"name": "stdout",
|
| 281 |
+
"output_type": "stream",
|
| 282 |
+
"text": [
|
| 283 |
+
"\n",
|
| 284 |
+
"Normalizing modified calibration data to feeding in eye_tracking model...\n"
|
| 285 |
+
]
|
| 286 |
+
},
|
| 287 |
+
{
|
| 288 |
+
"name": "stderr",
|
| 289 |
+
"output_type": "stream",
|
| 290 |
+
"text": [
|
| 291 |
+
"F:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\sklearn\\base.py:324: UserWarning: Trying to unpickle estimator StandardScaler from version 1.0.2 when using version 1.0.1. This might lead to breaking code or invalid results. Use at your own risk. For more info please refer to:\n",
|
| 292 |
+
"https://scikit-learn.org/stable/modules/model_persistence.html#security-maintainability-limitations\n",
|
| 293 |
+
" warnings.warn(\n"
|
| 294 |
+
]
|
| 295 |
+
},
|
| 296 |
+
{
|
| 297 |
+
"data": {
|
| 298 |
+
"text/plain": [
|
| 299 |
+
"['../../subjects/24/scalers.bin']"
|
| 300 |
+
]
|
| 301 |
+
},
|
| 302 |
+
"execution_count": 11,
|
| 303 |
+
"metadata": {},
|
| 304 |
+
"output_type": "execute_result"
|
| 305 |
+
}
|
| 306 |
+
],
|
| 307 |
+
"source": [
|
| 308 |
+
"print(\"\\nNormalizing modified calibration data to feeding in eye_tracking model...\")\n",
|
| 309 |
+
"public_scalers_dir = trained_dir + \"scalers.bin\"\n",
|
| 310 |
+
"scalers = j_load(public_scalers_dir)\n",
|
| 311 |
+
"x1_scaler, x2_scaler = scalers\n",
|
| 312 |
+
"\n",
|
| 313 |
+
"x2_chs_inp = x2_load[:, chosen_inputs]\n",
|
| 314 |
+
"\n",
|
| 315 |
+
"x1_nrm = x1_load / x1_scaler\n",
|
| 316 |
+
"x2_nrm = x2_scaler.transform(x2_chs_inp)\n",
|
| 317 |
+
"\n",
|
| 318 |
+
"j_dump(scalers, sbj_dir + \"scalers.bin\")"
|
| 319 |
+
]
|
| 320 |
+
},
|
| 321 |
+
{
|
| 322 |
+
"cell_type": "code",
|
| 323 |
+
"execution_count": 12,
|
| 324 |
+
"metadata": {
|
| 325 |
+
"colab": {
|
| 326 |
+
"base_uri": "https://localhost:8080/"
|
| 327 |
+
},
|
| 328 |
+
"id": "eNwyQAwL2pGS",
|
| 329 |
+
"outputId": "9812cee4-8a01-4734-b479-81feed0099c2"
|
| 330 |
+
},
|
| 331 |
+
"outputs": [
|
| 332 |
+
{
|
| 333 |
+
"name": "stdout",
|
| 334 |
+
"output_type": "stream",
|
| 335 |
+
"text": [
|
| 336 |
+
"(3570, 100, 100, 1) (630, 100, 100, 1) (3570,) (630,) (3570, 7) (630, 7) (3570,) (630,)\n"
|
| 337 |
+
]
|
| 338 |
+
}
|
| 339 |
+
],
|
| 340 |
+
"source": [
|
| 341 |
+
"# Shuffling and splitting data to train and val\n",
|
| 342 |
+
"x1_shf, x2_shf, y_hrz_shf, y_vrt_shf = shuffle(x1_nrm, x2_nrm, y_load[:, 0], y_load[:, 1])\n",
|
| 343 |
+
"\n",
|
| 344 |
+
"n_train = int(r_train * n_smp)\n",
|
| 345 |
+
"n_val = n_smp - n_train\n",
|
| 346 |
+
"x1_train, x2_train = x1_shf[:n_train], x2_shf[:n_train]\n",
|
| 347 |
+
"x1_val, x2_val = x1_shf[n_train:], x2_shf[n_train:]\n",
|
| 348 |
+
"y_hrz_train, y_vrt_train = y_hrz_shf[:n_train], y_vrt_shf[:n_train]\n",
|
| 349 |
+
"y_hrz_val, y_vrt_val = y_hrz_shf[n_train:], y_vrt_shf[n_train:]\n",
|
| 350 |
+
"\n",
|
| 351 |
+
"x_train = [x1_train, x2_train]\n",
|
| 352 |
+
"x_val = [x1_val, x2_val]\n",
|
| 353 |
+
"\n",
|
| 354 |
+
"print(x1_train.shape, x1_val.shape, y_hrz_train.shape, y_hrz_val.shape,\n",
|
| 355 |
+
" x2_train.shape, x2_val.shape, y_vrt_train.shape, y_vrt_val.shape)"
|
| 356 |
+
]
|
| 357 |
+
},
|
| 358 |
+
{
|
| 359 |
+
"cell_type": "code",
|
| 360 |
+
"execution_count": 13,
|
| 361 |
+
"metadata": {
|
| 362 |
+
"id": "PqdkxbYFcpuf"
|
| 363 |
+
},
|
| 364 |
+
"outputs": [],
|
| 365 |
+
"source": [
|
| 366 |
+
"# Callback for training\n",
|
| 367 |
+
"cb = EarlyStopping(patience=patience, verbose=1, restore_best_weights=True)"
|
| 368 |
+
]
|
| 369 |
+
},
|
| 370 |
+
{
|
| 371 |
+
"cell_type": "code",
|
| 372 |
+
"execution_count": 14,
|
| 373 |
+
"metadata": {
|
| 374 |
+
"colab": {
|
| 375 |
+
"base_uri": "https://localhost:8080/"
|
| 376 |
+
},
|
| 377 |
+
"id": "dpT4PjQUcprr",
|
| 378 |
+
"outputId": "a3aa1dfd-0002-41de-a4b3-9719987a58ea"
|
| 379 |
+
},
|
| 380 |
+
"outputs": [
|
| 381 |
+
{
|
| 382 |
+
"name": "stdout",
|
| 383 |
+
"output_type": "stream",
|
| 384 |
+
"text": [
|
| 385 |
+
"Loading public eye_tracking models...\n",
|
| 386 |
+
"Model: \"model_5\"\n",
|
| 387 |
+
"__________________________________________________________________________________________________\n",
|
| 388 |
+
"Layer (type) Output Shape Param # Connected to \n",
|
| 389 |
+
"==================================================================================================\n",
|
| 390 |
+
"input_11 (InputLayer) [(None, 100, 100, 1) 0 \n",
|
| 391 |
+
"__________________________________________________________________________________________________\n",
|
| 392 |
+
"conv2d_18 (Conv2D) (None, 100, 100, 16) 800 input_11[0][0] \n",
|
| 393 |
+
"__________________________________________________________________________________________________\n",
|
| 394 |
+
"max_pooling2d_14 (MaxPooling2D) (None, 50, 50, 16) 0 conv2d_18[0][0] \n",
|
| 395 |
+
"__________________________________________________________________________________________________\n",
|
| 396 |
+
"conv2d_19 (Conv2D) (None, 50, 50, 32) 12832 max_pooling2d_14[0][0] \n",
|
| 397 |
+
"__________________________________________________________________________________________________\n",
|
| 398 |
+
"max_pooling2d_15 (MaxPooling2D) (None, 25, 25, 32) 0 conv2d_19[0][0] \n",
|
| 399 |
+
"__________________________________________________________________________________________________\n",
|
| 400 |
+
"conv2d_20 (Conv2D) (None, 25, 25, 64) 18496 max_pooling2d_15[0][0] \n",
|
| 401 |
+
"__________________________________________________________________________________________________\n",
|
| 402 |
+
"max_pooling2d_16 (MaxPooling2D) (None, 12, 12, 64) 0 conv2d_20[0][0] \n",
|
| 403 |
+
"__________________________________________________________________________________________________\n",
|
| 404 |
+
"flatten_5 (Flatten) (None, 9216) 0 max_pooling2d_16[0][0] \n",
|
| 405 |
+
"__________________________________________________________________________________________________\n",
|
| 406 |
+
"input_12 (InputLayer) [(None, 7)] 0 \n",
|
| 407 |
+
"__________________________________________________________________________________________________\n",
|
| 408 |
+
"concatenate_5 (Concatenate) (None, 9223) 0 flatten_5[0][0] \n",
|
| 409 |
+
" input_12[0][0] \n",
|
| 410 |
+
"__________________________________________________________________________________________________\n",
|
| 411 |
+
"dense_25 (Dense) (None, 256) 2361344 concatenate_5[0][0] \n",
|
| 412 |
+
"__________________________________________________________________________________________________\n",
|
| 413 |
+
"dense_26 (Dense) (None, 128) 32896 dense_25[0][0] \n",
|
| 414 |
+
"__________________________________________________________________________________________________\n",
|
| 415 |
+
"dense_27 (Dense) (None, 32) 4128 dense_26[0][0] \n",
|
| 416 |
+
"__________________________________________________________________________________________________\n",
|
| 417 |
+
"dense_28 (Dense) (None, 8) 264 dense_27[0][0] \n",
|
| 418 |
+
"__________________________________________________________________________________________________\n",
|
| 419 |
+
"dense_29 (Dense) (None, 1) 9 dense_28[0][0] \n",
|
| 420 |
+
"==================================================================================================\n",
|
| 421 |
+
"Total params: 2,430,769\n",
|
| 422 |
+
"Trainable params: 2,430,769\n",
|
| 423 |
+
"Non-trainable params: 0\n",
|
| 424 |
+
"__________________________________________________________________________________________________\n",
|
| 425 |
+
"None\n"
|
| 426 |
+
]
|
| 427 |
+
}
|
| 428 |
+
],
|
| 429 |
+
"source": [
|
| 430 |
+
"print(\"Loading public eye_tracking models...\")\n",
|
| 431 |
+
"public_model_et_dir = trained_dir + f\"model{model_num}\"\n",
|
| 432 |
+
"model_hrz = load_model(public_model_et_dir + \"-hrz.h5\")\n",
|
| 433 |
+
"model_vrt = load_model(public_model_et_dir + \"-vrt.h5\")\n",
|
| 434 |
+
"print(model_hrz.summary())"
|
| 435 |
+
]
|
| 436 |
+
},
|
| 437 |
+
{
|
| 438 |
+
"cell_type": "code",
|
| 439 |
+
"execution_count": 15,
|
| 440 |
+
"metadata": {
|
| 441 |
+
"colab": {
|
| 442 |
+
"base_uri": "https://localhost:8080/"
|
| 443 |
+
},
|
| 444 |
+
"id": "lgxSezv4jY70",
|
| 445 |
+
"outputId": "5c8f1c31-0c46-4388-c243-de3247676f9d"
|
| 446 |
+
},
|
| 447 |
+
"outputs": [
|
| 448 |
+
{
|
| 449 |
+
"name": "stdout",
|
| 450 |
+
"output_type": "stream",
|
| 451 |
+
"text": [
|
| 452 |
+
"Model: \"model_5\"\n",
|
| 453 |
+
"__________________________________________________________________________________________________\n",
|
| 454 |
+
"Layer (type) Output Shape Param # Connected to \n",
|
| 455 |
+
"==================================================================================================\n",
|
| 456 |
+
"input_11 (InputLayer) [(None, 100, 100, 1) 0 \n",
|
| 457 |
+
"__________________________________________________________________________________________________\n",
|
| 458 |
+
"conv2d_18 (Conv2D) (None, 100, 100, 16) 800 input_11[0][0] \n",
|
| 459 |
+
"__________________________________________________________________________________________________\n",
|
| 460 |
+
"max_pooling2d_14 (MaxPooling2D) (None, 50, 50, 16) 0 conv2d_18[0][0] \n",
|
| 461 |
+
"__________________________________________________________________________________________________\n",
|
| 462 |
+
"conv2d_19 (Conv2D) (None, 50, 50, 32) 12832 max_pooling2d_14[0][0] \n",
|
| 463 |
+
"__________________________________________________________________________________________________\n",
|
| 464 |
+
"max_pooling2d_15 (MaxPooling2D) (None, 25, 25, 32) 0 conv2d_19[0][0] \n",
|
| 465 |
+
"__________________________________________________________________________________________________\n",
|
| 466 |
+
"conv2d_20 (Conv2D) (None, 25, 25, 64) 18496 max_pooling2d_15[0][0] \n",
|
| 467 |
+
"__________________________________________________________________________________________________\n",
|
| 468 |
+
"max_pooling2d_16 (MaxPooling2D) (None, 12, 12, 64) 0 conv2d_20[0][0] \n",
|
| 469 |
+
"__________________________________________________________________________________________________\n",
|
| 470 |
+
"flatten_5 (Flatten) (None, 9216) 0 max_pooling2d_16[0][0] \n",
|
| 471 |
+
"__________________________________________________________________________________________________\n",
|
| 472 |
+
"input_12 (InputLayer) [(None, 7)] 0 \n",
|
| 473 |
+
"__________________________________________________________________________________________________\n",
|
| 474 |
+
"concatenate_5 (Concatenate) (None, 9223) 0 flatten_5[0][0] \n",
|
| 475 |
+
" input_12[0][0] \n",
|
| 476 |
+
"__________________________________________________________________________________________________\n",
|
| 477 |
+
"dense_25 (Dense) (None, 256) 2361344 concatenate_5[0][0] \n",
|
| 478 |
+
"__________________________________________________________________________________________________\n",
|
| 479 |
+
"dense_26 (Dense) (None, 128) 32896 dense_25[0][0] \n",
|
| 480 |
+
"__________________________________________________________________________________________________\n",
|
| 481 |
+
"dense_27 (Dense) (None, 32) 4128 dense_26[0][0] \n",
|
| 482 |
+
"__________________________________________________________________________________________________\n",
|
| 483 |
+
"dense_28 (Dense) (None, 8) 264 dense_27[0][0] \n",
|
| 484 |
+
"__________________________________________________________________________________________________\n",
|
| 485 |
+
"dense_29 (Dense) (None, 1) 9 dense_28[0][0] \n",
|
| 486 |
+
"==================================================================================================\n",
|
| 487 |
+
"Total params: 2,430,769\n",
|
| 488 |
+
"Trainable params: 273\n",
|
| 489 |
+
"Non-trainable params: 2,430,496\n",
|
| 490 |
+
"__________________________________________________________________________________________________\n",
|
| 491 |
+
"None\n"
|
| 492 |
+
]
|
| 493 |
+
}
|
| 494 |
+
],
|
| 495 |
+
"source": [
|
| 496 |
+
"for (layer_hrz, layer_vrt) in zip(model_hrz.layers[:-trainable_layers], model_vrt.layers[:-trainable_layers]):\n",
|
| 497 |
+
" layer_hrz.trainable = False\n",
|
| 498 |
+
" layer_vrt.trainable = False\n",
|
| 499 |
+
"\n",
|
| 500 |
+
"print(model_hrz.summary())"
|
| 501 |
+
]
|
| 502 |
+
},
|
| 503 |
+
{
|
| 504 |
+
"cell_type": "code",
|
| 505 |
+
"execution_count": 16,
|
| 506 |
+
"metadata": {
|
| 507 |
+
"colab": {
|
| 508 |
+
"base_uri": "https://localhost:8080/"
|
| 509 |
+
},
|
| 510 |
+
"id": "diMNdUZZjeLQ",
|
| 511 |
+
"outputId": "282ee5b4-fde4-49ee-c57c-3fc9ae03c483"
|
| 512 |
+
},
|
| 513 |
+
"outputs": [
|
| 514 |
+
{
|
| 515 |
+
"name": "stdout",
|
| 516 |
+
"output_type": "stream",
|
| 517 |
+
"text": [
|
| 518 |
+
"\n",
|
| 519 |
+
"Start of training for model-hrz (x-pixels)\n",
|
| 520 |
+
"Epoch 1/40\n",
|
| 521 |
+
"112/112 [==============================] - 8s 42ms/step - loss: 84873.1875 - val_loss: 83177.8281\n",
|
| 522 |
+
"Epoch 2/40\n",
|
| 523 |
+
"112/112 [==============================] - 4s 35ms/step - loss: 84873.2031 - val_loss: 83178.6172\n",
|
| 524 |
+
"Epoch 3/40\n",
|
| 525 |
+
" 19/112 [====>.........................] - ETA: 3s - loss: 81323.5703"
|
| 526 |
+
]
|
| 527 |
+
},
|
| 528 |
+
{
|
| 529 |
+
"ename": "KeyboardInterrupt",
|
| 530 |
+
"evalue": "",
|
| 531 |
+
"output_type": "error",
|
| 532 |
+
"traceback": [
|
| 533 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
| 534 |
+
"\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
|
| 535 |
+
"\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_13436/1078372996.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"\\nStart of training for model-hrz (x-pixels)\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m results_hrz = model_hrz.fit(x_train,\n\u001b[0m\u001b[0;32m 3\u001b[0m \u001b[0my_hrz_train\u001b[0m \u001b[1;33m*\u001b[0m \u001b[0my_scale\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[0mvalidation_data\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx_val\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0my_hrz_val\u001b[0m \u001b[1;33m*\u001b[0m \u001b[0my_scale\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mepochs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mn_epochs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 536 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1181\u001b[0m _r=1):\n\u001b[0;32m 1182\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1183\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1184\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1185\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 537 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 887\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 888\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mOptionalXlaContext\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_jit_compile\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 889\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 890\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 891\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 538 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 922\u001b[0m \u001b[1;31m# In this case we have not created variables on the first call. So we can\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 923\u001b[0m \u001b[1;31m# run the first trace but we should fail if variables are created.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 924\u001b[1;33m \u001b[0mresults\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_stateful_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 925\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_created_variables\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 926\u001b[0m raise ValueError(\"Creating variables on a non-first call to a function\"\n",
|
| 539 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 3021\u001b[0m (graph_function,\n\u001b[0;32m 3022\u001b[0m filtered_flat_args) = self._maybe_define_function(args, kwargs)\n\u001b[1;32m-> 3023\u001b[1;33m return graph_function._call_flat(\n\u001b[0m\u001b[0;32m 3024\u001b[0m filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access\n\u001b[0;32m 3025\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 540 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[1;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[0;32m 1958\u001b[0m and executing_eagerly):\n\u001b[0;32m 1959\u001b[0m \u001b[1;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1960\u001b[1;33m return self._build_call_outputs(self._inference_function.call(\n\u001b[0m\u001b[0;32m 1961\u001b[0m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[0;32m 1962\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n",
|
| 541 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36mcall\u001b[1;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[0;32m 589\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0m_InterpolateFunctionError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 590\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mcancellation_manager\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 591\u001b[1;33m outputs = execute.execute(\n\u001b[0m\u001b[0;32m 592\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msignature\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 593\u001b[0m \u001b[0mnum_outputs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_num_outputs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 542 |
+
"\u001b[1;32mF:\\Education_Study\\University\\MSC\\Thesis\\EyeTracker\\venv\\lib\\site-packages\\tensorflow\\python\\eager\\execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[1;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 59\u001b[1;33m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0m\u001b[0;32m 60\u001b[0m inputs, attrs, num_outputs)\n\u001b[0;32m 61\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 543 |
+
"\u001b[1;31mKeyboardInterrupt\u001b[0m: "
|
| 544 |
+
]
|
| 545 |
+
}
|
| 546 |
+
],
|
| 547 |
+
"source": [
|
| 548 |
+
"print(\"\\nStart of training for model-hrz (x-pixels)\")\n",
|
| 549 |
+
"results_hrz = model_hrz.fit(x_train,\n",
|
| 550 |
+
" y_hrz_train * y_scale,\n",
|
| 551 |
+
" validation_data=(x_val, y_hrz_val * y_scale),\n",
|
| 552 |
+
" epochs=n_epochs,\n",
|
| 553 |
+
" callbacks=cb)\n",
|
| 554 |
+
"print(\"End of training\")"
|
| 555 |
+
]
|
| 556 |
+
},
|
| 557 |
+
{
|
| 558 |
+
"cell_type": "code",
|
| 559 |
+
"execution_count": null,
|
| 560 |
+
"metadata": {
|
| 561 |
+
"colab": {
|
| 562 |
+
"base_uri": "https://localhost:8080/"
|
| 563 |
+
},
|
| 564 |
+
"id": "vQz7jjQ5CTS8",
|
| 565 |
+
"outputId": "f9c25499-ae97-4d66-8bfd-d5e13d35585d"
|
| 566 |
+
},
|
| 567 |
+
"outputs": [],
|
| 568 |
+
"source": [
|
| 569 |
+
"print(\"\\nStart of training for model-vrt (y-pixels)\")\n",
|
| 570 |
+
"results_vrt = model_vrt.fit(x_train,\n",
|
| 571 |
+
" y_vrt_train * y_scale,\n",
|
| 572 |
+
" validation_data=(x_val, y_vrt_val * y_scale),\n",
|
| 573 |
+
" epochs=n_epochs,\n",
|
| 574 |
+
" callbacks=cb)\n",
|
| 575 |
+
"print(\"End of training\")"
|
| 576 |
+
]
|
| 577 |
+
},
|
| 578 |
+
{
|
| 579 |
+
"cell_type": "code",
|
| 580 |
+
"execution_count": null,
|
| 581 |
+
"metadata": {
|
| 582 |
+
"colab": {
|
| 583 |
+
"base_uri": "https://localhost:8080/"
|
| 584 |
+
},
|
| 585 |
+
"id": "DCu14ufb8oFN",
|
| 586 |
+
"outputId": "55beefd3-f9e9-4338-8b26-4486771d830e"
|
| 587 |
+
},
|
| 588 |
+
"outputs": [],
|
| 589 |
+
"source": [
|
| 590 |
+
"# print(\"\\nSaving models...\")\n",
|
| 591 |
+
"# model_hrz.save(sbj_dir + \"model-et-hrz.h5\")\n",
|
| 592 |
+
"# model_vrt.save(sbj_dir + \"model-et-vrt.h5\")"
|
| 593 |
+
]
|
| 594 |
+
},
|
| 595 |
+
{
|
| 596 |
+
"cell_type": "code",
|
| 597 |
+
"execution_count": null,
|
| 598 |
+
"metadata": {
|
| 599 |
+
"id": "e9L074ihCXD6"
|
| 600 |
+
},
|
| 601 |
+
"outputs": [],
|
| 602 |
+
"source": [
|
| 603 |
+
"# Predicting outputs for train and val data\n",
|
| 604 |
+
"y_hrz_prd_train = model_hrz.predict(x_train).reshape((n_train,)) / y_scale\n",
|
| 605 |
+
"y_hrz_prd_val = model_hrz.predict(x_val).reshape((n_val,)) / y_scale\n",
|
| 606 |
+
"y_vrt_prd_train = model_vrt.predict(x_train).reshape((n_train,)) / y_scale\n",
|
| 607 |
+
"y_vrt_prd_val = model_vrt.predict(x_val).reshape((n_val,)) / y_scale"
|
| 608 |
+
]
|
| 609 |
+
},
|
| 610 |
+
{
|
| 611 |
+
"cell_type": "code",
|
| 612 |
+
"execution_count": null,
|
| 613 |
+
"metadata": {},
|
| 614 |
+
"outputs": [],
|
| 615 |
+
"source": [
|
| 616 |
+
"hrz_train_eval = model_hrz.evaluate(x_train, y_hrz_train * y_scale)\n",
|
| 617 |
+
"hrz_val_eval = model_hrz.evaluate(x_val, y_hrz_val * y_scale)\n",
|
| 618 |
+
"vrt_train_eval = model_vrt.evaluate(x_train, y_vrt_train * y_scale)\n",
|
| 619 |
+
"vrt_val_eval = model_vrt.evaluate(x_val, y_vrt_val * y_scale)\n",
|
| 620 |
+
"\n",
|
| 621 |
+
"print(hrz_train_eval, hrz_val_eval, vrt_train_eval, vrt_val_eval)"
|
| 622 |
+
]
|
| 623 |
+
},
|
| 624 |
+
{
|
| 625 |
+
"cell_type": "code",
|
| 626 |
+
"execution_count": null,
|
| 627 |
+
"metadata": {},
|
| 628 |
+
"outputs": [],
|
| 629 |
+
"source": [
|
| 630 |
+
"min_out_ratio = 0.005\n",
|
| 631 |
+
"max_out_ratio = 0.995\n",
|
| 632 |
+
"\n",
|
| 633 |
+
"y_hrz_prd_train[y_hrz_prd_train < min_out_ratio] = min_out_ratio\n",
|
| 634 |
+
"y_hrz_prd_val[y_hrz_prd_val < min_out_ratio] = min_out_ratio\n",
|
| 635 |
+
"y_vrt_prd_train[y_vrt_prd_train < min_out_ratio] = min_out_ratio\n",
|
| 636 |
+
"y_vrt_prd_val[y_vrt_prd_val < min_out_ratio] = min_out_ratio\n",
|
| 637 |
+
"\n",
|
| 638 |
+
"y_hrz_prd_train[y_hrz_prd_train > max_out_ratio] = max_out_ratio\n",
|
| 639 |
+
"y_hrz_prd_val[y_hrz_prd_val > max_out_ratio] = max_out_ratio\n",
|
| 640 |
+
"y_vrt_prd_train[y_vrt_prd_train > max_out_ratio] = max_out_ratio\n",
|
| 641 |
+
"y_vrt_prd_val[y_vrt_prd_val > max_out_ratio] = max_out_ratio"
|
| 642 |
+
]
|
| 643 |
+
},
|
| 644 |
+
{
|
| 645 |
+
"cell_type": "code",
|
| 646 |
+
"execution_count": null,
|
| 647 |
+
"metadata": {
|
| 648 |
+
"colab": {
|
| 649 |
+
"base_uri": "https://localhost:8080/",
|
| 650 |
+
"height": 338
|
| 651 |
+
},
|
| 652 |
+
"id": "FDC8rCcVjjDa",
|
| 653 |
+
"outputId": "48c3b4f3-1946-4554-b0e9-6ae7c0ca6120"
|
| 654 |
+
},
|
| 655 |
+
"outputs": [],
|
| 656 |
+
"source": [
|
| 657 |
+
"# Displaying data\n",
|
| 658 |
+
"smp_num = 0\n",
|
| 659 |
+
"print(\"Train\")\n",
|
| 660 |
+
"sample_train = (int(y_hrz_train[smp_num] * y_scale),\n",
|
| 661 |
+
" int(y_vrt_train[smp_num] * y_scale))\n",
|
| 662 |
+
"sample_prd_train = (int(y_hrz_prd_train[smp_num] * y_scale),\n",
|
| 663 |
+
" int(y_vrt_prd_train[smp_num] * y_scale))\n",
|
| 664 |
+
"print(sample_train)\n",
|
| 665 |
+
"print(sample_prd_train)\n",
|
| 666 |
+
"\n",
|
| 667 |
+
"print(\"Validation\")\n",
|
| 668 |
+
"sample_val = (int(y_hrz_val[smp_num] * y_scale),\n",
|
| 669 |
+
" int(y_vrt_val[smp_num] * y_scale))\n",
|
| 670 |
+
"sample_prd_val = (int(y_hrz_prd_val[smp_num] * y_scale),\n",
|
| 671 |
+
" int(y_vrt_prd_val[smp_num] * y_scale))\n",
|
| 672 |
+
"print(sample_val)\n",
|
| 673 |
+
"print(sample_prd_val)\n",
|
| 674 |
+
"\n",
|
| 675 |
+
"_, ax = plt.subplots(1, 2)\n",
|
| 676 |
+
"ax[0].imshow((x1_train[smp_num] * x1_scaler).astype(np.uint8).\n",
|
| 677 |
+
" reshape((frame_h, frame_w)), cmap=\"gray\", vmin=0, vmax=255)\n",
|
| 678 |
+
"ax[1].imshow((x1_val[smp_num] * x1_scaler).astype(np.uint8).\n",
|
| 679 |
+
" reshape((frame_h, frame_w)), cmap=\"gray\", vmin=0, vmax=255)"
|
| 680 |
+
]
|
| 681 |
+
},
|
| 682 |
+
{
|
| 683 |
+
"cell_type": "code",
|
| 684 |
+
"execution_count": null,
|
| 685 |
+
"metadata": {},
|
| 686 |
+
"outputs": [],
|
| 687 |
+
"source": [
|
| 688 |
+
"sbj_test_dir = path2root + subjects_dir + f\"{sbj_num_test}/\"\n",
|
| 689 |
+
"data_et_test_dir = sbj_test_dir + data_et_fol\n",
|
| 690 |
+
"print(f\"\\nLoading subject data in {data_et_test_dir}\")\n",
|
| 691 |
+
"with open(data_et_test_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 692 |
+
" x1_load_test = pickle.load(f)\n",
|
| 693 |
+
"with open(data_et_test_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 694 |
+
" x2_load_test = pickle.load(f)\n",
|
| 695 |
+
"with open(data_et_test_dir + \"y.pickle\", \"rb\") as f:\n",
|
| 696 |
+
" y_load_test = pickle.load(f)\n",
|
| 697 |
+
"n_smp_test= x1_load_test.shape[0]\n",
|
| 698 |
+
"print(f\"Test samples number: {n_smp_test}\")"
|
| 699 |
+
]
|
| 700 |
+
},
|
| 701 |
+
{
|
| 702 |
+
"cell_type": "code",
|
| 703 |
+
"execution_count": null,
|
| 704 |
+
"metadata": {},
|
| 705 |
+
"outputs": [],
|
| 706 |
+
"source": [
|
| 707 |
+
"# Displaying data\n",
|
| 708 |
+
"smp_num = 0\n",
|
| 709 |
+
"print(x2_load_test[smp_num])\n",
|
| 710 |
+
"print(y_load_test[smp_num])\n",
|
| 711 |
+
"plt.imshow(x1_load_test[smp_num].reshape((frame_h, frame_w)),\n",
|
| 712 |
+
" cmap=\"gray\", vmin=0, vmax=255)\n",
|
| 713 |
+
"plt.show()"
|
| 714 |
+
]
|
| 715 |
+
},
|
| 716 |
+
{
|
| 717 |
+
"cell_type": "code",
|
| 718 |
+
"execution_count": null,
|
| 719 |
+
"metadata": {},
|
| 720 |
+
"outputs": [],
|
| 721 |
+
"source": [
|
| 722 |
+
"x2_chs_inp_test = x2_load_test[:, chosen_inputs]\n",
|
| 723 |
+
"\n",
|
| 724 |
+
"x1_test = x1_load_test / x1_scaler\n",
|
| 725 |
+
"x2_test = x2_scaler.transform(x2_chs_inp_test)\n",
|
| 726 |
+
"\n",
|
| 727 |
+
"x_test = [x1_test, x2_test]\n",
|
| 728 |
+
"\n",
|
| 729 |
+
"# Predicting outputs for train and test data\n",
|
| 730 |
+
"y_hrz_test_prd = model_hrz.predict(x_test).reshape((n_smp_test,)) / y_scale\n",
|
| 731 |
+
"y_vrt_test_prd = model_vrt.predict(x_test).reshape((n_smp_test,)) / y_scale\n",
|
| 732 |
+
"\n",
|
| 733 |
+
"hrz_test_eval = model_hrz.evaluate(x_test, y_load_test[:, 0] * y_scale)\n",
|
| 734 |
+
"vrt_test_eval = model_vrt.evaluate(x_test, y_load_test[:, 1] * y_scale)\n",
|
| 735 |
+
"\n",
|
| 736 |
+
"print(hrz_test_eval, vrt_test_eval)\n",
|
| 737 |
+
"\n",
|
| 738 |
+
"min_out_ratio = 0.005\n",
|
| 739 |
+
"max_out_ratio = 0.995\n",
|
| 740 |
+
"\n",
|
| 741 |
+
"y_hrz_test_prd[y_hrz_test_prd < min_out_ratio] = min_out_ratio\n",
|
| 742 |
+
"y_vrt_test_prd[y_vrt_test_prd < min_out_ratio] = min_out_ratio"
|
| 743 |
+
]
|
| 744 |
+
},
|
| 745 |
+
{
|
| 746 |
+
"cell_type": "code",
|
| 747 |
+
"execution_count": null,
|
| 748 |
+
"metadata": {},
|
| 749 |
+
"outputs": [],
|
| 750 |
+
"source": [
|
| 751 |
+
"smp_num = 8000\n",
|
| 752 |
+
"print(\"Test\")\n",
|
| 753 |
+
"sample_test = (int(y_load_test[smp_num, 0] * y_scale),\n",
|
| 754 |
+
" int(y_load_test[smp_num, 1] * y_scale))\n",
|
| 755 |
+
"sample_test_prd = (int(y_hrz_test_prd[smp_num] * y_scale),\n",
|
| 756 |
+
" int(y_vrt_test_prd[smp_num] * y_scale))\n",
|
| 757 |
+
"print(sample_test)\n",
|
| 758 |
+
"print(sample_test_prd)\n",
|
| 759 |
+
"\n",
|
| 760 |
+
"plt.imshow((x1_test[smp_num] * x1_scaler).astype(np.uint8).\n",
|
| 761 |
+
" reshape((frame_h, frame_w)), cmap=\"gray\", vmin=0, vmax=255)"
|
| 762 |
+
]
|
| 763 |
+
}
|
| 764 |
+
],
|
| 765 |
+
"metadata": {
|
| 766 |
+
"colab": {
|
| 767 |
+
"collapsed_sections": [],
|
| 768 |
+
"name": "rtn_2mdl_et_35p_predict_tst.ipynb",
|
| 769 |
+
"provenance": []
|
| 770 |
+
},
|
| 771 |
+
"kernelspec": {
|
| 772 |
+
"display_name": "venv",
|
| 773 |
+
"language": "python",
|
| 774 |
+
"name": "venv"
|
| 775 |
+
},
|
| 776 |
+
"language_info": {
|
| 777 |
+
"codemirror_mode": {
|
| 778 |
+
"name": "ipython",
|
| 779 |
+
"version": 3
|
| 780 |
+
},
|
| 781 |
+
"file_extension": ".py",
|
| 782 |
+
"mimetype": "text/x-python",
|
| 783 |
+
"name": "python",
|
| 784 |
+
"nbconvert_exporter": "python",
|
| 785 |
+
"pygments_lexer": "ipython3",
|
| 786 |
+
"version": "3.9.6"
|
| 787 |
+
}
|
| 788 |
+
},
|
| 789 |
+
"nbformat": 4,
|
| 790 |
+
"nbformat_minor": 1
|
| 791 |
+
}
|
codes/jupyter_notebook/rtn_mdl_boi.ipynb
ADDED
|
@@ -0,0 +1,505 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"id": "370e2b14",
|
| 7 |
+
"metadata": {},
|
| 8 |
+
"outputs": [],
|
| 9 |
+
"source": [
|
| 10 |
+
"from tensorflow.keras.callbacks import EarlyStopping\n",
|
| 11 |
+
"from tensorflow.keras.models import load_model\n",
|
| 12 |
+
"from tensorflow.keras.utils import to_categorical\n",
|
| 13 |
+
"from sklearn.utils import shuffle\n",
|
| 14 |
+
"from joblib import load as j_load\n",
|
| 15 |
+
"from joblib import dump as j_dump\n",
|
| 16 |
+
"import pickle\n",
|
| 17 |
+
"import tuning_parameters as tp\n",
|
| 18 |
+
"import numpy as np\n",
|
| 19 |
+
"import os"
|
| 20 |
+
]
|
| 21 |
+
},
|
| 22 |
+
{
|
| 23 |
+
"cell_type": "code",
|
| 24 |
+
"execution_count": 2,
|
| 25 |
+
"id": "f1344641",
|
| 26 |
+
"metadata": {},
|
| 27 |
+
"outputs": [],
|
| 28 |
+
"source": [
|
| 29 |
+
"path2root = \"../\"\n",
|
| 30 |
+
"subjects_fol = \"subjects/\"\n",
|
| 31 |
+
"models_fol = \"models/\"\n",
|
| 32 |
+
"models_boi_fol = \"boi/\"\n",
|
| 33 |
+
"trained_fol = \"trained/\"\n",
|
| 34 |
+
"data_boi_fol = \"data-boi/\"\n",
|
| 35 |
+
"r_train = 0.8\n",
|
| 36 |
+
"n_epochs = 2\n",
|
| 37 |
+
"patience = 1\n",
|
| 38 |
+
"trainable_layers = 1\n",
|
| 39 |
+
"chosen_inputs = [0, 1, 2, 6, 7, 8, 9]"
|
| 40 |
+
]
|
| 41 |
+
},
|
| 42 |
+
{
|
| 43 |
+
"cell_type": "code",
|
| 44 |
+
"execution_count": 3,
|
| 45 |
+
"id": "5c062b07",
|
| 46 |
+
"metadata": {},
|
| 47 |
+
"outputs": [],
|
| 48 |
+
"source": [
|
| 49 |
+
"trained_dir = path2root + models_fol + models_boi_fol + trained_fol\n",
|
| 50 |
+
"public_model_dir = trained_dir + f\"model{tp.MODEL_BLINK_OUT_IN_NUM}.h5\"\n",
|
| 51 |
+
"public_scalers_dir = trained_dir + f\"scalers{tp.MODEL_BLINK_OUT_IN_NUM}.bin\"\n",
|
| 52 |
+
"sbj_dir = path2root + subjects_fol + f\"{tp.NUMBER}/\""
|
| 53 |
+
]
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"cell_type": "code",
|
| 57 |
+
"execution_count": 4,
|
| 58 |
+
"id": "43563004",
|
| 59 |
+
"metadata": {},
|
| 60 |
+
"outputs": [
|
| 61 |
+
{
|
| 62 |
+
"name": "stdout",
|
| 63 |
+
"output_type": "stream",
|
| 64 |
+
"text": [
|
| 65 |
+
"\n",
|
| 66 |
+
"Loading subject data in in_blink_out folder...\n",
|
| 67 |
+
"Sapmles number: 60\n"
|
| 68 |
+
]
|
| 69 |
+
}
|
| 70 |
+
],
|
| 71 |
+
"source": [
|
| 72 |
+
"print(\"\\nLoading subject data in in_blink_out folder...\")\n",
|
| 73 |
+
"data_boi_dir = sbj_dir + data_boi_fol\n",
|
| 74 |
+
"with open(data_boi_dir + \"x1.pickle\", \"rb\") as f:\n",
|
| 75 |
+
" x1_load = pickle.load(f)\n",
|
| 76 |
+
"with open(data_boi_dir + \"x2.pickle\", \"rb\") as f:\n",
|
| 77 |
+
" x2_load = pickle.load(f)\n",
|
| 78 |
+
"with open(data_boi_dir + \"y.pickle\", \"rb\") as f:\n",
|
| 79 |
+
" y_load = pickle.load(f)\n",
|
| 80 |
+
"n_smp, frame_h, frame_w = x1_load.shape[:-1]\n",
|
| 81 |
+
"print(f\"Sapmles number: {n_smp}\")"
|
| 82 |
+
]
|
| 83 |
+
},
|
| 84 |
+
{
|
| 85 |
+
"cell_type": "code",
|
| 86 |
+
"execution_count": 5,
|
| 87 |
+
"id": "b7c5f075",
|
| 88 |
+
"metadata": {},
|
| 89 |
+
"outputs": [],
|
| 90 |
+
"source": [
|
| 91 |
+
"import matplotlib.pyplot as plt"
|
| 92 |
+
]
|
| 93 |
+
},
|
| 94 |
+
{
|
| 95 |
+
"cell_type": "code",
|
| 96 |
+
"execution_count": 6,
|
| 97 |
+
"id": "227878ef",
|
| 98 |
+
"metadata": {},
|
| 99 |
+
"outputs": [
|
| 100 |
+
{
|
| 101 |
+
"name": "stdout",
|
| 102 |
+
"output_type": "stream",
|
| 103 |
+
"text": [
|
| 104 |
+
"[ 2.90088087 0.06944803 -0.15008799 3.02945633 -3.65793277 56.22252096\n",
|
| 105 |
+
" 0.45436421 0.47427553 0.46212459 0.46236947]\n",
|
| 106 |
+
"2.0\n"
|
| 107 |
+
]
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"data": {
|
| 111 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAOkAAAD6CAYAAABEdWDWAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAad0lEQVR4nO2dfahmV3XGn5WZyXyP8xUnYyYmKUpFsFEYUsX+IbGBNBX1Dyl+UKYQyD8tRLTopIVSoYX4jx/QYhmIOAUxahUSgkXSOFKEEh1NtPkgZhTEhEmmk8xkZjLJmMTVP+478b7rPPee55773vfdNz4/GOaefffZZ599zrrnPOusvXZkJowx7XLJrDtgjFkcG6kxjWMjNaZxbKTGNI6N1JjGsZEa0zjLMtKIuDEiHouIYxFxcFKdMsb8jhj6nTQi1gD4OYAbADwB4EcAPpKZjyy0z7p163L9+vW1nUW3GazPQ/cbAmtHabvWmVSfWTuXXLJyL0m1T8p4KNeMnUctY+eljKPC0PP47W9/29uf2u+1a9eObZ8/fx4XLlygJ7KWFYpcB+BYZv5y1LE7AXwAwIJGun79elx77bVjZbXz69at6z3wpIxUaadeAAD4zW9+0yl7+eWXe/erZWvWrFm4syNeeuml3j7WP3wAcOmll/YeSzn/V155pVOnnmvdBrr9Zu3UG7X2GejeD0odoHtfsfujnj8b61p24cKFTp1axv6QbNmyZWx7165dY9tHjhzp7PNqewv+pp8rAPx63vYTozJjzARZzpNUIiJuAXALwP8KGmMWZzlP0icBXDlve9+obIzMPJSZ+zNzv/Iqa4wZZzlP0h8BeHNEXIM54/wwgI/27VR1WX1/Z1pOcTAo7dQyVqfC9NaLL744kf1YndonpuWqlmIaqL61sD+QVScBwMaNG3vbrhq4aktWxvRePTem9ZVrxKj9Vhxpyj2j3HvsWNUnUMdnMZ/KYCPNzJcj4m8AfBfAGgBfzsyHh7ZnjOEsS5Nm5ncAfGdCfTHGEBxxZEzjrLh3d6mwb3fKO7+iN2sZO1bVic8//3ynDtOktS2mN+v3NKbThgQKMD3zwgsvjG2z76S1DgBs2rRpbHvz5s2dOlXLsu+0VQMz3Vo1qKLR2XdKRj3flQyCUAIu+sZjsb74SWpM49hIjWkcG6kxjWMjNaZxpu446vuoPFTMV2eKEgTA6lTHBHMSsTIloFypM2SmjBI8zmDBA/XclAD/rVu3duookwcU6hgxh6DigFOcOQrKuCrHqsEmdhwZs4qxkRrTODZSYxpnqpo0Ijrv632Bxxf3mw/TckqgghJwoExoViY5M+2kTAyvKB/hh2ZvYMevmpxdDyXAvgbqM+p+QyehM5TAeKVORemjMjHdwQzGvIawkRrTODZSYxrHRmpM48w8mEGZ4TIkFacyw4V9BK91lMyAQNcBxhwBdb+VTGGpzAJhY1QdHHVWDKvDxrHCZsoozhNlVpAC2686oYYGYNT9mOOIOddU/CQ1pnFspMY0jo3UmMaZejBD1TPKB+WqJ5SsC0pmBDbL//z582PbTG+xwOxapmjZoUsv1P2YlqoalGnCbdu2dcpq1gUWPF/3Y+d65syZsW2W4aLqNCXDg6qtFW2vZGassHbqebD7o14jJfjm1WP29soYM1NspMY0jo3UmMaxkRrTOFMPZuhbZmLo2p9KEIKSdaE6iphThs3wqM4D5riqMKdQPZ6yZCH7UF4/qLM+7969u1N21VVX9bZdHSPsPE6dOjW2ffLkyU6dmlKUXQ8le4IyjkpQiDK7ilHHaFJZKS7iJ6kxjWMjNaZxbKTGNM7Ml5lQlk1XPjoPyaigLGvAPrCzoPN6HmwJByWjQp9mZ8dn2rJqUGX5DqCr25kmrAEObDyqTlOCO9iY1WvNrsfQbImKP6ReD6Y36xixMVOWXlwIP0mNaRwbqTGNYyM1pnFspMY0ztQdR30feoem4qwCn81eqc4LJaMAm2
XPHCWKg0FBcXhUx8SOHTs6depslrNnz3bqsLLq4GBOkLpmKVvDtI41q1NnHDHnEiursGtUYY6beo2Gro2rBDPYcWTMaxgbqTGN02ukEfHliDgREQ/NK9sZEfdGxOOj/7vvW8aYiaBo0q8A+BcA/z6v7CCA+zLz9og4ONr+dF9DbJmJIVkXlI/OQ5ce3LBhw9g201JMA1WdzHTJkEyASpY7Fphe22GZEWr2hIX6VKnnr+g9RdszH0FtW9WtVScO1YR9y6KwMuW6KkE8r9bt62Rm/jeAZ0vxBwAcHv18GMAH+9oxxgxjqCbdk5nHRz8/BWDPhPpjjCks23GUc+9jC343iIhbIuJoRBxV3OnGmHGGGunTEbEXAEb/n1ioYmYeysz9mblf+Z5ljBlnaDDD3QAOALh99P9dQzugLJmgpLlUqGKd/dGojiMlCwPQdfAoaT8ZSjDDuXPnxraPHTvWqVPHiDk86rkCXWcOS+lZnXQsNWp1pLExq8diDpc6M+a5557rrcPaUtc1rdRxZOehBK4oTsOFUD7BfA3A/wD4w4h4IiJuxpxx3hARjwP409G2MWYF6H2SZuZHFvjVeyfcF2MMwRFHxjTOVAPsM7OjDZR3c0VPDFkij2lE5UM908R1vxrgDnQDypmWq+e6kkHf27dv75Tt3bt3bJstRVH7xDRhPTcWpKL4GpSsi+zc6jgO8X0A3XFkfox6HykB9hPVpMaY2WIjNaZxbKTGNI6N1JjGmfr6pH3rNCpOIuZMUBwFVeArTgDFKbNQWUWZhVOdS4rDRclmwRxZl19+eads586dY9sshWbtt7KkB5vhUq8jq1PPn10zVlb7qNwzrJ16XZVrr9RZSoCOn6TGNI6N1JjGsZEa0zg2UmMaZ+oRR32RQUpqFOZMqfuxqA9F4E9qbUnmcKkzSpijpJ4Hq1OdDEraTZb2k0Uc1fNnaVeUNVzqfqxOTfvCHFBK2lUlVc2kIo6UVDFKihXF0fjqMeWaxpiZYCM1pnFspMY0zszXJ60ompSh6DRFO9R22EdwRUuytmuWBzbDpMJ0mpJhompSFszAxrWm+WR1lCCEqjdZ2tHaztDsCUPWIgW0bAmKlhyS9rPenw5mMGYVYyM1pnFspMY0jo3UmMaZuuNIWfulMiRdCHMmKB+0ldk0daYKOz5zXNV+s5SaFTY+iuNISenB1idl59YHc/jUMuU8hq7XoqSYGTqOShoaxbnUNwNrsXP3k9SYxrGRGtM4NlJjGmfqmrTvA7KiJdk7f9UcLAig1lECqlk7SupJFgShaLBaNvTjedU8LOBfCQJg1P3YePR9vGf7DQ1kUTJTsOuhrKFa+83OQ/FHLEWDVvwkNaZxbKTGNI6N1JjGsZEa0zjNBTMMDUJQ1uMckplBSQUJDPswz9pRZmYoH+H7UqeyYzEUZ86kAkcYQ4+lOJwUx5GSmUGpsxz8JDWmcWykxjSOjdSYxpl5MMNKtausEakECigBD6zeUrLBLXb8oWtdKuOsBJ0rKNkblSB4hjIhQwnKUHwUSmaGoffvcu57P0mNaRwbqTGNYyM1pnF6jTQiroyIIxHxSEQ8HBG3jsp3RsS9EfH46P9uinRjzLJRvBsvA/hkZv4kIrYC+HFE3AvgrwDcl5m3R8RBAAcBfHrluvo7lA/8jOoEGJrmkTkBhgQhKM4ExXHEmNQMFyV4gPVHCUIY0kd1nyEOOOW6sjqTWiplIXqvdmYez8yfjH4+C+BRAFcA+ACAw6NqhwF8UD6qMUZmSZo0Iq4G8A4A9wPYk5nHR796CsCeBfa5JSKORsRRZfEdY8w4spFGxBYA3wLw8cwcS3Oec893+h6SmYcyc39m7meTYY0xiyN9cY+IdZgz0K9m5rdHxU9HxN7MPB4RewGcmESH2Du/osGGBLgrmnBosLSyhKKidycVzKAsqcFYinZaKpPScor+H1qnMrSPfVkoF9Painc3ANwB4NHM/Ny8X90N4MDo5wMA7upryxizdJQn6b
sB/CWA/42IB0dlfwfgdgDfiIibAfwKwF+sSA+N+T2n10gz8wcAFnoveu9ku2OMqTjiyJjGmeosmIjo/RA+dGaG4qgZMhOBtatkFGCOgEkFUyhLH6yk42hSziRlKYqhQRnKjKdaNjRTxZCZOks5Lz9JjWkcG6kxjWMjNaZxpp6ZYQhDljVgTCowXdF3SlCG0s7Q7BGTWlZQCZ6fVGaIocElQwM+hoyJEoTPGJIl8yJ+khrTODZSYxrHRmpM49hIjWmcqTqONm7ciLe97W1jZTW1/6ZNmzr71SluzFFQyxQHEKtTl5U4f/58p87Zs2c7Zc8//3xvndoWW/u0ws51w4YNY9vKMgvsWGfOnOmUXbhwYWxb+XjPgjtq2dAMD0pQwubNmztl9Z5RlpAYuhbskCU1lhJY4yepMY1jIzWmcWykxjSOjdSYxpmq4ygzOwK6CnGWB0kR+EMiXNjao9XB8sILL3TqKM4kVqc6T7Zs2dKpUx0c7Lyqc+fcuXO9/WGOI9bHWo85c2oflVkxzHGjOAQV2LkNSdc5qaikoeu1LoSfpMY0jo3UmMaxkRrTODNfn1RJbajMMBnyzs+SdVcNyoISWBDAiy++OLbNNNjGjRvHtpkGq3167rnnOnVqGdOkNbii6lhAC0JgKBkmlOASJVChbx+Aa+Lqb2D+ByUzw5CZU0P170L4SWpM49hIjWkcG6kxjWMjNaZxpp7Ss++DtbKOhpKugzkq6kfv6uwBuk4Y5jhi+1UnBJvNU+swZ86pU6fGtp955plOndqn6iRibbMP/pNKocmuRw1UYMdSglSqA47dP4rjSElNqqzzosw4GpqGdCH8JDWmcWykxjSOjdSYxlkVKT2VwPiqFZgGqVpSCVRgQehMO7Fg+b62n3322U6dGqjAtKSyPEMNimDtKFkXFA3GUPwINXBEmVyhpO9kZSupSZey1ugQ/CQ1pnFspMY0jo3UmMaxkRrTODPPzFBhAn/IDH42w6U6jtjsEVZW2bZtW6esptlkTqHjx4+PbZ8+fbpTp54bC4qoZcxxonzMVxwczEmnzHCp15kFgNRABebcqo4j5lxiZco9MinHUR1/pR2vT2rMawgbqTGN02ukEbEhIn4YET+NiIcj4jOj8msi4v6IOBYRX4+I7ixnY8yyUTTpBQDXZ+a5iFgH4AcR8Z8APgHg85l5Z0T8G4CbAXxpsYbWrFmDrVu3jpVVPcECBZRlDeo7P9NANZiAacIamL59+/ZOHaYTa2B81Z9AV+++7nWv69TZuXPn2DbTv3XMWPaGEydOjG2fPHmyU4ftV7U8ux6KThuypizLZlH1Hrv2bL++doDueTA/hrJe7BD9q2TAuEjvkzTnuHh3rRv9SwDXA/iPUflhAB+Uj2qMkZE0aUSsiYgHAZwAcC+AXwA4nZkXXX9PALhiRXpozO85kpFm5iuZ+XYA+wBcB+At6gEi4paIOBoRR1kcrDFmcZbk3c3M0wCOAHgXgO0RcfEFfR+AJxfY51Bm7s/M/UzLGWMWp9dxFBGXAXgpM09HxEYANwD4LOaM9UMA7gRwAMBdfW1dcsklnbSWygyCWsY+sNcytjyEkuZSWfaCpfSsjhp2/DpTZu/evZ06r3/968e263gB3XMduoYoc67V/RRnCqOOI3Ou1LFl64xWpxC79ixQQ0kXOmSZCeU8WB0lfehCKN7dvQAOR8QazD15v5GZ90TEIwDujIh/AvAAgDvkoxpjZHqNNDN/BuAdpPyXmNOnxpgVxBFHxjTOzDMzKO/mVU8xfaUsD1G9y0zLVT3BMvGxtmugAtOye/bsGdt+4xvf2KlTgyfYudbACaYt635Mk7FgihpswrR1PX+mEytKMMP69es7dapOZeeqZkJU+lSpmlQJ8FeCK5aCn6TGNI6N1JjGsZEa0zg2UmMaZ+qOo+qs6dsGuk4Q5iioDgXm8FBmeFQnAAtlZMEM1VHBnDKXXXbZ2Pbu3bs7dWpUFnNc1fNn2STqDBc2HiwCrA
ZPMCdZvR5sjOp4sECBOv7McVQzXrD7gwVcKGlPlXShytq4SvaIPifVYuuV+klqTOPYSI1pHBupMY0zdU3atxwE+3hfy5gGqTqNBc9XPcO0Q9UG7EM900VVO7GMDlWnMg1Wj69kp2PnWrNOsDFjulnRibVMCXpXljWsY8iOxe4PpudqPdbH2icWhKAsX1LrqEthqPhJakzj2EiNaRwbqTGNYyM1pnFmPgtGmeFSnUIsmKGWsXaUWQ/1oz+bdcGcADX15q5duzp1lDVMq4ODHavODGHZG/raBYCnn366U/bUU0/1tqVkPVCcdMp5KFkPWNsVdv7VmabMXlEcm4y+zAyL3Zt+khrTODZSYxrHRmpM48xckyrBA0owQy1j2qWWsWNVbcsCF1hWux07doxt1wwHQFfzKMHa7DzqsVjWwWeeeWZsm2lrpuWUTIQVJTseG4+6pAYbV0X/KpqUUcef3Q9VJ7OAC9anSj0PJWjl1bq9rRtjZoqN1JjGsZEa0zg2UmMaZ+aOoyrWlVknQz4eA9qSFlXQsw/c7KN7zXLAZo8oSw0ojprqvLj88ss7darjiGVPYOuzVgec4txider5VycR0J2FwxxAylqoykyhoctDKBkm6j2izK6qdew4MmYVYyM1pnFspMY0jo3UmMaZquMoMztCvDpKWHqKWoeJ9yr6mRDvOzZrmzmJWFl1lDCHk9LHiuK4YbNr3vCGN4xtK+lkgG4KT8UpxBwl1SlUZwmx/ZhDsDqyFKch0O036+OQlJ6KA0o5lhJ9dhE/SY1pHBupMY1jIzWmcWYezKDoMkWTDtE3TJMpKS2Z3qzHUzSgsmanEvDAMlXU4Iq6xMVC+ylZHpRlFerxWZ16PVggixLswqjjyIJbah2mN+u1Vmb8sHPtC3iwJjVmFWMjNaZxZCONiDUR8UBE3DPaviYi7o+IYxHx9YiY7BrkxhgAS3uS3grg0XnbnwXw+cx8E4BTAG6eZMeMMXNIjqOI2AfgzwH8M4BPxJzKvR7AR0dVDgP4RwBf6mmnN40EcwpVUa2sdcmEuJIus84wUdNl1LbZGqaKU0hZM6QeiwWA1DLm7GJpR1makz7Y8YekxVECUBjMUTPEccTqVMchcyTWdthY9wW7TGJ90i8A+BSAiyO2C8DpzLw48k8AuEJsyxizBHqNNCLeB+BEZv54yAEi4paIOBoRR9mq1caYxVFed98N4P0RcROADQC2AfgigO0RsXb0NN0H4Em2c2YeAnAIAPbt29c/o9kYM0avkWbmbQBuA4CIeA+Av83Mj0XENwF8CMCdAA4AuEs54BBNWrWCkr1ACZ5X1sNkGoRpoFrGjq9kmKgZFFgqzhqEoARFKOMBaB/vlVSYQyZOKJMJlPuDlTGdWMuUOsp6rez+qHUUH8pFlvOd9NOYcyIdw5xGvWMZbRljFmBJYYGZ+X0A3x/9/EsA102+S8aY+TjiyJjGsZEa0zhTnwXTN+tFSauowJwZVdCzD/d1Fog6C0aZ1VD7xGbKDFmPU5lJpDh3WJkSXMFQUnEqwQxDA0CUQIVapqwzoziX2EwiZe3ThfCT1JjGsZEa0zg2UmMaZ6qaNCI67/1Vuwz96K1QdQoLZlA0ISuruojpvdpv5eM962M9lqLZmW5jZUpGxaollYCHodR21PVJlTGqOp2dRz1/VqdqUKY/+7IOOjODMasYG6kxjWMjNaZxbKTGNM7UHUd9aRSV2Rps9siQj9dM4CuzLpQP/MpMHSU9pBJwoAYqVJQAA+ZwUdZ5VRwuffuwY6nBFYqTrm8fQHPk1YCXIctV2HFkzCrGRmpM49hIjWmcqQfYV00xRKswvTlkWUPWjtK/ocHiStv1PNSl/vpQsjCyekyTKn1SAlCUa9bX7kJtK2OtLClSl8uo24AWPD9UWwN+khrTPDZSYxrHRmpM49hIjWmcmQczKA6XoccaUkf5CM5Ef3VoDJ0Fo/RRcToMbbsyqYCLoU4hJTWoMlOJzZSpgQlbtmzp1K
nZO1gwQz3WkFlJDmYwZhVjIzWmcWykxjTOzLMFDtFXStC7orcUnaSiTAJQ9FWF6a2qi5TgjqGwMVLaHnKuDCVwQgmMZ2NUAxO2bdvWqbN58+axbaZth/hRHMxgzGsIG6kxjWMjNaZxbKTGNE5M0nnSe7CI/wPwKwC7AZyc2oEnw2rsM7A6+/372OerMvMy9oupGumrB404mpn7p37gZbAa+wyszn67z+P4ddeYxrGRGtM4szLSQzM67nJYjX0GVme/3ed5zESTGmN0/LprTONM3Ugj4saIeCwijkXEwWkfXyEivhwRJyLioXllOyPi3oh4fPT/jln2sRIRV0bEkYh4JCIejohbR+XN9jsiNkTEDyPip6M+f2ZUfk1E3D+6R74eEcOXyV4hImJNRDwQEfeMtlesz1M10ohYA+BfAfwZgLcC+EhEvHWafRD5CoAbS9lBAPdl5psB3DfabomXAXwyM98K4J0A/no0ti33+wKA6zPzWgBvB3BjRLwTwGcBfD4z3wTgFICbZ9fFBbkVwKPztleuz5k5tX8A3gXgu/O2bwNw2zT7sIS+Xg3goXnbjwHYO/p5L4DHZt3Hnv7fBeCG1dJvAJsA/ATAH2MuKGAtu2da+AdgH+b+4F0P4B4AsZJ9nvbr7hUAfj1v+4lR2WpgT2YeH/38FIA9s+zMYkTE1QDeAeB+NN7v0WvjgwBOALgXwC8AnM7MiwvctHiPfAHApwBcnH+3CyvYZzuOBpBzfy6bdItHxBYA3wLw8cw8M/93LfY7M1/JzLdj7ul0HYC3zLZHixMR7wNwIjN/PK1jTnvS95MArpy3vW9Uthp4OiL2ZubxiNiLub/8TRER6zBnoF/NzG+PipvvNwBk5umIOIK5V8XtEbF29GRq7R55N4D3R8RNADYA2Abgi1jBPk/7SfojAG8eecIuBfBhAHdPuQ9DuRvAgdHPBzCn+Zoh5tID3AHg0cz83LxfNdvviLgsIraPft6IOQ39KIAjAD40qtZUnzPztszcl5lXY+7+/V5mfgwr2ecZiO6bAPwcc9rj72ftBFigj18DcBzAS5jTFzdjTnfcB+BxAP8FYOes+1n6/CeYe5X9GYAHR/9uarnfAP4IwAOjPj8E4B9G5X8A4IcAjgH4JoD1s+7rAv1/D4B7VrrPjjgypnHsODKmcWykxjSOjdSYxrGRGtM4NlJjGsdGakzj2EiNaRwbqTGN8/8fWj1QE1QMJAAAAABJRU5ErkJggg==\n",
|
| 112 |
+
"text/plain": [
|
| 113 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 114 |
+
]
|
| 115 |
+
},
|
| 116 |
+
"metadata": {
|
| 117 |
+
"needs_background": "light"
|
| 118 |
+
},
|
| 119 |
+
"output_type": "display_data"
|
| 120 |
+
}
|
| 121 |
+
],
|
| 122 |
+
"source": [
|
| 123 |
+
"smp_num = 0\n",
|
| 124 |
+
"print(x2_load[smp_num])\n",
|
| 125 |
+
"print(y_load[smp_num])\n",
|
| 126 |
+
"plt.imshow(x1_load[smp_num].reshape((frame_h, frame_w)), cmap=\"gray\", vmin=0, vmax=255)\n",
|
| 127 |
+
"plt.show()"
|
| 128 |
+
]
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"cell_type": "code",
|
| 132 |
+
"execution_count": 7,
|
| 133 |
+
"id": "b1785e54",
|
| 134 |
+
"metadata": {},
|
| 135 |
+
"outputs": [
|
| 136 |
+
{
|
| 137 |
+
"name": "stdout",
|
| 138 |
+
"output_type": "stream",
|
| 139 |
+
"text": [
|
| 140 |
+
"\n",
|
| 141 |
+
"Normalizing data...\n"
|
| 142 |
+
]
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"data": {
|
| 146 |
+
"text/plain": [
|
| 147 |
+
"['../subjects/7/scalers-boi.bin']"
|
| 148 |
+
]
|
| 149 |
+
},
|
| 150 |
+
"execution_count": 7,
|
| 151 |
+
"metadata": {},
|
| 152 |
+
"output_type": "execute_result"
|
| 153 |
+
}
|
| 154 |
+
],
|
| 155 |
+
"source": [
|
| 156 |
+
"print(\"\\nNormalizing data...\")\n",
|
| 157 |
+
"x2_chs_inp = x2_load[:, chosen_inputs]\n",
|
| 158 |
+
"scalers = j_load(public_scalers_dir)\n",
|
| 159 |
+
"x1_scaler, x2_scaler = scalers\n",
|
| 160 |
+
"x1 = x1_load / x1_scaler\n",
|
| 161 |
+
"x2 = x2_scaler.transform(x2_chs_inp)\n",
|
| 162 |
+
"scalers_dir = sbj_dir + \"scalers-boi.bin\"\n",
|
| 163 |
+
"j_dump(scalers, scalers_dir)"
|
| 164 |
+
]
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"cell_type": "code",
|
| 168 |
+
"execution_count": 8,
|
| 169 |
+
"id": "4b5e4faf",
|
| 170 |
+
"metadata": {},
|
| 171 |
+
"outputs": [
|
| 172 |
+
{
|
| 173 |
+
"name": "stdout",
|
| 174 |
+
"output_type": "stream",
|
| 175 |
+
"text": [
|
| 176 |
+
"\n",
|
| 177 |
+
"Shuffling data...\n"
|
| 178 |
+
]
|
| 179 |
+
}
|
| 180 |
+
],
|
| 181 |
+
"source": [
|
| 182 |
+
"print(\"\\nShuffling data...\")\n",
|
| 183 |
+
"x1_shf, x2_shf, y_shf = shuffle(x1, x2, y_load)"
|
| 184 |
+
]
|
| 185 |
+
},
|
| 186 |
+
{
|
| 187 |
+
"cell_type": "code",
|
| 188 |
+
"execution_count": 9,
|
| 189 |
+
"id": "91de08eb",
|
| 190 |
+
"metadata": {},
|
| 191 |
+
"outputs": [
|
| 192 |
+
{
|
| 193 |
+
"name": "stdout",
|
| 194 |
+
"output_type": "stream",
|
| 195 |
+
"text": [
|
| 196 |
+
"\n",
|
| 197 |
+
"Splitting data to train and test...\n",
|
| 198 |
+
"Data shapes:\n",
|
| 199 |
+
"(51, 48, 44, 1) (9, 48, 44, 1) (51, 7) (9, 7) (51,) (9,)\n"
|
| 200 |
+
]
|
| 201 |
+
}
|
| 202 |
+
],
|
| 203 |
+
"source": [
|
| 204 |
+
"print(\"\\nSplitting data to train and test...\")\n",
|
| 205 |
+
"n_train = int(r_train * n_smp)\n",
|
| 206 |
+
"n_test = n_smp - n_train\n",
|
| 207 |
+
"x1_train, x2_train = x1_shf[:n_train], x2_shf[:n_train]\n",
|
| 208 |
+
"x1_test, x2_test = x1_shf[n_train:], x2_shf[n_train:]\n",
|
| 209 |
+
"y_train = y_shf[:n_train]\n",
|
| 210 |
+
"y_test = y_shf[n_train:]\n",
|
| 211 |
+
"print(\"Data shapes:\")\n",
|
| 212 |
+
"print(x1_train.shape, x1_test.shape, x2_train.shape, x2_test.shape,\n",
|
| 213 |
+
" y_train.shape, y_test.shape)"
|
| 214 |
+
]
|
| 215 |
+
},
|
| 216 |
+
{
|
| 217 |
+
"cell_type": "code",
|
| 218 |
+
"execution_count": 10,
|
| 219 |
+
"id": "f9e629e8",
|
| 220 |
+
"metadata": {},
|
| 221 |
+
"outputs": [],
|
| 222 |
+
"source": [
|
| 223 |
+
"y_train_ctg = to_categorical(y_train)\n",
|
| 224 |
+
"y_test_ctg = to_categorical(y_test)\n",
|
| 225 |
+
"\n",
|
| 226 |
+
"x_train = [x1_train, x2_train]\n",
|
| 227 |
+
"x_test = [x1_test, x2_test]"
|
| 228 |
+
]
|
| 229 |
+
},
|
| 230 |
+
{
|
| 231 |
+
"cell_type": "code",
|
| 232 |
+
"execution_count": 11,
|
| 233 |
+
"id": "bbb4dd8d",
|
| 234 |
+
"metadata": {},
|
| 235 |
+
"outputs": [
|
| 236 |
+
{
|
| 237 |
+
"name": "stdout",
|
| 238 |
+
"output_type": "stream",
|
| 239 |
+
"text": [
|
| 240 |
+
"\n",
|
| 241 |
+
"Loading 'blink_out_in' model...\n"
|
| 242 |
+
]
|
| 243 |
+
}
|
| 244 |
+
],
|
| 245 |
+
"source": [
|
| 246 |
+
"print(\"\\nLoading 'blink_out_in' model...\")\n",
|
| 247 |
+
"cb = EarlyStopping(patience=patience, verbose=1, restore_best_weights=True)\n",
|
| 248 |
+
"model = load_model(public_model_dir)"
|
| 249 |
+
]
|
| 250 |
+
},
|
| 251 |
+
{
|
| 252 |
+
"cell_type": "code",
|
| 253 |
+
"execution_count": 12,
|
| 254 |
+
"id": "47a5cae7",
|
| 255 |
+
"metadata": {},
|
| 256 |
+
"outputs": [
|
| 257 |
+
{
|
| 258 |
+
"name": "stdout",
|
| 259 |
+
"output_type": "stream",
|
| 260 |
+
"text": [
|
| 261 |
+
"\n",
|
| 262 |
+
"Model summary:\n",
|
| 263 |
+
"Model: \"model\"\n",
|
| 264 |
+
"__________________________________________________________________________________________________\n",
|
| 265 |
+
"Layer (type) Output Shape Param # Connected to \n",
|
| 266 |
+
"==================================================================================================\n",
|
| 267 |
+
"input_1 (InputLayer) [(None, 48, 44, 1)] 0 \n",
|
| 268 |
+
"__________________________________________________________________________________________________\n",
|
| 269 |
+
"conv2d (Conv2D) (None, 48, 44, 16) 416 input_1[0][0] \n",
|
| 270 |
+
"__________________________________________________________________________________________________\n",
|
| 271 |
+
"max_pooling2d (MaxPooling2D) (None, 24, 22, 16) 0 conv2d[0][0] \n",
|
| 272 |
+
"__________________________________________________________________________________________________\n",
|
| 273 |
+
"conv2d_1 (Conv2D) (None, 24, 22, 32) 12832 max_pooling2d[0][0] \n",
|
| 274 |
+
"__________________________________________________________________________________________________\n",
|
| 275 |
+
"max_pooling2d_1 (MaxPooling2D) (None, 12, 11, 32) 0 conv2d_1[0][0] \n",
|
| 276 |
+
"__________________________________________________________________________________________________\n",
|
| 277 |
+
"conv2d_2 (Conv2D) (None, 10, 9, 64) 18496 max_pooling2d_1[0][0] \n",
|
| 278 |
+
"__________________________________________________________________________________________________\n",
|
| 279 |
+
"max_pooling2d_2 (MaxPooling2D) (None, 5, 4, 64) 0 conv2d_2[0][0] \n",
|
| 280 |
+
"__________________________________________________________________________________________________\n",
|
| 281 |
+
"flatten (Flatten) (None, 1280) 0 max_pooling2d_2[0][0] \n",
|
| 282 |
+
"__________________________________________________________________________________________________\n",
|
| 283 |
+
"dense (Dense) (None, 256) 327936 flatten[0][0] \n",
|
| 284 |
+
"__________________________________________________________________________________________________\n",
|
| 285 |
+
"input_2 (InputLayer) [(None, 7)] 0 \n",
|
| 286 |
+
"__________________________________________________________________________________________________\n",
|
| 287 |
+
"concatenate (Concatenate) (None, 263) 0 dense[0][0] \n",
|
| 288 |
+
" input_2[0][0] \n",
|
| 289 |
+
"__________________________________________________________________________________________________\n",
|
| 290 |
+
"dense_1 (Dense) (None, 128) 33792 concatenate[0][0] \n",
|
| 291 |
+
"__________________________________________________________________________________________________\n",
|
| 292 |
+
"dense_2 (Dense) (None, 32) 4128 dense_1[0][0] \n",
|
| 293 |
+
"__________________________________________________________________________________________________\n",
|
| 294 |
+
"dense_3 (Dense) (None, 16) 528 dense_2[0][0] \n",
|
| 295 |
+
"__________________________________________________________________________________________________\n",
|
| 296 |
+
"dense_4 (Dense) (None, 3) 51 dense_3[0][0] \n",
|
| 297 |
+
"__________________________________________________________________________________________________\n",
|
| 298 |
+
"dense_5 (Dense) (None, 3) 12 dense_4[0][0] \n",
|
| 299 |
+
"==================================================================================================\n",
|
| 300 |
+
"Total params: 398,191\n",
|
| 301 |
+
"Trainable params: 12\n",
|
| 302 |
+
"Non-trainable params: 398,179\n",
|
| 303 |
+
"__________________________________________________________________________________________________\n",
|
| 304 |
+
"None\n"
|
| 305 |
+
]
|
| 306 |
+
}
|
| 307 |
+
],
|
| 308 |
+
"source": [
|
| 309 |
+
"for layer in model.layers[:-trainable_layers]:\n",
|
| 310 |
+
" layer.trainable = False\n",
|
| 311 |
+
"print(\"\\nModel summary:\")\n",
|
| 312 |
+
"print(model.summary())"
|
| 313 |
+
]
|
| 314 |
+
},
|
| 315 |
+
{
|
| 316 |
+
"cell_type": "code",
|
| 317 |
+
"execution_count": 13,
|
| 318 |
+
"id": "33258d12",
|
| 319 |
+
"metadata": {},
|
| 320 |
+
"outputs": [
|
| 321 |
+
{
|
| 322 |
+
"name": "stdout",
|
| 323 |
+
"output_type": "stream",
|
| 324 |
+
"text": [
|
| 325 |
+
"\n",
|
| 326 |
+
"Retraining the model...\n",
|
| 327 |
+
"Epoch 1/2\n",
|
| 328 |
+
"2/2 [==============================] - 1s 236ms/step - loss: 3.1723 - acc: 0.2941 - val_loss: 2.4191 - val_acc: 0.2222\n",
|
| 329 |
+
"Epoch 2/2\n",
|
| 330 |
+
"2/2 [==============================] - 0s 56ms/step - loss: 1.8350 - acc: 0.3137 - val_loss: 1.5906 - val_acc: 0.4444\n",
|
| 331 |
+
"End of retraining...\n"
|
| 332 |
+
]
|
| 333 |
+
}
|
| 334 |
+
],
|
| 335 |
+
"source": [
|
| 336 |
+
"print(\"\\nRetraining the model...\")\n",
|
| 337 |
+
"results = model.fit(x_train,\n",
|
| 338 |
+
" y_train_ctg,\n",
|
| 339 |
+
" validation_data=(x_test, y_test_ctg),\n",
|
| 340 |
+
" epochs=n_epochs,\n",
|
| 341 |
+
" callbacks=cb)\n",
|
| 342 |
+
"print(\"End of retraining...\")"
|
| 343 |
+
]
|
| 344 |
+
},
|
| 345 |
+
{
|
| 346 |
+
"cell_type": "code",
|
| 347 |
+
"execution_count": 14,
|
| 348 |
+
"id": "0336e76a",
|
| 349 |
+
"metadata": {},
|
| 350 |
+
"outputs": [
|
| 351 |
+
{
|
| 352 |
+
"name": "stdout",
|
| 353 |
+
"output_type": "stream",
|
| 354 |
+
"text": [
|
| 355 |
+
"INFO:tensorflow:Assets written to: ../subjects/7/model-boi\\assets\n"
|
| 356 |
+
]
|
| 357 |
+
}
|
| 358 |
+
],
|
| 359 |
+
"source": [
|
| 360 |
+
"model.save(sbj_dir + \"model-boi.h5\")"
|
| 361 |
+
]
|
| 362 |
+
},
|
| 363 |
+
{
|
| 364 |
+
"cell_type": "code",
|
| 365 |
+
"execution_count": 15,
|
| 366 |
+
"id": "b34c8d64",
|
| 367 |
+
"metadata": {},
|
| 368 |
+
"outputs": [],
|
| 369 |
+
"source": [
|
| 370 |
+
"yhat_train = model.predict(x_train).argmax(1)\n",
|
| 371 |
+
"yhat_test = model.predict(x_test).argmax(1)"
|
| 372 |
+
]
|
| 373 |
+
},
|
| 374 |
+
{
|
| 375 |
+
"cell_type": "code",
|
| 376 |
+
"execution_count": 16,
|
| 377 |
+
"id": "52d1aab5",
|
| 378 |
+
"metadata": {},
|
| 379 |
+
"outputs": [
|
| 380 |
+
{
|
| 381 |
+
"name": "stdout",
|
| 382 |
+
"output_type": "stream",
|
| 383 |
+
"text": [
|
| 384 |
+
"Test\n",
|
| 385 |
+
"0.0\n",
|
| 386 |
+
"1\n"
|
| 387 |
+
]
|
| 388 |
+
},
|
| 389 |
+
{
|
| 390 |
+
"data": {
|
| 391 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAOkAAAD6CAYAAABEdWDWAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAX4klEQVR4nO2db6il1XXGnyfz72pmxtE0yOBItUQUP7QJDDbBfgimgrUhyYdQYkKZgjBfWjAkJY4tlAZa0C8xgZaUAUOmEGKSJqBISrDWUAJFM1GT+gczEyFEGZ2WRqyjzjhm9cM5o/eus+59n7Pvuefsq88PLve+++x37/W+56z7nrX22msxImCM6Zd3LVoAY8zaWEmN6RwrqTGdYyU1pnOspMZ0jpXUmM5Zl5KSvIHk0ySPkzw0K6GMMW/B1nVSklsA/BzA9QCeBfBjADdFxJOrnbN169bYvn17y1xTnzPv9V9Fxtyn5bpaz2u9H8p5rX1azlOvQ+mX7+NGfmbyXFu2bFlxfObMGZw9e7Z8Y7euY95rAByPiGfGQtwN4OMAVlXS7du348orr5x6one9a/oH/m9+85uJtpY3QVWIrVtX3spKZqVPblPGqWQ8e/bsiuPqfrzxxhsTbRlFufJc1XxVH0XG3EcZpxqrukf53lb3Q7lHQ+MCwLZt21Yc7969e8XxsWPHVh9vagne4hIAv1p2/Oy4zRgzQ9bzJJUgeRDAQWDyv4kxZpj1PEmfA3DpsuN947YVRMThiNgfEfvz1zRjzDDr0ZofA7iC5OUYKeenAHx62kHy9/fKLskofSoUR4Fig1T/bLJMiv1b2S7ZoaDY1pW9pTipqvnzfNX9yDbg66+/PiijYm9W5PNUX0Nuq65VuY/5/ajmapFxGj9Ls5JGxFmSfwHgBwC2APhaRDzROp4xpmZd3z8j4vsAvj8jWYwxBY44MqZz5urJITnxHT/bAcp3dcWWal0nzfZmdU7Lum0lkyJjZf9mOzHfU0CzSZXzFHuv6qOsgSr3Q7FbFRad3GAomGGt9Xg/SY3pHCupMZ1jJTWmc6ykxnTO3EOAhhxFswpUmBXVuK0y5mvdKJnXw5CDQzmnogqKUN77We3eqcZWnGuKY1NxtmUHoLJJ4s05V33FGNMFVlJjOsdKakznLHxbihKY3rLLXgl4UIKlK1uqsh8UGfNWPcXea0UJHlfOq2jJQlFl5MjvkRIUMcugBMW3kN8jJbik+swM2aRr4SepMZ1jJTWmc6ykxnSOldSYzpm742goQ1tLdjZA21WgLJ4r8rTu1MlOj1lda4Wym6fVSaRkC1Sy/Cnz52ut7pmSYUMJQqhoyYyoZHj0Lhhj3kZYSY3pHCupMZ0zd5u0JateRrH3WjMBZFoDDpSxlQVtJeCiQsloUF1by/vReh2nT59e87hCCZQHNJtPyTDRYjdX92NpaWlq+c7hJ6kxnWMlNaZzrKTGdI6V1JjOmbvjaGhBvzUIYZ4oQQiKc6tC2c3TkuFhlg6wfP1KMENViiL3UXYuKdkTAM0plM+rHD5KucosY1WYbCiYYS38JDWmc6ykxnSOldSYzpmrTRoRE/aMYqcpNuhG2a1qyUAl89ysaClrqAZF5LbK3jxz5szgOLmPYrfO6rMAzM5uz+MomyuqLBRDtq2DGYzZxFhJjekcK6kxnWMlNaZzFp7SMxvMSqBAZeC3OIqUBW61FmnL4nlr7c9Z9VGcQvm4Oq96z5QMF8p7raQmVWrIVoEKynutBB3kPlUwQ26bxrHoJ6kxnWMlNaZzBpWU5NdIniT5+LK2i0jeT/LY+PeFGyumMe9cFJv06wD+AcA/L2s7BOCBiLid5KHx8a1DA5FsCjpQFr03KnhAtXWVII1ZyTirLBSVvfnKK6+sOG4NQmjJgqGgBtgrmQ+UjBItZQ0rm1T1bVQMnhkR/wHgf1PzxwEcGf99BMAnmiUwxqxJq3pfHBEnxn8/D+DiGcljjEms23EUo+f/qk
GXJA+SPEryqJIcyxizklYlfYHkXgAY/z65WseIOBwR+yNi/zTl3owxI1q15l4ABwDcPv59j3piy+4EpTxDS1pJZe7q6a9kS6gcJblPNXbOYKA4Rar7oWRGOHXq1OB5ilNI2b3SGjiiOICUgAOlzmyLIwmYdBQpgRPToCzBfBPAfwK4kuSzJG/GSDmvJ3kMwB+Oj40xG8Dgv46IuGmVlz4yY1mMMQWOODKmcxbuyZmVXZJpDXBXbLnWhfmWgPKq9EJL8LpqE+V+1bUqtr3ie1DKMyhZ9tS2TEuWkErGHTt2DM69nswdfpIa0zlWUmM6x0pqTOdYSY3pnIU7jhRjXXFCKM6U7BRqTWlZOZOU+fPYys4IxXFT0brrIo+tjFP1yQv8lTOlxSlUpctUZFI+M8o42UkEaKUoMnYcGfM2wkpqTOdYSY3pHCupMZ3TneOoNZqnJYWlEqmjpLRU51f6ZFrvR8tuo6qfEgHWGimk7EJR5qpQ7m1LihWl9qiyS8q1YIx5G2ElNaZzrKTGdM7CbdJZ7fJvsQkVm7Q1mKCiJcCgOqfF3lLsJECzE3NAgRKE0BoUodjIlT2n5NNSrnVpaWmwj1LDtNVHAPhJakz3WEmN6RwrqTGdYyU1pnPm6jgiOeFQUGqGtNQ+WW3+tY6rNiUNB9BWn7Q1zaNynpJmUgkeqBbvs+NIrc+SUdKgtqRhVfvl61fuR+sOl5Zx3uwr9zTGLAQrqTGdYyU1pnPmHswwtICs7JZXqOwbZfE6Z12oAuwr8uK5kuayNVvB0LjVOEoqSmDSBm2ttan4GloyVSibGyqUgAulPEQ1jpKFYmgcB9gbs4mxkhrTOVZSYzrHSmpM58zVcRQRg0EHrXVFFAdUnrvqoyzmV86LWQVTtKSHrPpkp1B1HYrjqDXgIt9b5Z5VDqA8TuXcqcbO7+OsUooqO1yUwBElIOTN8Vd9xRjTBVZSYzrHSmpM58w9wL4lO4EShJCp7AulhESL3QjMLutCSwa7VltKCVRQgueVwHjFllOyKVRUY2eZlOto3SignNNSY/ccfpIa0zlWUmM6x0pqTOcMKinJS0k+SPJJkk+QvGXcfhHJ+0keG/++cOPFNeadh+I4Ogvg8xHxCMldAH5C8n4AfwbggYi4neQhAIcA3LrWQCQnnB5KEICSMlFxVLTsupiVk6iipRxBJZPigKqcS0p6zMrB0ZKFQg1CGKL6vCj3sTWYoaXshuLIm+kumIg4ERGPjP/+PwBPAbgEwMcBHBl3OwLgE0NjGWOmZ6olGJKXAfgAgIcAXBwRJ8YvPQ/g4lXOOQjgIFCHoRlj1kb+3kZyJ4DvAvhsRLy0/LUYfd8pd95GxOGI2B8R+6t1OWPM2khPUpLbMFLQb0TE98bNL5DcGxEnSO4FcLJFgFml6FcyCirZ6fLYlb3VWvpBsW8UW6qlj1qeYaNoLR+SUTZgVG1qmY2hPoptqwSJzDRbIEd35S4AT0XEl5a9dC+AA+O/DwC4R57VGCOjPEmvBfCnAP6L5GPjtr8CcDuAb5O8GcAvAfzJhkhozDucQSWNiB8BWO170UdmK44xJuOII2M6Z+H1SdeTfn85SjmCvMtfcaYoC/UVqoNjqI8STDGr3RsVyg6X1lqwLc4kpRQE0FbSQwlUaH0/hhxHzsxgzCbGSmpM51hJjemcuWdmGIo6UuyLCsUGGjoH0Hb0t9pAswrcUEof5OtX7L+qn3KPFLtVmau61vx5aQ1KaEUJQMm0bBpZs6/c0xizEKykxnSOldSYzrGSGtM5c3UcLS0t4aqrrlrRpjgGMsrO9wqlzERuU8YFtOtQSi+89tprax4DWnkGpc5q1ZbPq5wg+TpaAx4UlPvfGnCiOHhOnTq15jGgZXg477zzVhyff/75six+khrTOVZSYzrHSmpM51hJjemche+CURwM0+wYOIfi8FBqmFbyKBE+2QEDTDqKFGeK4oBS5lKuFdB2ASn3aFapWjPKjpuKWTmSFJlmvSvJT1
JjOsdKakznWEmN6Zy574LJC78tO1wqlEAFZfeGkr2hOi/bgK12Seui/xBqndV8HUqdVyWYYFbX1brjRrG/W+1GxSbd0JSexpjFYiU1pnOspMZ0jpXUmM6ZezDDUGCC4gSonEJ5XGWBXUmxUu1UUVKqKGk+qrFbggCUnSKVPNX8ShqavONHcea0Bgq00pK+RXGAtaZzHaoX45SexmxirKTGdI6V1JjOmbtNOvSdXqk92ppmUrGLlIX6yr5TbIwWm7g1XWWeX7UJlcwULUEZSkkPJbhEDTjI/VrvdUvwfnXPtm/fvuJ4aWlpcO43XxuUwBizUKykxnSOldSYzrGSGtM5c98Fk41qxVGTqfooWReUBfbcVtWuqdqUYAZlh4ly/fk8pRZrdT+qYIahuYDJe9SadrMlwEGtzZPnU7JptNZwyYEK2SlUtWVHkoMZjNnEWEmN6ZxBJSW5RPJhkj8l+QTJL47bLyf5EMnjJL9FcvvQWMaY6VFs0tMArouIl0luA/Ajkv8K4HMA7oyIu0n+E4CbAXx1rYEiYtAGVexEZWFaWRivyLZLlb1AKXOh2IBVH2XDQc4OWNmWylwVSj3OfE8U21bJ8qdcq5JhoULxUSiB8dXnIduXuYQEMGmTzjSYIUa8PD7cNv4JANcB+Jdx+xEAnxgayxgzPZJNSnILyccAnARwP4BfAHgxIs79C30WwCUbIqEx73AkJY2INyLi/QD2AbgGwFVrn/EWJA+SPEry6KuvvtompTHvYKby7kbEiwAeBPAhAHtInvuCvg/Ac6ucczgi9kfE/lz+zRgzzKDjiOR7AbweES+SPA/A9QDuwEhZPwngbgAHANwzC4Faywi01MNUHEBVn8p5oJR+UBbYlcwIGSWFpBIAUvVTFviV61DSoLamYVWcSdXYynudA1d27Ngx0Sc7irIjqTpvmswMind3L4AjJLdg9OT9dkTcR/JJAHeT/DsAjwK4SxjLGDMlg0oaET8D8IGi/RmM7FNjzAbiiCNjOmfuAfbZVtqoLHKtC9PZVqjslGrxXskgkOdrzWg4qywUrWUNW8oIVvcs2+1nzpyZ6KNsFKhQfBTT2IXnqOzNbJNWAfZ5rmnKrfhJakznWEmN6RwrqTGdYyU1pnO6S+nZmq0go6SQVNJVqrtplN0jmdadIco4GUXmqk25/mr+06dPr3kMAK+88sqK41bHUUvaTWAywKAaR3H25c9R5VxSnJar4SepMZ1jJTWmc6ykxnTOwm1SJaOB0kdZiFay2rVmjFMWxhX7Sslg11J2o0Kx5apxchBCtZkgb0vM9mfVVtmtOQiikqd6P1rsRCUoZFafDyXj45vjDc5ojFkoVlJjOsdKakznWEmN6ZxNUWaipWRAhZL1IBvw1aJztcsh96ucKS2lH5QdN60L/Eoq0CrAIDt4XnrppYk+L7/88orjapzcplyrsnOp6rdz586JPhdccMFgn5zyp3rvs1Oqkic7k+w4MuZthJXUmM6xkhrTOXO3SbOtoARrK6X2lOx4ip2mBEK3lpBoCVSoxsn27muvvTbRJ7dV4yhB71Wu5FOnTg2Oo5TCaMlEWAUlVHbihRdeuOYxAOzZs2fF8a5duyb65CD86vOQMzNUqWuHsjesq8yEMWaxWEmN6RwrqTGdYyU1pnPmvgtmaEd65dxR0oC2ZDBQMhNUKIvuSkYDxeGj9Kl2mGTnTjVOFXCRnUBVEEJLeQglxakSlFD1efe73z3Rlh01VXkIJX1rnr9yUuU2Za7sAHMwgzGbGCupMZ1jJTWmc6ykxnTO3COOsgHd4vCZVZ0TJTVI5SSqImyyg0VxuFTOnLx7pIr4aXEuqTtlFAeYkhpVSWma+1ROoexgqRw3VYSPEimkOAmHdq9UY8+6PqmfpMZ0jpXUmM6xkhrTOQvPzKDUaVRrUi5HCXio7EYlUKCyAfNYld2a2xSbVAlCUAIn1ACQbIMp5TqqPkrgiLLjKNuglU1aBTMoNqliNyvn5LGrYIZspz
ozgzFvI6ykxnSOrKQkt5B8lOR94+PLST5E8jjJb5Gc9DsbY9bNNE/SWwA8tez4DgB3RsT7APwawM2zFMwYM0JyHJHcB+CPAfw9gM9xZOVeB+DT4y5HAPwtgK8OjZUNb6VO46wCHrLDpQoUyI6ivJsE0GqWKKlJFAdUFUyRnQxKCslqx4tSV6VyaCjvodInO1OU1CjVtbam2czOG8VJVo2Tgykqx9E09Ugz6pP0ywC+AODcu/oeAC9GxLlP0LMALmmWwhizKoNKSvKjAE5GxE9aJiB5kORRkkerp5IxZm2UZ/C1AD5G8kYASwB2A/gKgD0kt46fpvsAPFedHBGHARwGgH379g1/bzXGrGBQSSPiNgC3AQDJDwP4y4j4DMnvAPgkgLsBHABwz9BYJKXg7IySmSEv3leBCrmPkplBqWsJTNp8VZ98rUqwdhU8nqmCPSp7e0geQAs6z3ZZzoJQ9WnNzDAUmF6NA0xem9KnkjHPpwT4K/anUl7lHOtZJ70VIyfScYxs1LvWMZYxZhWmcjlFxA8B/HD89zMArpm9SMaY5TjiyJjOsZIa0zlzT+mZjfNsQCs7EyoDPxvelYNBSTOpOLKUjAZVMEN25lTOneyAqubKfSonWXa4VEERFUp91jx25VzLbcrukereK8621iAExUm3e/fuFce5fgww6Tir7tnQ59y7YIzZxFhJjekcK6kxnbPw+qRKoELuUy3eKyUDsi2lBEUo9mfVVgXP5/krG0jJuqAE4Wd7VwnUBzSfgJIdT8nwoNitLUHwVT8lMD/bnwBwwQUXrDiuAjemKRmxGrZJjdnEWEmN6RwrqTGdYyU1pnMWXmYiO1yUcgiKYa7scGktRaHUJ1VKDVRBCEq5CiWYIc+vpkVV7pGyeyUzS6eQMraSZjOnAt21a9dgn+p9Ve7ZkHxr4SepMZ1jJTWmc6ykxnTO3APssx2iBNjn7/wti8XV3BVKMENlk2Y7sZor226KTVoF6ue5KjtJKTOhlp7IKLakEqSivB+tNqlSMjHboDt37pzok8+rZG4pzekAe2PeRlhJjekcK6kxnWMlNaZz5u44GkJJvag4KlpRHC6tzgxFxtynclRkGZXgCmXnDqA5jlpQaqFWtARXAJMOn6qGaXYcVX2U3VVKoIjiJFsNP0mN6RwrqTGdYyU1pnPmHmA/lNpfSfXfWh5BSe2fx6nsjdYA/zyfUjKwutZsgyrZ6dTABcXeVcZW5lLuo1IKQgmer7Iu5CwLSkkRxWZXAnKmsVH9JDWmc6ykxnSOldSYzrGSGtM53KjF63Iy8r8B/BLAbwH4n7lNPBs2o8zA5pT7nSjzb0fEe6sX5qqkb05KHo2I/XOfeB1sRpmBzSm3ZV6Jv+4a0zlWUmM6Z1FKenhB866HzSgzsDnltszLWIhNaozR8dddYzpn7kpK8gaST5M8TvLQvOdXIPk1kidJPr6s7SKS95M8Nv594SJlzJC8lOSDJJ8k+QTJW8bt3cpNconkwyR/Opb5i+P2y0k+NP6MfIvkZFDtgiG5heSjJO8bH2+YzHNVUpJbAPwjgD8CcDWAm0hePU8ZRL4O4IbUdgjAAxFxBYAHxsc9cRbA5yPiagAfBPDn43vbs9ynAVwXEb8H4P0AbiD5QQB3ALgzIt4H4NcAbl6ciKtyC4Cnlh1vnMwRMbcfAB8C8INlx7cBuG2eMkwh62UAHl92/DSAveO/9wJ4etEyDsh/D4DrN4vcAM4H8AiA38coKGBr9Znp4QfAPoz+4V0H4D4A3EiZ5/119xIAv1p2/Oy4bTNwcUScGP/9PICLFynMWpC8DMAHADyEzuUef218DMBJAPcD+AWAFyPi3B65Hj8jXwbwBQDn9ui9Bxsosx1HDcTo32WXbnGSOwF8F8BnI+Kl5a/1KHdEvBER78fo6XQNgKsWK9HakPwogJMR8ZN5zTnvRGTPAbh02fG+cdtm4AWSeyPiBMm9GP3n7wqS2zBS0G9ExPfGzd3LDQAR8SLJBz
H6qriH5Nbxk6m3z8i1AD5G8kYASwB2A/gKNlDmeT9JfwzgirEnbDuATwG4d84ytHIvgAPjvw9gZPN1A0db/+8C8FREfGnZS93KTfK9JPeM/z4PIxv6KQAPAvjkuFtXMkfEbRGxLyIuw+jz++8R8RlspMwLMLpvBPBzjGyPv160E2AVGb8J4ASA1zGyL27GyO54AMAxAP8G4KJFy5lk/gOMvsr+DMBj458be5YbwO8CeHQs8+MA/mbc/jsAHgZwHMB3AOxYtKyryP9hAPdttMyOODKmc+w4MqZzrKTGdI6V1JjOsZIa0zlWUmM6x0pqTOdYSY3pHCupMZ3z//tAbTd9FHA/AAAAAElFTkSuQmCC\n",
|
| 392 |
+
"text/plain": [
|
| 393 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 394 |
+
]
|
| 395 |
+
},
|
| 396 |
+
"metadata": {
|
| 397 |
+
"needs_background": "light"
|
| 398 |
+
},
|
| 399 |
+
"output_type": "display_data"
|
| 400 |
+
}
|
| 401 |
+
],
|
| 402 |
+
"source": [
|
| 403 |
+
"smp_num = 8\n",
|
| 404 |
+
"print(\"Test\")\n",
|
| 405 |
+
"print(y_test[smp_num])\n",
|
| 406 |
+
"print(yhat_test[smp_num])\n",
|
| 407 |
+
"plt.imshow(\n",
|
| 408 |
+
" (x1_test[smp_num].reshape((frame_h, frame_w)) * 255)\n",
|
| 409 |
+
" .astype(np.uint8),\n",
|
| 410 |
+
" cmap=\"gray\",\n",
|
| 411 |
+
" vmin=0,\n",
|
| 412 |
+
" vmax=255\n",
|
| 413 |
+
" )\n",
|
| 414 |
+
"plt.show()"
|
| 415 |
+
]
|
| 416 |
+
},
|
| 417 |
+
{
|
| 418 |
+
"cell_type": "code",
|
| 419 |
+
"execution_count": 17,
|
| 420 |
+
"id": "a711848e",
|
| 421 |
+
"metadata": {},
|
| 422 |
+
"outputs": [
|
| 423 |
+
{
|
| 424 |
+
"data": {
|
| 425 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAzWElEQVR4nO3deVxVdf7H8deXHQR3RBAQUTbF1NyyXHBfWjSsbF8np2UcK9trZqppGy3by7Gycto0wzJzzQ13U1xAWVQUxBVEQUXW+/39cW6DP0cE5cK59/J5Ph48HnDu4dzPEX17+J7v+X6U1hohhBCOz8XsAoQQQtiGBLoQQjgJCXQhhHASEuhCCOEkJNCFEMJJuJn1xi1bttRhYWFmvb0QQjikLVu25Gmt/S/0mmmBHhYWxubNm816eyGEcEhKqayqXpMhFyGEcBIS6EII4SSqDXSllJdSapNSartSaqdS6uUL7POEUmqXUmqHUmqZUqpt3ZQrhBCiKjUZQy8BBmmtTyul3IE1SqmFWusN5+yzFeihtS5SSj0MTAbG1UG9QggHV1ZWRk5ODsXFxWaXYte8vLwIDg7G3d29xt9TbaBrY7GX09Yv3a0f+rx9Vpzz5QbgzhpXIIRoUHJycvDz8yMsLAyllNnl2CWtNcePHycnJ4d27drV+PtqNIaulHJVSm0DjgFLtdYbL7L7A8DCKo4zXim1WSm1OTc3t8ZFCiGcR3FxMS1atJAwvwilFC1atLjk32JqFOha6wqtdVcgGOillIqtoog7gR7AlCqOM11r3UNr3cPf/4LTKIUQDYCEefUu58/okma5aK1PAiuAERd48yHAC8ANWuuSS66khnJPlfDq/F3knymtq7cQQgiHVJNZLv5KqabWz72BoUDaeft0A/6NEebH6qDO/1q3N48Za/cxYPIKPl65h+Kyirp8OyGEE/L19TW7hDpRkyv0QGCFUmoH8DvGGPp8pdQrSqkbrPtMAXyBH5RS25RS8+qoXkZ3bcOSx/vTO7wFkxelM/CtlczZkkOFRRp1CCEatmoDXWu9Q2vdTWt9hdY6Vmv9inX737XW86yfD9FaB2itu1o/brj4UWunQys/PrunB9+Pv4pWfp48+cN2rvtgDYkZcqNVCFFzWmueeuopYmNj6dy5M7NmzQLg8OHD9O/fn65duxIbG8vq1aupqKjg3nvv/e++77zzjsnV/y/T1nKxhavCWzD3kWv4NfkwkxencfeMTfSLaMlzI2PoGNTY7PKEENV4+Zed7DpUaNNjdgxqzD+u71SjfRMSEti2bRvbt28nLy+Pnj170r9/f7799luGDx/OCy+8QEVFBUVFRWzbto2DBw+SkpICwMmTJ21aty04/KP/Li6K67sE8dsTA/jbdR1JPljAtR+sZtLs7Rw6edbs8oQQdmzNmjXcdtttuLq6EhAQwIABA/j999/p2bMnX3zxBS+99BLJycn4+fkRHh5OZmYmEyZMYNGiRTRubH8XjQ59hX4uTzdXHujbjpuuDObjlXv4Yt1+5u84xP192/FwXHsae9X8aSshRP2o6ZV0fevfvz+JiYn8+uuv3HvvvTzxxBPcfffdbN++ncWLFzNt2jRmz57NjBkzzC71/3H4K/TzNfFx57lRMSyfNIBrOwfyycq9DJi8gi/W7qO03GJ2eUIIO9KvXz9mzZpFRUUFubm5JCYm0qtXL7KysggICODBBx/kT3/6E0lJSeTl5WGxWBg7diyvvvoqSUlJZpf/P5zmCv18wc18mDquK/f3bccbC1N5+ZddfLluP08Pj2ZU59byYIMQghtvvJH169fTpUsXlFJMnjyZ1q1b89VXXzFlyhTc3d3x9fVl5syZHDx4kPvuuw+LxbgwfOONN0yu/n8pY6mW+tejRw9dXw0utNasysjljQVppB89RdeQprxwbQw9w5rXy/sLISqlpqYSExNjdhkO4UJ/VkqpLVrrHhfa3+mGXC5EKUVcVCsWTOzH5L
FXcLjgLDdPW8/4mZvZm3u6+gMIIYQDaBCB/gdXF8UtPUNY+eRAnhwWybq9xxn2TiIv/pRM7qk6W61ACCHqRYMK9D94e7jyl0ERrHwqjjt6h/L9pgPETVnB+8t2U1RabnZ5QghxWRpkoP+hpa8nr4yOZcnj/ekX4c/UpRnETVnJ95uyZSkBIYTDadCB/odwf1+m3dWdOQ/1IbiZN88mJDPyvURWpB3DrJvGQghxqSTQz9EjrDk/Pnw1n9xxJaXlFu778ndu/3QjyTkFZpcmhBDVkkA/j1KKkZ0DWfrEAF6+oRPpR09x/YdrmPj9Vg7kF5ldnhBCVEkCvQruri7cc3UYK5+K49GB7VmUcoTBb6/i9QWpFBSVmV2eEKKeXGzt9P379xMbe8EGbqaQQK9GYy93nhoezcqn4hjdNYhPV2fSf8oKPk3MpKRcmmsIIeyH0z76b2uBTbyZcnMX7u/bjjcXpvHaglS+Wr+fp4ZHcf0VQbi4yFICQlyyhc/CkWTbHrN1Zxj5ZpUvP/vss4SEhPDoo48C8NJLL+Hm5saKFSs4ceIEZWVlvPrqq4wePfqS3ra4uJiHH36YzZs34+bmxtSpUxk4cCA7d+7kvvvuo7S0FIvFwo8//khQUBC33HILOTk5VFRU8Le//Y1x48bV6rShZi3ovJRSm5RS25VSO5VSL19gH0+l1Cyl1B6l1EalVFitK7NTMYGN+er+Xnz9QG8ae7kz8fttjPl4Lev3Hje7NCFEDYwbN47Zs2f/9+vZs2dzzz33MHfuXJKSklixYgWTJk265BluH330EUopkpOT+e6777jnnnsoLi5m2rRpTJw4kW3btrF582aCg4NZtGgRQUFBbN++nZSUFEaM+J82zZelJlfoJcAgrfVppZQ7sEYptVBrveGcfR4ATmitOyilbgX+BdT+vxs71jeiJfMn9OWnbQd5a3E6t326gcHRrXh2ZDQRAX5mlyeEY7jIlXRd6datG8eOHePQoUPk5ubSrFkzWrduzeOPP05iYiIuLi4cPHiQo0eP0rp16xofd82aNUyYMAGA6Oho2rZtS0ZGBn369OG1114jJyeH+Ph4IiIi6Ny5M5MmTeKZZ57huuuuo1+/fjY5t5q0oNNa6z8WPHG3fpz/X9do4Cvr53OAwaoBLGfo4qKIvzKY5U/G8ezIaDbtz2f4u4k8l7CDY4XFZpcnhKjCzTffzJw5c5g1axbjxo3jm2++ITc3ly1btrBt2zYCAgIoLrbNv+Hbb7+defPm4e3tzahRo1i+fDmRkZEkJSXRuXNnXnzxRV555RWbvFeNbooqpVyVUtuAYxhNojeet0sb4ACA1rocKABaXOA445VSm5VSm3Nznaf/p5e7Kw8NaE/iUwO59+p2zNmSw4ApK5m6NIPTJbKUgBD2Zty4cXz//ffMmTOHm2++mYKCAlq1aoW7uzsrVqwgKyvrko/Zr18/vvnmGwAyMjLIzs4mKiqKzMxMwsPD+etf/8ro0aPZsWMHhw4dwsfHhzvvvJOnnnrKZmur1yjQtdYVWuuuQDDQSyl1WfN0tNbTtdY9tNY9/P39L+cQdq1ZIw/+fn1HfntiAINjWvH+st3ETVnJ1xuyKK+Q5hpC2ItOnTpx6tQp2rRpQ2BgIHfccQebN2+mc+fOzJw5k+jo6Es+5iOPPILFYqFz586MGzeOL7/8Ek9PT2bPnk1sbCxdu3YlJSWFu+++m+TkZHr16kXXrl15+eWXefHFF21yXpe8HrpS6u9Akdb6rXO2LQZe0lqvV0q5AUcAf32Rg9fneuhm2Zp9gjcWpLFpfz7h/o14dkQ0QzsGSHMN0aDJeug1Z/P10JVS/kqpptbPvYGhQNp5u80D7rF+fhOw/GJh3lB0C23GrD9fxad390AB4/+zhXH/3sDW7BNmlyaEcEI1meUSCHyllHLF+A9gttZ6vlLqFWCz1noe8DnwH6XUHiAfuLXOKnYwSimGdgxgYJQ/szYf4J2lu7nx43Vce0UgTw
+Pom2LRmaXKISoRnJyMnfdddf/2+bp6cnGjeffTjRXg2hBZ09Ol5TzaWIm0xMzKbdYuPOqtkwYFEHzRh5mlyZEvUhNTSU6OlqGHquhtSYtLU1a0NkzX083Hh8ayaqn4ripezBfrdvPgCkr+GTlXorLZCkB4fy8vLw4fvy4LE19EVprjh8/jpeX1yV9n1yhm2z30VO8uTCNZWnHCGrixaRhUdzYrY0sJSCcVllZGTk5OTab5+2svLy8CA4Oxt3d/f9tv9gVugS6nVi/9zivL0gl+WABHQMb89yoaPpFON/UTiFE7ciQiwPo074FPz96De/f1o3C4jLu+nwTd8/YROrhQrNLE0I4CAl0O+LiorihSxDLJg3gxWtj2H7gJKPeX82TP2zncMFZs8sTQtg5GXKxYwVFZXy0cg9frt2PUvBA33Y8FNeexl7u1X+zEMIpyRi6gzuQX8TbS9L5adshmjfyYOLgCG7rFYqHm/yCJURDI2PoDi6kuQ/v3tqNX/7Sl6gAP/4xbyfD3lnFwuTDMvVLCPFfEugOpHNwE759sDdf3NsTDzcXHv4mibGfrGPz/nyzSxNC2AEJdAejlGJgdCsWTuzPv8Z2JufEWW6atp4//2czmbmnqz+AEMJpyRi6gysqLefz1fuYtmovxeUWbu8VysQhEbT09TS7NCFEHZCbog1A7qkS3l+2m283ZePt7spDA8J5oG843h6uZpcmhLAhuSnaAPj7efLPMbEsebw/V7dvwVtLMoh7awWzfz9AhUVunArREEigO5n2/r5Mv7sHPzzUh6Cm3jz94w5GvbeaFenHZEaMEE5OAt1J9QxrTsLDV/PxHVdSXF7BfV/8zh2fbSTlYIHZpQkh6ogEuhNTSjGqcyBLHx/AS9d3JPVwIdd9sIbHZ20j50SR2eUJIWysJi3oQpRSK5RSu5RSO5VSEy+wTxOl1C9Kqe3Wfe6rm3LF5fBwc+Hea9qx6umBPBLXngXJhxn09ireWJBKQVGZ2eUJIWyk2lkuSqlAIFBrnaSU8gO2AGO01rvO2ed5oInW+hmllD+QDrTWWpdWdVyZ5WKeQyfP8vaSDBK25tDYy50JgzpwV5+2eLrJjBgh7F2tZrlorQ9rrZOsn58CUoE25+8G+Cmjp5QvRl/R8lpVLepMUFNv3r6lC79O6EeXkKa8+msqQ6auYt72Q1hkRowQDuuS5qErpcKARCBWa114znY/YB4QDfgB47TWv17g+8cD4wFCQ0O7Z2Vl1ap4YRurd+fy+oI0Ug8X0iW4Cc+NiuGq8BZmlyWEuACbPFiklPIFVgGvaa0TznvtJuAa4AmgPbAU6HJu6J9PhlzsS4VF89PWg7y1JJ3DBcUMiWnFMyOiiQjwM7s0IcQ5av1gkVLKHfgR+Ob8MLe6D0jQhj3APoyrdeEgXF0UY7sHs+LJOJ4eEcXGzHyGv5vIcwnJHCuU3o9COIKazHJRwOdAqtZ6ahW7ZQODrfsHAFFApq2KFPXHy92VR+I6sOrpgdzdJ4wfNh8g7q2VvLM0gzMlcltECHtWk1kufYHVQDJgsW5+HggF0FpPU0oFAV8CgYAC3tRaf32x48qQi2PYn3eGKYvT+TX5MC19PXl8aATjeoTg5iqPMAhhBlmcS9Ta1uwTvL4gld/3n6C9fyOeHRnDkJhWGL/ACSHqiyzOJWqtW2gzZv+5D9Pv6o4GHpy5mXHTN7D9wEmzSxNCWEmgixpTSjGsU2sWP9aff46JJTP3NKM/Wstfvk0i+7gsJSCE2WTIRVy20yXlTF+1l09X76PcYuGuq8KYMKgDzRp5mF2aEE5LxtBFnTpaWMw7SzOYvfkAjTzdeHRgB+69Ogwvd1lKQAhbkzF0UacCGnvx5tgrWPRYf3qGNefNhWkMfnsVCUk5spSAEPVIAl3YTGSAHzPu7cm3D/amWSN3npi9nes/XMPaPXlmlyZEgyCBLmzu6vYtmfdoX967tSsni8q447ON3DNjE2lHqlwJQghhAxLook
64uChGd23DskkDeGFUDFuzTzDyvdU89cN2jhTIUgJC1AW5KSrqxcmiUj5asYev1mXh4gIP9G3HQwPa4+flbnZpQjgUmeUi7MaB/CLeWpLOz9sO0aKRBxOHRHBbr1DcZSkBIWpEZrkIuxHS3If3bu3GvL9cQ0SAL3//eSfD3klkUcphzLq4EMJZSKALU1wR3JTvHryKGff2wM1F8dDXSdw0bT1bsvLNLk0IhyWBLkyjlGJQdAALJ/bjzfjOHMgvYuwn63noP1vYl3fG7PKEcDgyhi7sRlFpOZ+t3se/V+2lpNzCHb1D+evgCFr4eppdmhB2Q26KCoeSe6qE95Zl8N2mA3i7u/JwXHvuv6Yd3h6ylIAQclNUOBR/P09eHdOZxY/1p0/7FkxZnM7At1Yye/MBKmQpASGqVJMWdCFKqRVKqV1KqZ1KqYlV7BenlNpm3WeV7UsVDU2HVr58encPZv+5D62bePH0nB1c+/5qVqYfkxkxQlxATVrQBQKBWuskpZQfsAUYo7Xedc4+TYF1wAitdbZSqpXW+tjFjitDLuJSaK35Nfkwkxelk51fxDUdWvDcyBhi2zQxuzQh6lWthly01oe11knWz08BqUCb83a7HUjQWmdb97tomAtxqZRSXHdFEL89MYC/X9eRnYcKuf7DNTwxaxsHT541uzwh7MIl3RRVSoUBiUCs1rrwnO3vAu5AJ8APeE9rPfMC3z8eGA8QGhraPSsrqza1iwas4GwZn6zcy4y1+wC475owHonrQBNvWUpAODebzHJRSvkCq4DXtNYJ5732IdADGAx4A+uBa7XWGVUdT4ZchC0cPHmWt5ekM3frQZp4uzNhUAR3XhWKp5vMiBHOqdazXJRS7sCPwDfnh7lVDrBYa31Ga52HcRXf5XILFqKm2jT1ZuotXZk/oS+d2zThn/N3MWTqKn7ZfkhunIoGpyazXBTwOZCqtZ5axW4/A32VUm5KKR+gN8ZYuxD1olNQE/7zQG++ur8XjTzcmPDdVsZ8tJaNmcfNLk2IeuNWg32uAe4CkpVS26zbngdCAbTW07TWqUqpRcAOwAJ8prVOqYN6hbioAZH+9O3QkoSkHN5eksG46RsYEhPAsyOj6NDKz+zyhKhT8qSocFpnSyuYsXYfn6zcy9myCsb1DOGxIRG08vMyuzQhLps8+i8atOOnS/hg+R6+3pCFh5sL4/uH82C/cBp51uQXVCHsiwS6EMC+vDNMWZzGguQj+Pt58viQSG7pEYybNNcQDkTWchECaNeyER/f0Z0fH76a0OY+PD83mRHvrea3XUdlRoxwChLoosHp3rYZcx7qw7Q7u1Nh0fxp5mZunb6BHTknzS5NiFqRQBcNklKKEbGtWfJ4f/45uhN7jp3mhg/XMuG7rRzILzK7PCEui4yhCwGcKi5jemImn67OxGKBu/q0ZcKgDjT18TC7NCH+H7kpKkQNHSko5p2lGfyw5QC+nm48OrAD91wdhpe7LCUg7IPcFBWihlo38eJfN13Bgon9uLJtM95YmMbgt1fx09aDWKS5hrBzEuhCXEB068Z8eV8vvvlTb5r6uPPYrG3c8NEa1u3JM7s0IaokgS7ERVzToSW//KUv747ryokzZdz+2Ubu/WIT6UdOmV2aEP9DAl2Iari4KMZ0a8OySQN4flQ0SVknGPleIk/P2c6RgmKzyxPiv+SmqBCX6MSZUj5csYeZ6/fj6qL4U99w/jwgHD8vaa4h6p7MchGiDhzIL2Ly4nR+2X6IFo08eGxIBLf2CsVdlhIQdUhmuQhRB0Ka+/DBbd34+dFr6NDKl7/9vJPh7ySyKOWILCUgTCGBLkQtdQlpyvfjr+Lze3rg4qJ46Ost3DxtPVuyTphdmmhgHC/QT+dC0kwoyje7EiH+SynF4JgAFk3sxxvxncnKL2LsJ+t4+Ost7Ms7Y3Z5ooGoSQu6EKXUCqXULqXUTqXUxIvs21MpVa6Uusm2ZZ5j92KYNwHeioRvboZt30FxQZ29nR
CXws3Vhdt6hbLyyTgeGxLBqoxchk5dxUvzdnL8dInZ5QknV+1NUaVUIBCotU5SSvkBW4AxWutd5+3nCiwFioEZWus5FzvuZd8U1RoOb4OUBNg5FwoOgKsnRAyFTjdC5Ajw9L304wpRB46dKubd33Yz6/cD+Li78lBcex7o206WEhCXzaazXJRSPwMfaq2Xnrf9MaAM6AnMr7NAP5fWkLMZUn6EXT/BqcPg5g2RwyE2HiKGgbt37d5DCBvYc+wUby5M57fUowQ28eKJoZHEXxmMq4syuzThYGwW6EqpMCARiNVaF56zvQ3wLTAQmEEVga6UGg+MBwgNDe2elZV1CadRDYsFstfDzgTY9TOcyQUPX4gaCZ3iocNgcPO03fsJcRk2ZB7njQWpbM8pILq1H8+NimFApL/ZZQkHYpNAV0r5AquA17TWCee99gPwttZ6g1LqS+rrCr0qFeWQtcYYlkmdB2dPgGcTiL4WYsdC+ABwlYdAhDm01szfcZjJi9M4kH+WfhEteXZkNJ2CmphdmnAAtQ50pZQ7MB9YrLWeeoHX9wF//O7YEigCxmutf6rqmPX2YFFFGWSuMq7cU+dDSQF4N4eY641hmbZ9wVWaBYv6V1Jewdcbsvlg+W4KzpZxY7c2TBoWRZumMkwoqlarQFdKKeArIF9r/VgN3uxLzL5Cr0p5Cexdboy5py+E0tPQyB86jjaGZUL7gIvjzeQUjq3gbBkfr9zDF2v3A3D/Ne14ZGB7GstSAuICahvofYHVQDJgsW5+HggF0FpPO2//L7HXQD9X2VnYvcQYlslYDOVnwS8QOo4xrtyDe4KSG1ai/uScKGLqkgwSth6kmY87EwZFcOdVbfFwk4sMUUnWcqlOyWnIWGRMg9y9FCpKoEkIdBpjXLkHdZNwF/Um5WABbyxMZe2e44Q29+HpEVFc2zkQJX8HBRLol6a4ENIXGFfue5eBpRyatTOu2jvFQ0AnCXdR57TWrMrI5c2FaaQdOUWXkKa8MCqGXu2am12aMJkE+uUqyoe0+Ua470sEXQEtI41gj40H/yizKxROrsKi+TEph7eXpHO0sIShHQN4ZkQ0HVrJw3MNlQS6LZzJM+a375wL+9cAGlp1gtgbjYBv0d7sCoUTO1tawYy1+/hk5V7OllVwa88QHhsSib+fPFvR0Eig29qpI0a4pyTAgQ3GtsAuRrB3uhGatTW3PuG08k6X8P6y3Xy7MRtPNxfG92/Pg/3b4eMhU28bCgn0ulSQAzt/Mua5H9xibGvTw3iAqdMYaBxkZnXCSWXmnmbK4nQWphzB38+TJ4ZGcnP3YNykuYbTk0CvL/n7jCGZnQlwJBlQxtz22HhjrrtvK7MrFE5mS1Y+r/2aSlL2SSJa+fLsyGgGRbeSGTFOTALdDHl7jGBPSYDcVFAuENbXGJaJuQEatTC7QuEktNYs3nmEfy1KZ1/eGa4Kb87zo2K4Irip2aWJOiCBbrZjqdblfhPg+B5QrhAeZ1y5R18H3k3NrlA4gbIKC99tyubd33aTf6aUG7oE8dTwKEKa+5hdmrAhCXR7obUxFPPHlfvJLHD1gPaDjXCPGgmefmZXKRzcqeIypq3ay2er96E13HN1Wx4d2IGmPh5mlyZsQALdHmkNh5IqG3UUHgQ3L2ujjnhjTXePRmZXKRzY4YKzTF2SwZykHPw83fjLoA7c3SdMmms4OAl0e2exQM4mI9x3/QSnj4K7j9F9KTYeOgwFdy+zqxQOKu1IIW8sSGNVRi5tmnrz9Igorr8iCBdpruGQJNAdiaUCstZVNuooOg4efhA9yrqW+0Bwk1+dxaVbszuP1xeksutwIZ3bNOG5UdFc3b6l2WWJSySB7qgqymF/orVRxy9QfBK8mkLMdcawTLsBspa7uCQWi+anbQd5a3E6hwqKGRTdimdHRhMZIPduHIUEujMoL4XMlcZa7mm/Qukp8GlhTIGMjYe214CLjI2Kmikuq+DLdfv5aMUezpSUc0uPEB4fGk
lAYxnas3cS6M6mrBj2/GYMy6QvgrIz4BtQ2agjpLc06hA1cuJMKR8s38N/NuzHzcWFB/u1Y/yA9vh6ym9+9koC3ZmVFsHuxcawzO4lUF4MjdtUNupo012W+xXVyj5exOTFaczfcZiWvh5MHBLJrT1DcJelBOxObTsWhQAzgQBAA9O11u+dt88dwDMYfUVPAQ9rrbdf7LgS6HWg5JRxxb4zwbiCryiFpqGVy/22vkLCXVzUtgMnef3XVDbtzyfcvxHPjIhmWMcAWUrAjtQ20AOBQK11klLKD9gCjNFa7zpnn6uBVK31CaXUSOAlrXXvix1XAr2OnT1pjLXvTDDG3i3l0Lz9OY06OppdobBTWmt+Sz3GmwtT2Zt7hp5hzXhuVAxXhjYzuzSBjYdclFI/Ax9qrZdW8XozIEVr3eZix5FAr0dF+ZA6zxiW2b8atAX8oyuv3FtGmF2hsEPlFRZmbT7AO0t3k3e6hGs7B/LU8CjCWsoDb2ayWaArpcKARCBWa11YxT5PAtFa6z9d4LXxwHiA0NDQ7llZWTV+b2Ejp49VNurIWgdoCOhc2aijeTuzKxR25nRJOZ8mZjI9MZNyi4U7erflr4MjaN5Inocwg00CXSnlC6wCXtNaJ1Sxz0DgY6Cv1vr4xY4nV+h2oPBQZaOOnE3GtqBulY06moaYW5+wK8cKi3nnt93M+j2bRh5uPDywPfdf006WEqhntQ50pZQ7MB9YrLWeWsU+VwBzgZFa64zqjimBbmdOZhtX7SkJcHibsS2ktzXcx4BfazOrE3Zk99FT/GtRGr+lHiOwiReThkVxY7c2uMpSAvWitjdFFfAVkK+1fqyKfUKB5cDdWut1NSlKAt2OHd9rbdQxF46mAMp4cCn2RogZDb7+Zlco7MD6vcd5Y2EqO3IKiAlszPOjoukXIX836lptA70vsBpIBizWzc8DoQBa62lKqc+AscAfg+LlVb3hHyTQHURuRuVyv3npRqOOdv2tjTquB5/mZlcoTGSxaOYnH2byojRyTpylX0RLnhsZQ8egxmaX5rTkwSJRe1rDsV2VjTryM8HFzVgsLHassXiYVxOzqxQmKSmv4D/rs/hg+R4Ki8uI7xbMpGGRBDX1Nrs0pyOBLmxLazi83XrlPhcKso1GHR2GGtMgI0eAp6/ZVQoTFBSV8fHKPXyxbj8KuL9vOx6Oa09jL3ezS3MaEuii7mgNOZuNcN/5E5w6BG7eEDnMGJaJGAYe0gKtock5UcRbi9P5adshmvm489fBEdzRuy0ebrKUQG1JoIv6YbHAgQ2VjTrO5IJ7I6O1Xmw8dBgCbp5mVynqUcrBAl5fkMq6vcdp28KHp4dHM6pza1lKoBYk0EX9s1TA/jXWRh3z4Gw+eDaB6GuNcA+PA1f5Nbwh0FqzMiOXNxekkX70FF1DmvLCtTH0DJMb6pdDAl2Yq6IM9q2yNuqYDyUF4N3MmCXTKR7C+kmjjgagwqL5cUsOby9N52hhCcM6BvDMyGja+8v9lkshgS7sR3kJ7F1uhHv6Aig9DT4tjbXcY+MhtI806nByRaXlzFizj09W7qW43MJtvUKYODgSfz8ZjqsJCXRhn8rOwu6lxrBMxmIoKwLf1saTqZ3iIbinNOpwYnmnS3jvt918uykbLzcXHhrQngf6tcPHQ35buxgJdGH/Ss9AxiJro46lUFECjYMrFw0L6iZruTupvbmnmbwojcU7jxLQ2JMnhkZyU/cQWUqgChLowrEUF0L6QmujjmVgKYNmYZXL/QbESrg7od/35/P6glS2Zp8kMsCX50bGEBflLzNiziOBLhzX2RPGjdSdCZC5CnQFtIiobNTRKtrsCoUNaa1ZmHKEyYvS2H+8iD7hLXh+VAydg+Up5D9IoAvncCbvnEYdawANrTpWXrm3aG92hcJGSsstfLsxi/eX7yH/TCmjuwbx5LAoQprLQ2oS6ML5nDpqbdSRANnrjW2trzDWlel0IzRra259wiYKi8uYtnIvn6/Zh9
Zw7zVhPBrXgSY+DfcZBgl04dwKcoxlB3YmwMEtxrY2PYyr9o5joMlFuyEKB3C44CxvL8ngx6QcGnu5M2FQB+7q0xZPt4Y3xVUCXTQcJ/ZXNuo4ssPYFtrHGJbpOBr8AkwtT9TOrkOFvLkojcSMXIKbefPU8CiuvyIIlwY0I0YCXTRMeXusjToSjKV/lYu1UUe80aijUQuzKxSXafXuXF5fkEbq4UKuCG7CcyNj6NO+Yfw8JdCFOJZW2ajj+G5QrhA+wNqo4zpjKQLhUCwWzdytB3l7STqHCooZHN2KZ0dGExHgZ3Zpdaq2HYtCgJlAAKCB6Vrr987bRwHvAaOAIuBerXXSxY4rgS5MobXRVi8lAVJ+hJNZ4OIOHQYb4R41Eryk244jKS6r4Iu1+/l4xR7OlJYzrmcIjw+JpFVjL7NLqxO1DfRAIFBrnaSU8gO2AGO01rvO2WcUMAEj0HsD72mte1/suBLownRaw6Ekaxemn6AwB1w9IeKcRh0ejcyuUtRQ/plSPli+m683ZOHm4sKD/cMZ3z8cX0/nWkrApkMuSqmfgQ+11kvP2fZvYKXW+jvr1+lAnNb6cFXHkUAXdsVigZzfKxt1nD4C7j4QOdzaqGMouEs7NUeQdfwMkxel82vyYVr6evLYkAhu7RmCm6tzrAtks0BXSoUBiUCs1rrwnO3zgTe11musXy8DntFabz7v+8cD4wFCQ0O7Z2VlIYTdsVQYc9tTEoy57kV54OFn9E3tFA/tB4Gbh9lVimpszT7B6wtS+X3/CcL9G/HsiGiGdgxw+KUEbBLoSilfYBXwmtY64bzXahTo55IrdOEQKsph/2pjvD31Fyg+aTTDjr7eWDis3QBp1GHHtNYs3XWUNxelkZl7hl5hzXluVDTdQh33JnitA10p5Q7MBxZrrade4HUZchHOr7wUMlcawzJpv0JJIXg3h443WBt19JW13O1UWYWFWb8f4N3fMsg7Xcq1VwTy9PAo2rZwvHsktb0pqoCvgHyt9WNV7HMt8Bcqb4q+r7XudbHjSqALh1ZWDHuXWRt1LISyM9CoVWWjjpCrZC13O3S6pJzpiZl8mphJucXCnVe1ZcKgCJo3cpwhtNoGel9gNZAMWKybnwdCAbTW06yh/yEwAmPa4n0XG24BCXThREqLYPeSykYd5cXgF2Q06ogdC226y3K/duZoYTHvLM1g9uYDNPJ045G4Dtx3TRhe7vb/G5Y8WCREfSk5XdmoY89SqCiFpqHGgmGd4iGwi4S7Hck4eop/LUxjWdoxgpp4MWlYFDd2a2PXSwlIoAthhuICY6w9JQEyV4ClHJqHVy7326qjhLudWLc3jzcWpJF8sICOgY15flQMfSNaml3WBUmgC2G2onxjlszOBNiXCNoCLaMqG3X4R5pdYYNnsWh+2XGIKYvTyTlxlv6R/jw3MpqYQPt6clgCXQh7cjoXUn+GlLmQtRbQRlu9P8K9eTuzK2zQSsormLkuiw+W7+ZUSTljrwxm0rBIApvYx4NlEuhC2KvCw8bDSyk/Qs4mY1tQNyPYO90ITUPMra8BO1lUykcr9vDVuiyUggf6tuOhuPY09jL3uQMJdCEcwckDlcv9HtpqbAvuVdmoo3GgqeU1VAfyi3hrSTo/bztE80YeTBwcwW29QvFwM2daqgS6EI4mP9PaqGMuHE0GFLS92rhq7zgGfP3NrrDBSc4p4PUFqazPPE5YCx+eGRHNiNjW9b6UgAS6EI4sb7d1RcgEyE0zGnWE9bM26rgBfJqbXWGDobVmRfox3liQxu5jp7kytCnPj4qhR1j9/Qwk0IVwFkd3VTbqyN8LLm4QPtAI9+hrjXVmRJ0rr7AwZ0sOU5dmcOxUCcM7BfDMiGjC/X3r/L0l0IVwNlrD4e3W5X7nwslscPWADkOsjTpGgKdzd+6xB0Wl5Xy2eh//XrWX4nILt/cKZeKQCFr6etbZe0qgC+HMtIaDW6zDMnPh1CFw84KIYcaVe8Rw8PAxu0qnln
uqhPeWZfDdpgN4u7vy0IBwHugbjreH7ZcSkEAXoqGwWODAxspGHWeOgXsj44q9U7xxBe/unK3Z7MHe3NP8a2EaS3YdJaCxJ5OGRjG2ezCuNlxKQAJdiIbIUmE8uPRHo46z+eDZ2Bhr7xQP4XHSqKOObNqXz+sLUtl24CRRAX48OyqauEh/m8yIkUAXoqGrKIN9q4xpkGm/GOvMeDWFmOuNYZmw/uDqXL03zaa1ZkHyESYvTiPreBFXt2/B86NiiG1TuxvXEuhCiErlpbB3ubVRxwIoPQU+LSsbdbS9Whp12FBpuYVvNmbx/rLdnCgq48ZubZg0LJLgZpd3X0MCXQhxYWXFxjK/KQnGsr9lReDb2tqoYywE95RGHTZScLaMaav2MmPNPm7vHco/ru90WceRQBdCVK/0jNGgY2cCZCyBihJoHGxt1BEPQVfKcr82cOjkWbzdXWl2mV2SatuxaAZwHXBMax17gdebAF9jdDByA97SWn9RXVES6ELYseJCo7XezgTYswwsZdAsrLJRR+vOEu4mqW2g9wdOAzOrCPTngSZa62eUUv5AOtBaa116seNKoAvhIM6eOKdRx0rQFdCiwzmNOmLMrrBBuVigV3tbW2udqJQKu9gugJ+1r6gvkA+UX06hQgg75N0Mut1pfJw5DqnzjCv31W9B4mTwjzHG22PjoUV7s6tt0Go0hm4N9PlVXKH7AfOAaMAPGKe1/rWK44wHxgOEhoZ2z8rKuvzKhRDmOnXUCPeUBMheZ2xrfYW1UceNxhCNsLla3xStJtBvAq4BngDaA0uBLlrrwosdU4ZchHAiBQdh109GuB+0/rtu093aqGMMNAk2szqncrFAt8V8pPuABG3YA+zDuFoXQjQUTdpAn0fhwWUwcQcMedloir3kBXinE3w+HDb+27iqF3XGFoGeDQwGUEoFAFFApg2OK4RwRM3aQt/H4M+JMCEJBr0IJadg4dPwdhR8eR38/jmcyTO7UqdTk1ku3wFxQEvgKPAPwB1Aaz1NKRUEfAkEAgp4U2v9dXVvLEMuQjQwuemVjTryMkC5QvgAY1gm5jrj5quoljxYJISwH1rD0ZTKcD+xH1zcof0g44Zq1Cjwamx2lXZLAl0IYZ+0Nhpi/7Hcb8EBcPWEiKHGTJmokeDRyOwq7Uqt5qELIUSdUQraXGl8DHnFmCGTkmDMmEmbD27eEDnc2qhjGLh7m12xXZMrdCGE/bFYIHt95ZV7UR54+BrDMbHxxvCMW921ebNnMuQihHBcFeWwf7UR7qm/GEsReDYxbqR2ijdurLq6m11lvZFAF0I4h4oyYz2ZlARjSKakELybn9Ooo5/Tr+UugS6EcD7lJcZKkDsTjJUhS09DI39jLfdO8RDaxynXcpebokII5+PmCdGjjI+ys7B7iXHlvvUb+P0z8Asylh3oFA/BPRrEcr9yhS6EcC4lp43uSykJRjemilJoElrZqCOwq0OHuwy5CCEapuICo2/qzgSjj6qlHJqHVzbqCOjkcOEugS6EEEX5xo3UlATYl2g06mgZWdmowz/K7AprRAJdCCHOdSYPdv0MO+fC/jWAhoBY48o9Nt64irdTEuhCCFGVwsPWcE+AAxuNbYFdKxt1NA01tbzzSaALIURNnDxQ2ajjUJKxLbhnZaOOxkFmVgdIoAshxKXL32cMyexMgCPJgDLmtsfGG3PdfVuZUpYEuhBC1EbeHiPYUxIgNxWUi/FUamw8xNwAPs3rrRQJdCGEsJWjuyrDPX8vuLhBeJwxLBN9LXg3rdO3r1WgK6VmANcBxy7UJNq6TxzwLkYnozyt9YDqipJAF0I4NK3hyI7KRh0ns8HVA9oPtjbqGAmefjZ/29oGen/gNDDzQoGulGoKrANGaK2zlVKttNbHqitKAl0I4TS0hoNJ1uV+50LhQXDzsjbqiIfIEeDhY5O3qtVaLlrrRKVU2EV2uR1I0FpnW/evNsyFEMKpKAXB3Y2Pof+EnE2VjTpSfwF3H+OKvV
M8dBgC7l51UoYtFueKBNyVUisBP+A9rfXMC+2olBoPjAcIDbWvuZ1CCGETLi4QepXxMeINyFprhHvqPEj5ETwbw4Bn4Oq/2PytbRHobkB3YDDgDaxXSm3QWmecv6PWejowHYwhFxu8txBC2C8XV2jX3/gY9RbsW2UMy9TRfHZbBHoOcFxrfQY4o5RKBLoA/xPoQgjRYLm6QYfBxkcdscXq7z8DfZVSbkopH6A3kGqD4wohhLgE1V6hK6W+A+KAlkqpHOAfGNMT0VpP01qnKqUWATsAC/CZ1jql7koWQghxITWZ5XJbDfaZAkyxSUVCCCEui/M13BNCiAZKAl0IIZyEBLoQQjgJCXQhhHASEuhCCOEkTFs+VymVC2Rd5re3BPJsWI4jkHNuGOScG4banHNbrbX/hV4wLdBrQym1uarVxpyVnHPDIOfcMNTVOcuQixBCOAkJdCGEcBKOGujTzS7ABHLODYOcc8NQJ+fskGPoQggh/pejXqELIYQ4jwS6EEI4CbsOdKXUCKVUulJqj1Lq2Qu87qmUmmV9fWM1vU8dQg3O+Qml1C6l1A6l1DKlVFsz6rSl6s75nP3GKqW0Usrhp7jV5JyVUrdYf9Y7lVLf1neNtlaDv9uhSqkVSqmt1r/fo8yo01aUUjOUUseUUhdcTlwZ3rf+eexQSl1Z6zfVWtvlB+AK7AXCAQ9gO9DxvH0eAaZZP78VmGV23fVwzgMBH+vnDzeEc7bu5wckAhuAHmbXXQ8/5whgK9DM+nUrs+uuh3OeDjxs/bwjsN/sumt5zv2BK4GUKl4fBSwEFHAVsLG272nPV+i9gD1a60ytdSnwPTD6vH1GA19ZP58DDFZKqXqs0daqPWet9QqtdZH1yw1AcD3XaGs1+TkD/BP4F1Bcn8XVkZqc84PAR1rrEwBa62P1XKOt1eScNdDY+nkT4FA91mdzWutEIP8iu4wGZmrDBqCpUiqwNu9pz4HeBjhwztc51m0X3EdrXQ4UAC3qpbq6UZNzPtcDGP/DO7Jqz9n6q2iI1vrX+iysDtXk5xwJRCql1iqlNiilRtRbdXWjJuf8EnCntTPaAmBC/ZRmmkv9914tWzSJFiZQSt0J9AAGmF1LXVJKuQBTgXtNLqW+uWEMu8Rh/BaWqJTqrLU+aWZRdew24Eut9dtKqT7Af5RSsVpri9mFOQp7vkI/CISc83WwddsF91FKuWH8mna8XqqrGzU5Z5RSQ4AXgBu01iX1VFtdqe6c/YBYYKVSaj/GWOM8B78xWpOfcw4wT2tdprXeB2RgBLyjqsk5PwDMBtBarwe8MBaxclY1+vd+Kew50H8HIpRS7ZRSHhg3Peedt8884B7r5zcBy7X1boODqvaclVLdgH9jhLmjj6tCNeestS7QWrfUWodprcMw7hvcoLXebE65NlGTv9s/YVydo5RqiTEEk1mPNdpaTc45GxgMoJSKwQj03Hqtsn7NA+62zna5CijQWh+u1RHNvhNczV3iURhXJnuBF6zbXsH4Bw3GD/wHYA+wCQg3u+Z6OOffgKPANuvHPLNrrutzPm/flTj4LJca/pwVxlDTLiAZuNXsmuvhnDsCazFmwGwDhpldcy3P9zvgMFCG8RvXA8BDwEPn/Iw/sv55JNvi77U8+i+EEE7CnodchBBCXAIJdCGEcBIS6EII4SQk0IUQwklIoAshhJOQQBdCCCchgS6EEE7i/wBS80RvxTwM8QAAAABJRU5ErkJggg==\n",
|
| 426 |
+
"text/plain": [
|
| 427 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 428 |
+
]
|
| 429 |
+
},
|
| 430 |
+
"metadata": {
|
| 431 |
+
"needs_background": "light"
|
| 432 |
+
},
|
| 433 |
+
"output_type": "display_data"
|
| 434 |
+
}
|
| 435 |
+
],
|
| 436 |
+
"source": [
|
| 437 |
+
"loss = results.history[\"loss\"]\n",
|
| 438 |
+
"val_loss = results.history[\"val_loss\"]\n",
|
| 439 |
+
"plt.plot(loss, label=\"loss\")\n",
|
| 440 |
+
"plt.plot(val_loss, label=\"val_loss\")\n",
|
| 441 |
+
"plt.legend()\n",
|
| 442 |
+
"plt.show()"
|
| 443 |
+
]
|
| 444 |
+
},
|
| 445 |
+
{
|
| 446 |
+
"cell_type": "code",
|
| 447 |
+
"execution_count": 18,
|
| 448 |
+
"id": "1bcd977b",
|
| 449 |
+
"metadata": {},
|
| 450 |
+
"outputs": [
|
| 451 |
+
{
|
| 452 |
+
"data": {
|
| 453 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA48AAAOjCAYAAAAbBzFCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAoLklEQVR4nO3debhkd13n8c+XNM2aREKCJJ0OSRDQToAAAWQVcAuJmaDDooAs4hNRERHRYUYfGHHwGWTXgZkBYZRFhJYtBEziODAsItmBLBAiCaTTcbLJErYmzW/+qOpwabv7e9NJdd3cer2e5z7cOufUOd97yVO575xTp2qMEQAAANiVW817AAAAAFY+8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8ArDqVdXtquoDVfXVqtp4E/bzlKo67eacbR6q6u+q6unzngOAWxbxCMCKUVVPrqozq+q6qrpiGjkPvxl2/fgkP5zkzmOMJ+zuTsYYbx9j/MzNMM8PqKpHVdWoqvdut/y+0+UfWeZ+/nNVva3bbozx2DHGX+3muAAsKPEIwIpQVc9P8pokf5JJ6B2S5PVJTrgZdn+3JBeNMa6/GfY1K1cleUhV3XnJsqcnuejmOkBN+Hc/ALvFv0AAmLuq2jfJS5L85hjjPWOMb4wxvjvG+MAY4/em29ymql5TVZunX6+pqttM1z2qqjZV1e9W1ZXTs5bPnK77oyQvSvKk6RnNZ21/hq6qDp2e4VszffyMqvpiVX29qi6pqqcsWf7xJc97aFWdMb0c9oyqeuiSdR+pqj+uqk9M93NaVe2/i1/DliTvS/KL0+fvleRJSd6+3e/qtVV1WVV9rarOqqpHTJcfk+Q/Lfk5P71kjpdW1SeSfDPJ4dNlvzpd/9+r6t1L9v+yqvqHqqrl/v8HwGIQjwCsBA9Jctsk793FNn+Q5MeTHJXkvkkelOQPl6y/a5J9k6xL8qwkr6uqO40xXpzJ2cx3jjHuOMZ4064Gqao7JPmzJI8dY+yd5KFJzt3Bdvsl+eB02zsneVWSD2535vDJSZ6Z5C5J1iZ5wa6OneQtSZ42/f5nk5yXZPN225yRye9gvyR/nWRjVd12jHHKdj/nfZc855eTnJhk7yRf2m5/v5vk3tMwfkQmv7unjzFGMysAC0Y8ArAS3DnJ1c1lpU9J8pIxxpVjjKuS/FEmUbTNd6frvzvG+FCS65Lcazfn+V6SI6vqdmOMK8YY5+9gm+OSfGGM8dYxxvVjjHck+VyS45ds87/GGBeNMb6V5F2ZRN9OjTH+Mcl+VXWvTCLyLTvY5m1jjGumx3xlktuk/zn/coxx/vQ5391uf9/M5Pf4qiRvS/JbY4xNzf4AWEDiEYCV4Jok+2+7bHQnDsoPnjX70nTZDfvYLj6/meSON3aQMcY3Mrlc9NlJrqiqD1bVjy5jnm0zrVvy+F92Y563JnlOkkdnB2diq+oFVXXh9FLZr2RytnVXl8MmyWW7WjnG+FSSLyapTCIXAP4N8QjASvDJJN9J8rhdbLM5kxvfbHNI/u0lncv1jSS3X/L4rktXjjFOHWP8dJIDMzmb+MZlzLNtpst3c6Zt3prkN5J8aHpW8AbTy0p/P8kTk9xpjPFDSb6aSfQlyc4uNd3lJahV9ZuZnMHcPN0/APwb4hGAuRtjfDWTm9q8rqoeV1W3r6pbV9Vjq+pPp5u9I8kfVtUB0xvPvCiTyyx3x7lJHllVh0xv1vMft62oqh+uqhOm7338TiaXv35vB/v4UJJ7Tj9eZE1VPSnJhiQn7+ZMSZIxxiVJfiKT93hub+8k12dyZ9Y1VfWiJPssWf//khx6Y+6oWlX3TPJfkjw1k8tXf7+qjtq96QFYzcQjACvC9P17z8/kJjhXZXKp5XMyuQNpMgmcM5N8Jslnk5w9XbY7x/r7JO+c7uus/GDw3Wo6x+Yk12YScr++g31ck+TnMrnhzDWZnLH7uTHG1bsz03b7/vgYY0dnVU
9NckomH9/xpSTfzg9ekrpx+r/XVNXZ3XGmlwm/LcnLxhifHmN8IZM7tr51251sAWCbcjM1AAAAOs48AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQWjPvAZbaf//9x90OPWTeYwAL7OyLzpv3CMCCu/89j5z3CMAC+9KlX87VV19dO1q3ouLxbocekk986uPzHgNYYLc75p7zHgFYcJ84xd9CwPw87MEP3+k6l60CAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8ciqc9opp+U+G47KEfe6d17+slfMexxgUY2R/NOVyblXz3sSYAH5e4hZEI+sKlu3bs3znvv8vP/k9+acz56Vje/cmAsvuHDeYwGL6MvXJXdYM+8pgAXk7yFmRTyyqpxx+pm5+90Pz2GHH5a1a9fmCU98fE4+6eR5jwUsmm9vTa7+TrLuDvOeBFhA/h5iVsQjq8rmzZtz8PqDb3i87uB1uXzzFXOcCFhIF30lucc+854CWFD+HmJWZhqPVXVMVX2+qi6uqhfO8lgAsCJc9a1k7V7JPmvnPQkA3Kxm9maMqtoryeuS/HSSTUnOqKqTxhgXzOqYcNBBB2XTZZtueHz5psuz7qAD5zgRsHC+umUSkFd/O/neSK4fyXnXJkfuN+/JgAXh7yFmZZZnHh+U5OIxxhfHGFuS/E2SE2Z4PMjRD3xALr74n3PpJZdmy5Yt2fiuv81xxx8377GARfIj+yaPODB5+F0nwbjfWuEI7FH+HmJWZnkbuHVJLlvyeFOSB8/weJA1a9bk1a99ZY4/9oRs3bo1T3/G07LhiA3zHgsAYI/x9xCzMvd7iFfViUlOTJL1h6yf8zSsBscce0yOOfaYeY8BkOx3m8kXwB7m7yFmYZaXrV6eZGkNHjxd9gPGGG8YYxw9xjj6gAP2n+E4AAAA7K5ZxuMZSe5RVYdV1dokv5jkpBkeDwAAgBmZ2WWrY4zrq+o5SU5NsleSN48xzp/V8QAAAJidmb
7ncYzxoSQfmuUxAAAAmL1ZXrYKAADAKiEeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABaa+Y9AMBK8q1TLpr3CMCCO+fq0+c9ArDAvvndb+x0nTOPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xC
MAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQOtGxWNV3amq7jOrYQAAAFiZ2nisqo9U1T5VtV+Ss5O8sapeNfvRAAAAWCmWc+Zx3zHG15L8QpK3jDEenOSnZjsWAAAAK8ly4nFNVR2Y5IlJTp7xPAAAAKxAy4nHlyQ5NcnFY4wzqurwJF+Y7VgAAACsJGu6DcYYG5NsXPL4i0n+/SyHAgAAYGXZaTxW1Z8nGTtbP8Z47kwmAgAAYMXZ1ZnHM/fYFAAAAKxoO43HMcZfLX1cVbcfY3xz9iMBAACw0izncx4fUlUXJPnc9PF9q+r1M58MAACAFWM5d1t9TZKfTXJNkowxPp3kkTOcCQAAgBVmOfGYMcZl2y3aOoNZAAAAWKHaj+pIcllVPTTJqKpbJ/ntJBfOdiwAAABWkuWceXx2kt9Msi7J5iRHTR8DAACwINozj2OMq5M8ZQ/MAgAAwAq1nLutHl5VH6iqq6rqyqp6f1UdvieGAwAAYGVYzmWrf53kXUkOTHJQko1J3jHLoQAAAFhZlhOPtx9jvHWMcf30621JbjvrwQAAAFg5dhqPVbVfVe2X5O+q6oVVdWhV3a2qfj/Jh/bciHDjnHbKabnPhqNyxL3unZe/7BXzHgdYQF6HgHl66fNelmOP+Pk85SeeOe9RWGV2debxrCRnJnlikl9L8uEkH0ny60meNPPJYDds3bo1z3vu8/P+k9+bcz57Vja+c2MuvMAnywB7jtchYN6OfdIxefU7XjbvMViFdnq31THGYXtyELg5nHH6mbn73Q/PYYdP/vF9whMfn5NPOjk/tuHH5jwZsCi8DgHzdr+H3DdXfPlf5j0Gq1D7UR1JUlVHJtmQJe91HGO8ZVZDwe7avHlzDl5/8A2P1x28LqeffuYcJwIWjdchAFar5XxUx4uT/Pn069FJ/jTJv1vG8948/WiP827ylAAAAMzVcu62+vgkP5nkX8YYz0xy3yT7LuN5f5nkmN0fDW68gw46KJsu23TD48s3XZ51Bx04x4mAReN1CIDVajnx+K0xxveSXF9V+yS5Msn67kljjI8mufYmzgc3ytEPfEAuvvifc+kll2bLli3Z+K6/zXHHHzfvsYAF4nUIgNVqOe95PLOqfijJGzO5A+t1ST45y6Fgd61Zsyavfu0rc/yxJ2Tr1q15+jOelg1HbJj3WMAC8ToEzNuLnv3HOecfz81Xrv1qTrjfE/Krv/eMHP9k/xGLm67GGMvfuOrQJPuMMT5zI7Y/eYxx5C62OTHJiUmy/pD1D7joi59b9jwAAKvNOVefPu8RgAX2Kz/za7nw05+vHa3b6ZnHqrr/rtaNMc6+OYYbY7whyRuS5AFH33/5JQsAAMAes6vLVl+5i3UjyWNu5lkAAABYoXYaj2OMR9+UHVfVO5I8Ksn+VbUpyYvHGG+6KfsEAABgPpZzw5zdMsb4pVntGwAAgD1rOR/VAQAAwIITjwAAALTaeKyJp1bVi6aPD6mqB81+NAAAAFaK5Zx5fH2ShyTZ9h7Gryd53cwmAgAAYMVZzg1zHjzGuH9VnZMkY4x/raq1M54LAACAFWQ5Zx6/W1V7ZfLZjqmqA5J8b6ZTAQAAsKIsJx7/LMl7k9ylql6a5ONJ/mSmUwEAALCitJetjjHeXlVnJfnJJJXkcWOMC2c+GQAAACtGG49VdUiSbyb5wNJlY4wvz3IwAAAAVo7l3DDng5m837GS3DbJYUk+n+SIGc4FAADACrKcy1bvvfRxVd0/yW/MbCIAAABWnOXcMOcHjDHOTvLgGcwCAADACrWc9zw+f8nDWyW5f5LNM5sIAACAFWc573nce8n312fyHsh3z2YcAAAAVqJdxmNV7ZVk7zHGC/bQPAAAAKxAO33PY1WtGWNsTfKwPTgPAAAAK9Cuzjyensn7G8+tqpOSbEzyjW0rxxjvmfFsAAAArBDLec/jbZNck+Qx+f7nPY4k4hEAAGBB7Coe7zK90+p5+X40bjNmOhUAAA
Aryq7ica8kd8wPRuM24hEAAGCB7CoerxhjvGSPTQIAAMCKtdO7rWbHZxwBAABYQLuKx5/cY1MAAACwou00HscY1+7JQQAAAFi5dnXmEQAAAJKIRwAAAJZBPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0Foz7wEAAPi+hz71qfMeAVhkl1y501XOPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAADQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPAAAAtMQjAAAALfEIAABASzwCAA
DQEo8AAAC0xCMAAAAt8QgAAEBLPAIAANASjwAAALTEIwAAAC3xCAAAQEs8AgAA0BKPrDqnnXJa7rPhqBxxr3vn5S97xbzHARaQ1yFgRRgj+acrk3OvnvckrBLikVVl69ated5zn5/3n/zenPPZs7LxnRtz4QUXznssYIF4HQJWjC9fl9xhzbynYBURj6wqZ5x+Zu5+98Nz2OGHZe3atXnCEx+fk086ed5jAQvE6xCwInx7a3L1d5J1d5j3JKwi4pFVZfPmzTl4/cE3PF538LpcvvmKOU4ELBqvQ8CKcNFXknvsM+8pWGVmFo9Vtb6qPlxVF1TV+VX127M6FgAAMHXVt5K1eyX7rJ33JKwys7wI+vokvzvGOLuq9k5yVlX9/RjjghkekwV30EEHZdNlm254fPmmy7PuoAPnOBGwaLwOAXP31S2TgLz628n3RnL9SM67Njlyv3lPxi3czM48jjGuGGOcPf3+60kuTLJuVseDJDn6gQ/IxRf/cy695NJs2bIlG9/1tznu+OPmPRawQLwOAXP3I/smjzgwefhdJ8G431rhyM1ij9x+qaoOTXK/JJ/aE8djca1Zsyavfu0rc/yxJ2Tr1q15+jOelg1HbJj3WMAC8ToEwGpVY4zZHqDqjkn+b5KXjjHes4P1JyY5MUnWH7L+ARd98XMznQcAYCW73TH3nPcIwCL71JUZX9tSO1o107utVtWtk7w7ydt3FI5JMsZ4wxjj6DHG0QccsP8sxwEAAGA3zfJuq5XkTUkuHGO8albHAQAAYPZmeebxYUl+Ocljqurc6dexMzweAAAAMzKzG+aMMT6eZIfXygIAAHDLMtP3PAIAALA6iEcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACgJR4BAABoiUcAAABa4hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BAAAoFVjjHnPcIOquirJl+Y9B7do+ye5et5DAAvLaxAwb16HuKnuNsY4YEcrVlQ8wk1VVWeOMY6e9xzAYvIaBMyb1yFmyW
WrAAAAtMQjAAAALfHIavOGeQ8ALDSvQcC8eR1iZrznEQAAgJYzjwAAALTEI6tCVR1TVZ+vqour6oXzngdYLFX15qq6sqrOm/cswGKqqvVV9eGquqCqzq+q3573TKw+LlvlFq+q9kpyUZKfTrIpyRlJfmmMccFcBwMWRlU9Msl1Sd4yxjhy3vMAi6eqDkxy4Bjj7KraO8lZSR7n7yFuTs48sho8KMnFY4wvjjG2JPmbJCfMeSZggYwxPprk2nnPASyuMcYVY4yzp99/PcmFSdbNdypWG/HIarAuyWVLHm+KF0sAYEFV1aFJ7pfkU3MehVVGPAIAwCpRVXdM8u4kzxtjfG3e87C6iEdWg8uTrF/y+ODpMgCAhVFVt84kHN8+xnjPvOdh9RGPrAZnJLlHVR1WVWuT/GKSk+Y8EwDAHlNVleRNSS4cY7xq3vOwOolHbvHGGNcneU6SUzN5c/i7xhjnz3cqYJFU1TuSfDLJvapqU1U9a94zAQvnYUl+Ocljqurc6dex8x6K1cVHdQAAANBy5hEAAICWeAQAAKAlHgEAAGiJRwAAAFriEQAAgJZ4BOAWp6q2Tm9Df15Vbayq29+Eff1lVT1++v1fVNWGXWz7qKp66G4c49Kq2n+5y7fb5robeaz/XFUvuLEzAkBHPAJwS/StMcZRY4wjk2xJ8uylK6tqze7sdIzxq2OMC3axyaOS3Oh4BIDVQDwCcEv3sSQ/Mj0r+LGqOinJBVW1V1W9vKrOqKrPVNWvJUlN/Leq+nxV/e8kd9m2o6r6SFUdPf3+mKo6u6o+XVX/UFWHZhKpvzM96/mIqjqgqt49PcYZVfWw6XPvXFWnVdX5VfUXSar7IarqfVV11vQ5J2637tXT5f9QVQdMl929qk6ZPudjVfWjO9jnc6vqgunP/ze7+fsFgCTJbv2XWQBYCaZnGB+b5JTpovsnOXKMcck0wL46xnhgVd0mySeq6rQk90tyryQbkvxwkguSvHm7/R6Q5I1JHjnd135jjGur6n8kuW6M8Yrpdn+d5NVjjI9X1SFJTk3yY0lenOTjY4yXVNVxSZ61jB/nV6bHuF2SM6rq3WOMa5LcIcmZY4zfqaoXTff9nCRvSPLsMcYXqurBSV6f5DHb7fOFSQ4bY3ynqn5oOb9TANgZ8QjALdHtqurc6fcfS/KmTC4nPX2Mccl0+c8kuc+29zMm2TfJPZI8Msk7xhhbk2yuqv+zg/3/eJKPbtvXGOPanczxU0k2VN1wYnGfqrrj9Bi/MH3uB6vqX5fxMz23qn5++v366azXJPlekndOl78tyXumx3hoko1Ljn2bHezzM0neXlXvS/K+ZcwAADslHgG4JfrWGOOopQumEfWNpYuS/NYY49Tttjv2ZpzjVkl+fIzx7R3MsmxV9ahMQvQhY4xvVtVHktx2J5uP6XG/sv3vYAeOyyRkj0/yB1V17zHG9TdqOACY8p5HAFarU5P8elXdOkmq6p5VdYckH03ypOl7Ig9M8ugdPPefkjyyqg6bPne/6fKvJ9l7yXanJfmtbQ+q6qjptx9N8uTpsscmuVMz675J/nUajj+ayZnPbW6VZNvZ0ydncjns15JcUlVPmB6jquq+S3dYVbdKsn6M8eEk/2F6jDs2cwDATolHAFarv8jk/YxnV9V5Sf5nJlfcvDfJF6br3pLkk9s/cYxxVZITM7lE9NP5/mWjH0jy89tumJPkuUmOnt6Q5oJ8/66vf5RJfJ6fyeWrX25mPSXJmqq6MMl/zSRet/lGkgdNf4bHJHnJdPlTkjxrOt/5SU7Ybp97JXlbVX02yTlJ/myM8ZVmDgDYqRpjzHsGAAAAVjhnHgEAAGiJRwAAAFriEQAAgJZ4BAAAoCUeAQAAaIlHAAAAWuIRAACAlngEAACg9f8B0stDya7eYS0AAAAASUVORK5CYII=\n",
|
| 454 |
+
"text/plain": [
|
| 455 |
+
"<Figure size 1152x1152 with 1 Axes>"
|
| 456 |
+
]
|
| 457 |
+
},
|
| 458 |
+
"metadata": {
|
| 459 |
+
"needs_background": "light"
|
| 460 |
+
},
|
| 461 |
+
"output_type": "display_data"
|
| 462 |
+
}
|
| 463 |
+
],
|
| 464 |
+
"source": [
|
| 465 |
+
"from sklearn.metrics import confusion_matrix\n",
|
| 466 |
+
"\n",
|
| 467 |
+
"n_class = y_train_ctg.shape[1]\n",
|
| 468 |
+
"cm_test = confusion_matrix(y_test, yhat_test)\n",
|
| 469 |
+
"plt.figure(figsize=(16, 16))\n",
|
| 470 |
+
"plt.imshow(cm_test, cmap=plt.cm.Greens)\n",
|
| 471 |
+
"class_arr = np.arange(n_class)\n",
|
| 472 |
+
"plt.xticks(class_arr)\n",
|
| 473 |
+
"plt.yticks(class_arr)\n",
|
| 474 |
+
"plt.title('Confusion Matrix')\n",
|
| 475 |
+
"plt.xlabel('Predicted labels')\n",
|
| 476 |
+
"plt.ylabel('True labels')\n",
|
| 477 |
+
"\n",
|
| 478 |
+
"for i in range(n_class):\n",
|
| 479 |
+
" for j in range(n_class):\n",
|
| 480 |
+
" plt.text(i, j, cm_test[j][i])"
|
| 481 |
+
]
|
| 482 |
+
}
|
| 483 |
+
],
|
| 484 |
+
"metadata": {
|
| 485 |
+
"kernelspec": {
|
| 486 |
+
"display_name": "venv",
|
| 487 |
+
"language": "python",
|
| 488 |
+
"name": "venv"
|
| 489 |
+
},
|
| 490 |
+
"language_info": {
|
| 491 |
+
"codemirror_mode": {
|
| 492 |
+
"name": "ipython",
|
| 493 |
+
"version": 3
|
| 494 |
+
},
|
| 495 |
+
"file_extension": ".py",
|
| 496 |
+
"mimetype": "text/x-python",
|
| 497 |
+
"name": "python",
|
| 498 |
+
"nbconvert_exporter": "python",
|
| 499 |
+
"pygments_lexer": "ipython3",
|
| 500 |
+
"version": "3.9.6"
|
| 501 |
+
}
|
| 502 |
+
},
|
| 503 |
+
"nbformat": 4,
|
| 504 |
+
"nbformat_minor": 5
|
| 505 |
+
}
|
codes/jupyter_notebook/tr_2mdl_et.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
codes/jupyter_notebook/tr_mdl_io.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
codes/sampling.py
ADDED
|
@@ -0,0 +1,395 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module contains Smp class to collect inputs from the user. To understand this module you should know about Mediapipe library."""
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
import cv2
|
| 6 |
+
import time
|
| 7 |
+
from codes.base import eyeing as ey
|
| 8 |
+
from codes.calibration import Clb
|
| 9 |
+
import math
|
| 10 |
+
import random
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Smp(object):
    """Collects model inputs (eye images and face feature vectors) from the user's webcam."""

    # Polled by the GUI thread; setting it to False stops the capture loops.
    running = True

    def sampling(self, num, camera_id=0, gui=True):
        """
        Collect the main sampling data (eye images and face vectors) from the user.

        Parameters:
            num: subject number
            camera_id: Camera ID
            gui: when False, a small helper window appears so the run can be
                stopped by pressing "q"; when True, stopping is handled by the GUI.

        Returns:
            None
        """
        # Face frames are only stored during the first face_saving_time seconds.
        face_saving_time = 80
        return_face1 = True
        win_name = "Sampling"
        little_win_name = "smp"
        tx0 = [["Sampling", (0.25, 0.5), 2, ey.RED, 3]]
        tx1 = [["SPACE --> start/pause", (0.05, 0.3), 1.5, ey.RED, 3],
               ["ESC --> Stop", (0.05, 0.6), 1.6, ey.RED, 3]]

        some_landmarks_ids = ey.get_some_landmarks_ids()

        (
            frame_size,
            camera_matrix,
            dst_cof,
            pcf
        ) = ey.get_camera_properties(camera_id)

        face_mesh = ey.get_mesh()

        cap = ey.get_camera(camera_id, frame_size)
        # Let the camera warm up / auto-adjust before sampling.
        ey.pass_frames(cap, 100)

        print("Sampling started...")
        t_mat = []
        sys_time_mat = []
        eyes_mat = []
        inp_scalars_mat = []
        eyes_ratio_mat = []
        face_vec = []
        fps_vec = []

        # Splash screen on the middle monitor for 4 seconds.
        ey.big_win(win_name, math.floor(len(ey.monitors) / 2) * ey.monitors[0].width)
        ey.show_clb_win(win_name, win_color=ey.WHITE, texts=tx0)
        cv2.waitKey(4000)
        cv2.destroyWindow(win_name)

        t0 = time.perf_counter()
        while self.running:
            j = 0
            ey.big_win(win_name, math.floor(len(ey.monitors) / 2) * ey.monitors[0].width)
            ey.show_clb_win(win_name, win_color=ey.WHITE, texts=tx1)
            button = cv2.waitKey(0)
            cv2.destroyWindow(win_name)
            if (button == ord('q')) or (button == ord('Q')) or (button == 27):
                break
            elif button == ord(' '):
                t_vec = []
                sys_time_vec = []
                eyes_vec = []
                inp_scalars_vec = []
                eyes_ratio_vec = []
                t1 = time.perf_counter()
                while self.running:
                    frame_success, frame, frame_rgb = ey.get_frame(cap)
                    if frame_success:
                        return_face = False
                        if ((time.perf_counter() - t0) < face_saving_time) and return_face1:
                            return_face = True

                        results = face_mesh.process(frame_rgb)

                        # Get inputs of the models
                        (
                            features_success,
                            _,
                            eyes_frame_gray,
                            features_vector,
                            eyes_ratio,
                            face_img
                        ) = ey.get_model_inputs(
                            frame,
                            frame_rgb,
                            results,
                            camera_matrix,
                            pcf,
                            frame_size,
                            dst_cof,
                            some_landmarks_ids,
                            return_face=return_face
                        )
                        if features_success:
                            t_vec.append(round(time.perf_counter() - t0, 3))
                            sys_time_vec.append(str(datetime.now())[-15:-3])
                            eyes_vec.append(eyes_frame_gray)
                            inp_scalars_vec.append(features_vector)
                            eyes_ratio_vec.append(eyes_ratio)
                            if return_face:
                                face_vec.append(face_img)
                            j += 1
                        if not gui:
                            # Tiny red window whose key events let the user pause/stop.
                            ey.show_clb_win(little_win_name, win_color=ey.RED, win_size=(50, 50))
                            button = cv2.waitKey(1)
                            if (button == ord('q')) or (button == ord('Q')) or (button == 27) or (button == ord(' ')):
                                break
                fps_vec.append(ey.get_time(j, t1, True))
                t_mat.append(np.array(t_vec))
                sys_time_mat.append(sys_time_vec)
                eyes_mat.append(np.array(eyes_vec))
                inp_scalars_mat.append(np.array(inp_scalars_vec))
                eyes_ratio_mat.append(np.array(eyes_ratio_vec))

                if not gui:
                    cv2.destroyWindow(little_win_name)
                if (button == ord('q')) or (button == ord('Q')) or (button == 27):
                    break
                # After the first recording segment, stop storing face frames.
                return_face1 = False

        print("Sampling finished")
        ey.get_time(0, t0, True)
        print(f"Mean FPS : {np.array(fps_vec).mean()}")

        cv2.destroyAllWindows()
        cap.release()

        smp_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.SMP])
        ey.save(
            [t_mat, sys_time_mat, eyes_mat, inp_scalars_mat, eyes_ratio_mat, [np.array(face_vec)]],
            smp_dir,
            [ey.T, "sys_time", ey.X1, ey.X2, ey.ER, ey.FV])
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def accuracy(self, num, camera_id=0, clb_grid=(2, 2, 10)):
|
| 152 |
+
"""
|
| 153 |
+
Collecting inputs (eyes images and face vectors) from the user to assess the accuracy of the models. It shows a point in a grid
|
| 154 |
+
and the user must look at that. So, with the true outputs (locations of the point) and the predicted outputs, it's possible to
|
| 155 |
+
evaluate the models. The evaulation part is in the eye_track.py module.
|
| 156 |
+
|
| 157 |
+
Parameters:
|
| 158 |
+
num: subject number
|
| 159 |
+
camera_id: Camera ID
|
| 160 |
+
clb_grid: The grid that you want to use for comparison
|
| 161 |
+
|
| 162 |
+
Returns:
|
| 163 |
+
None
|
| 164 |
+
"""
|
| 165 |
+
|
| 166 |
+
# Collecting data for testing
|
| 167 |
+
tx0 = [["Track WHITE point", (0.05, 0.25), 1.5, ey.RED, 3],
|
| 168 |
+
["SPACE --> start", (0.05, 0.5), 1.5, ey.RED, 3],
|
| 169 |
+
["ESC --> Stop", (0.05, 0.75), 1.5, ey.RED, 3]]
|
| 170 |
+
clb_points = Clb().create_grid(clb_grid)
|
| 171 |
+
|
| 172 |
+
some_landmarks_ids = ey.get_some_landmarks_ids()
|
| 173 |
+
|
| 174 |
+
(
|
| 175 |
+
frame_size,
|
| 176 |
+
camera_matrix,
|
| 177 |
+
dst_cof,
|
| 178 |
+
pcf
|
| 179 |
+
) = ey.get_camera_properties(camera_id)
|
| 180 |
+
|
| 181 |
+
face_mesh = ey.get_mesh()
|
| 182 |
+
|
| 183 |
+
i = 0
|
| 184 |
+
fps_vec = []
|
| 185 |
+
t_mat = []
|
| 186 |
+
eyes_mat = []
|
| 187 |
+
inp_scalars_mat = []
|
| 188 |
+
points_loc_mat = []
|
| 189 |
+
eyes_ratio_mat = []
|
| 190 |
+
cap = ey.get_camera(camera_id, frame_size)
|
| 191 |
+
ey.pass_frames(cap, 100)
|
| 192 |
+
t0 = time.perf_counter()
|
| 193 |
+
|
| 194 |
+
win_name = "Information"
|
| 195 |
+
ey.big_win(win_name, math.floor(len(ey.monitors) / 2)*ey.monitors[0].width)
|
| 196 |
+
ey.show_clb_win(win_name, texts=tx0, win_color=ey.WHITE)
|
| 197 |
+
cv2.waitKey(10000)
|
| 198 |
+
cv2.destroyWindow(win_name)
|
| 199 |
+
for (i_m, m) in enumerate(ey.monitors):
|
| 200 |
+
if not self.running:
|
| 201 |
+
break
|
| 202 |
+
win_name = f"Calibration-{i_m}"
|
| 203 |
+
ey.big_win(win_name, i_m * m.width)
|
| 204 |
+
for item in clb_points:
|
| 205 |
+
if not self.running and (i_m != 0):
|
| 206 |
+
break
|
| 207 |
+
pnt = item[0]
|
| 208 |
+
t_vec = []
|
| 209 |
+
eyes_vec = []
|
| 210 |
+
inp_scalars_vec = []
|
| 211 |
+
points_loc_vec = []
|
| 212 |
+
eyes_ratio_vec = []
|
| 213 |
+
ey.show_clb_win(win_name, pnt, win_color=ey.GRAY)
|
| 214 |
+
|
| 215 |
+
button = cv2.waitKey(0)
|
| 216 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 217 |
+
break
|
| 218 |
+
elif button == ord(' '):
|
| 219 |
+
ey.pass_frames(cap)
|
| 220 |
+
t1 = time.perf_counter()
|
| 221 |
+
s = len(item)
|
| 222 |
+
for pnt in item:
|
| 223 |
+
ey.show_clb_win(win_name, pnt)
|
| 224 |
+
button = cv2.waitKey(1)
|
| 225 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 226 |
+
break
|
| 227 |
+
while True:
|
| 228 |
+
frame_success, frame, frame_rgb = ey.get_frame(cap)
|
| 229 |
+
if frame_success:
|
| 230 |
+
results = face_mesh.process(frame_rgb)
|
| 231 |
+
(
|
| 232 |
+
features_success,
|
| 233 |
+
_,
|
| 234 |
+
eyes_frame_gray,
|
| 235 |
+
features_vector,
|
| 236 |
+
eyes_ratio,
|
| 237 |
+
_
|
| 238 |
+
) = ey.get_model_inputs(
|
| 239 |
+
frame,
|
| 240 |
+
frame_rgb,
|
| 241 |
+
results,
|
| 242 |
+
camera_matrix,
|
| 243 |
+
pcf,
|
| 244 |
+
frame_size,
|
| 245 |
+
dst_cof,
|
| 246 |
+
some_landmarks_ids
|
| 247 |
+
)
|
| 248 |
+
if features_success:
|
| 249 |
+
t_vec.append(round(time.perf_counter() - t1, 3))
|
| 250 |
+
eyes_vec.append(eyes_frame_gray)
|
| 251 |
+
inp_scalars_vec.append(features_vector)
|
| 252 |
+
points_loc_vec.append([(pnt[0] + i_m)/len(ey.monitors), pnt[1]])
|
| 253 |
+
eyes_ratio_vec.append(eyes_ratio)
|
| 254 |
+
i += 1
|
| 255 |
+
break
|
| 256 |
+
if not self.running:
|
| 257 |
+
break
|
| 258 |
+
fps_vec.append(ey.get_time(s, t1))
|
| 259 |
+
t_mat.append(np.array(t_vec))
|
| 260 |
+
eyes_mat.append(np.array(eyes_vec))
|
| 261 |
+
inp_scalars_mat.append(np.array(inp_scalars_vec))
|
| 262 |
+
points_loc_mat.append(np.array(points_loc_vec))
|
| 263 |
+
eyes_ratio_mat.append(np.array(eyes_ratio_vec))
|
| 264 |
+
|
| 265 |
+
if not self.running:
|
| 266 |
+
break
|
| 267 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 268 |
+
break
|
| 269 |
+
cv2.destroyWindow(win_name)
|
| 270 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 271 |
+
break
|
| 272 |
+
cap.release()
|
| 273 |
+
|
| 274 |
+
cv2.destroyAllWindows()
|
| 275 |
+
ey.get_time(0, t0, True)
|
| 276 |
+
print(f"Mean FPS : {np.array(fps_vec).mean()}")
|
| 277 |
+
|
| 278 |
+
acc_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.ACC])
|
| 279 |
+
ey.save(
|
| 280 |
+
[t_mat, eyes_mat, inp_scalars_mat, points_loc_mat, eyes_ratio_mat],
|
| 281 |
+
acc_dir,
|
| 282 |
+
[ey.T, ey.X1, ey.X2, ey.Y, ey.ER])
|
| 283 |
+
print("Accuracy data collected!")
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def latency(self, num, camera_id=0):
|
| 287 |
+
""" Collecting data to assessing latency. The user should look at the left and right side of the screen when they see red or blue colors.
|
| 288 |
+
|
| 289 |
+
Parameters:
|
| 290 |
+
num: Subject number
|
| 291 |
+
camera_id: Camera ID
|
| 292 |
+
|
| 293 |
+
Returns:
|
| 294 |
+
None
|
| 295 |
+
"""
|
| 296 |
+
tx1 = [["SPACE --> start", (0.05, 0.2), 1.3, ey.BLACK, 2],
|
| 297 |
+
[f"ESC --> stop", (0.05, 0.4), 1.3, ey.BLACK, 2],
|
| 298 |
+
["RED --> Left", (0.05, 0.6), 1.3, ey.RED, 2],
|
| 299 |
+
["BLUE --> Right", (0.05, 0.8), 1.3, ey.BLUE, 2]]
|
| 300 |
+
some_landmarks_ids = ey.get_some_landmarks_ids()
|
| 301 |
+
|
| 302 |
+
(
|
| 303 |
+
frame_size,
|
| 304 |
+
camera_matrix,
|
| 305 |
+
dst_cof,
|
| 306 |
+
pcf
|
| 307 |
+
) = ey.get_camera_properties(camera_id)
|
| 308 |
+
|
| 309 |
+
face_mesh = ey.get_mesh()
|
| 310 |
+
|
| 311 |
+
fps_vec = []
|
| 312 |
+
t_mat = []
|
| 313 |
+
eyes_mat = []
|
| 314 |
+
inp_scalars_mat = []
|
| 315 |
+
cap = ey.get_camera(camera_id, frame_size)
|
| 316 |
+
ey.pass_frames(cap, 100)
|
| 317 |
+
t0 = time.perf_counter()
|
| 318 |
+
|
| 319 |
+
win_name = "Information"
|
| 320 |
+
ey.big_win(win_name, math.floor(len(ey.monitors) / 2) * ey.monitors[0].width)
|
| 321 |
+
ey.show_clb_win(win_name, texts=tx1, win_color=ey.WHITE)
|
| 322 |
+
button = cv2.waitKey(0)
|
| 323 |
+
if button == ord(' '):
|
| 324 |
+
cv2.destroyWindow(win_name)
|
| 325 |
+
win_name = "Latency"
|
| 326 |
+
time.sleep(2)
|
| 327 |
+
for j in range(6):
|
| 328 |
+
if not self.running:
|
| 329 |
+
break
|
| 330 |
+
t_vec = []
|
| 331 |
+
eyes_vec = []
|
| 332 |
+
inp_scalars_vec = []
|
| 333 |
+
i = 0
|
| 334 |
+
t1 = time.perf_counter()
|
| 335 |
+
dt = random.random()*3
|
| 336 |
+
ey.big_win(win_name, math.floor(len(ey.monitors) / 2) * ey.monitors[0].width)
|
| 337 |
+
if j % 2 == 0:
|
| 338 |
+
win_color = ey.BLUE
|
| 339 |
+
else:
|
| 340 |
+
win_color = ey.RED
|
| 341 |
+
ey.show_clb_win(win_name, win_color=win_color)
|
| 342 |
+
cv2.waitKey(ey.LATENCY_WAITING_TIME)
|
| 343 |
+
cv2.destroyWindow(win_name)
|
| 344 |
+
while (time.perf_counter()-t1) < (3 + dt):
|
| 345 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 346 |
+
break
|
| 347 |
+
while True:
|
| 348 |
+
frame_success, frame, frame_rgb = ey.get_frame(cap)
|
| 349 |
+
if frame_success:
|
| 350 |
+
results = face_mesh.process(frame_rgb)
|
| 351 |
+
(
|
| 352 |
+
features_success,
|
| 353 |
+
_,
|
| 354 |
+
eyes_frame_gray,
|
| 355 |
+
features_vector,
|
| 356 |
+
eyes_ratio,
|
| 357 |
+
_
|
| 358 |
+
) = ey.get_model_inputs(
|
| 359 |
+
frame,
|
| 360 |
+
frame_rgb,
|
| 361 |
+
results,
|
| 362 |
+
camera_matrix,
|
| 363 |
+
pcf,
|
| 364 |
+
frame_size,
|
| 365 |
+
dst_cof,
|
| 366 |
+
some_landmarks_ids
|
| 367 |
+
)
|
| 368 |
+
if features_success:
|
| 369 |
+
t_vec.append(round(time.perf_counter() - t1, 3))
|
| 370 |
+
eyes_vec.append(eyes_frame_gray)
|
| 371 |
+
inp_scalars_vec.append(features_vector)
|
| 372 |
+
i += 1
|
| 373 |
+
break
|
| 374 |
+
if not self.running:
|
| 375 |
+
break
|
| 376 |
+
fps_vec.append(ey.get_time(i, t1))
|
| 377 |
+
t_mat.append(np.array(t_vec))
|
| 378 |
+
eyes_mat.append(np.array(eyes_vec))
|
| 379 |
+
inp_scalars_mat.append(np.array(inp_scalars_vec))
|
| 380 |
+
|
| 381 |
+
if not self.running:
|
| 382 |
+
break
|
| 383 |
+
if (button == ord('q')) or (button == ord('Q')) or (button == 27):
|
| 384 |
+
break
|
| 385 |
+
cap.release()
|
| 386 |
+
|
| 387 |
+
ey.get_time(0, t0, True)
|
| 388 |
+
print(f"Mean FPS : {np.array(fps_vec).mean()}")
|
| 389 |
+
|
| 390 |
+
ltn_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.LTN])
|
| 391 |
+
ey.save(
|
| 392 |
+
[t_mat, eyes_mat, inp_scalars_mat],
|
| 393 |
+
ltn_dir,
|
| 394 |
+
[ey.T, ey.X1, ey.X2])
|
| 395 |
+
print("Latency data collected!")
|
codes/see_data.py
ADDED
|
@@ -0,0 +1,383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module is for seeing the inputs and the outputs."""
|
| 2 |
+
|
| 3 |
+
import cv2
|
| 4 |
+
from codes.base import eyeing as ey
|
| 5 |
+
import time
|
| 6 |
+
import matplotlib.pyplot as plt
|
| 7 |
+
import numpy as np
|
| 8 |
+
import os
|
| 9 |
+
import math
|
| 10 |
+
from openpyxl import load_workbook
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class See(object):
    """Visualization helpers for the collected inputs and the model outputs."""

    # Polled by the GUI thread; setting it to False stops the display loops.
    running = True

    @staticmethod
    def data_features(num, target_fol=ey.CLB):
        """
        Show the collected eye images (with their associated scalars) from one folder.

        Parameters:
            num: subject number
            target_fol: targeted folder

        Returns:
            None
        """
        sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])
        if target_fol == ey.CLB:
            target_dir = ey.create_dir([sbj_dir, ey.CLB])
            data = ey.load(target_dir, [ey.X1, ey.X2, ey.Y])
        elif target_fol == ey.IO:
            target_dir = ey.create_dir([sbj_dir, ey.IO])
            data = ey.load(target_dir, [ey.X1, ey.X2, ey.Y])
        elif target_fol == ey.SMP:
            target_dir = ey.create_dir([sbj_dir, ey.SMP])
            data = ey.load(target_dir, [ey.X1, ey.X2, ey.T])
        elif target_fol == ey.ACC:
            target_dir = ey.create_dir([sbj_dir, ey.ACC])
            data = ey.load(target_dir, [ey.X1, ey.X2, ey.T, ey.Y])
        elif target_fol == ey.LTN:
            target_dir = ey.create_dir([sbj_dir, ey.LTN])
            data = ey.load(target_dir, [ey.X1, ey.X2, ey.T])
        else:
            data = None
            print("The folder isn't valid!!")
            quit()

        win_name = "Eyes"
        cv2.namedWindow(win_name)
        # Center the window on the second monitor if one exists.
        if len(ey.monitors) == 1:
            cv2.moveWindow(win_name, int(ey.monitors[0].width / 2), int(ey.monitors[0].height / 2))
        else:
            cv2.moveWindow(win_name, ey.monitors[0].width + int(ey.monitors[0].width / 2), int(ey.monitors[0].height / 2))

        x1 = data[0]
        print(f"Number of vectors : {len(x1)}")
        time.sleep(2)

        i = 0
        # Fix: q was previously unbound if the data was empty, which made the
        # check after the inner loop raise NameError.
        q = -1
        for (k, x1_vec) in enumerate(x1):
            for (s, img) in enumerate(x1_vec):
                # Gather the companion values (scalars/targets) for this sample.
                d = [data[j][k][s] for j in range(1, len(data))]
                print(f"{i}, {d}")
                cv2.imshow(win_name, img)
                q = cv2.waitKey(20)
                if q == ord('q') or q == ord('Q'):
                    break
                i += 1
            if q == ord('q') or q == ord('Q'):
                break
        cv2.destroyAllWindows()
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def pixels_smp(self, num, n_monitors_data=len(ey.monitors), show_in_all_monitors=False, win_size=(1280,720), show_fixations=False):
|
| 81 |
+
"""
|
| 82 |
+
See the eye viewpoint of the user during sampling.
|
| 83 |
+
|
| 84 |
+
Parameters:
|
| 85 |
+
num: subject number
|
| 86 |
+
n_monitors_data: The number of monitors while the data was collecting.
|
| 87 |
+
show_in_all_monitors: Just for the moment that we have more than one monitor. So we tune the parameters to show the data in all of them
|
| 88 |
+
win_size: size of the appeared window
|
| 89 |
+
show_fixations: It shows the fixations
|
| 90 |
+
|
| 91 |
+
Returns:
|
| 92 |
+
None
|
| 93 |
+
"""
|
| 94 |
+
little_win = False
|
| 95 |
+
smp_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.SMP])
|
| 96 |
+
try:
|
| 97 |
+
sheet_et = load_workbook(smp_dir + "eye_track.xlsx")["Sheet"]
|
| 98 |
+
prd_et = []
|
| 99 |
+
for i in range(3,sheet_et.max_row+1):
|
| 100 |
+
et_splited = sheet_et[f"C{i}"].value[1:-1].split(',')
|
| 101 |
+
prd_et.append([float(sheet_et[f"A{i}"].value), float(et_splited[0]), float(et_splited[1])])
|
| 102 |
+
prd_et = np.array(prd_et)
|
| 103 |
+
|
| 104 |
+
if show_fixations:
|
| 105 |
+
sheet_fxn = load_workbook(smp_dir + "fixations.xlsx")["Sheet"]
|
| 106 |
+
fixations = []
|
| 107 |
+
for i in range(3, sheet_fxn.max_row+1):
|
| 108 |
+
fxn_splited = sheet_fxn[f"D{i}"].value[1:-1].split(',')
|
| 109 |
+
fixations.append([float(sheet_fxn[f"A{i}"].value), float(sheet_fxn[f"C{i}"].value),
|
| 110 |
+
float(fxn_splited[0]), float(fxn_splited[1])])
|
| 111 |
+
fixations = np.array(fixations)
|
| 112 |
+
|
| 113 |
+
if show_in_all_monitors:
|
| 114 |
+
win_names = []
|
| 115 |
+
for (i, m) in enumerate(ey.monitors):
|
| 116 |
+
win_name = f"Calibration-{i}"
|
| 117 |
+
ey.big_win(win_name, i * m.width)
|
| 118 |
+
win_names.append(win_name)
|
| 119 |
+
elif (n_monitors_data == 1):
|
| 120 |
+
win_name = "Calibration"
|
| 121 |
+
ey.big_win(win_name, math.floor(len(ey.monitors)/2)*ey.monitors[0].width)
|
| 122 |
+
else:
|
| 123 |
+
win_name = "Calibration"
|
| 124 |
+
little_win = True
|
| 125 |
+
|
| 126 |
+
for prd1 in prd_et:
|
| 127 |
+
t0 = prd1[0]
|
| 128 |
+
fxn_exist = False
|
| 129 |
+
if show_fixations:
|
| 130 |
+
time_comparison = t0 - fixations[:, 0]
|
| 131 |
+
time_comparison[time_comparison<0] = 1000
|
| 132 |
+
matched_t_fxn_arg = time_comparison.argmin()
|
| 133 |
+
if (t0 > fixations[matched_t_fxn_arg, 0]) and (t0 < (fixations[matched_t_fxn_arg, 0]+fixations[matched_t_fxn_arg, 1])):
|
| 134 |
+
fxn_exist = True
|
| 135 |
+
prd0 = prd1[1:]
|
| 136 |
+
tx0 = [[f"time: {t0} sec", (0.05, 0.25), 1, ey.GREEN, 2]]
|
| 137 |
+
if show_in_all_monitors:
|
| 138 |
+
y_prd_show = [None] * len(ey.monitors)
|
| 139 |
+
texts = y_prd_show.copy()
|
| 140 |
+
texts[math.floor(len(ey.monitors) / 2)] = tx0
|
| 141 |
+
pw_prd = prd0[0] * n_monitors_data
|
| 142 |
+
for (i, _) in enumerate(ey.monitors):
|
| 143 |
+
if prd0[0] != -1:
|
| 144 |
+
win_color = ey.WHITE
|
| 145 |
+
if i != 1:
|
| 146 |
+
t0 = None
|
| 147 |
+
if (pw_prd > i) and (pw_prd < (i + 1)):
|
| 148 |
+
y_prd_show[i] = prd0
|
| 149 |
+
y_prd_show[i][0] = pw_prd - i
|
| 150 |
+
else:
|
| 151 |
+
prd0 = None
|
| 152 |
+
win_color = ey.GRAY
|
| 153 |
+
ey.show_clb_win(win_names[i], pnt_prd=y_prd_show[i], texts=texts[i], win_color=win_color)
|
| 154 |
+
else:
|
| 155 |
+
if prd0[0] != -1:
|
| 156 |
+
win_color = ey.WHITE
|
| 157 |
+
else:
|
| 158 |
+
prd0 = None
|
| 159 |
+
win_color = ey.GRAY
|
| 160 |
+
if little_win:
|
| 161 |
+
if fxn_exist:
|
| 162 |
+
ey.show_clb_win(win_name, pnt=fixations[matched_t_fxn_arg,2:],
|
| 163 |
+
pnt_prd=prd0, texts=tx0, win_color=win_color, win_size=win_size, pnt_color=ey.RED)
|
| 164 |
+
else:
|
| 165 |
+
ey.show_clb_win(win_name, pnt_prd=prd0, texts=tx0, win_color=win_color, win_size=win_size)
|
| 166 |
+
cv2.moveWindow(win_name, 0, 0)
|
| 167 |
+
else:
|
| 168 |
+
if fxn_exist:
|
| 169 |
+
ey.show_clb_win(win_name, pnt=fixations[matched_t_fxn_arg,2:],
|
| 170 |
+
pnt_prd=prd0, texts=tx0, win_color=win_color, pnt_color=ey.RED)
|
| 171 |
+
else:
|
| 172 |
+
ey.show_clb_win(win_name, pnt_prd=prd0, texts=tx0, win_color=win_color)
|
| 173 |
+
|
| 174 |
+
q = cv2.waitKey(50)
|
| 175 |
+
if q == ord('q') or q == ord('Q'):
|
| 176 |
+
break
|
| 177 |
+
if not self.running:
|
| 178 |
+
break
|
| 179 |
+
cv2.destroyAllWindows()
|
| 180 |
+
except FileNotFoundError:
|
| 181 |
+
print(f"Data does not exist in {smp_dir}")
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def pixels_acc(self, num, n_monitors_data=len(ey.monitors), show_in_all_monitors=False):
|
| 185 |
+
"""
|
| 186 |
+
See the eye viewpoint of the user during testing.
|
| 187 |
+
|
| 188 |
+
Parameters:
|
| 189 |
+
num: subject number
|
| 190 |
+
n_monitors_data: The number of monitors while the data was collecting.
|
| 191 |
+
show_in_all_monitors: Just for the moment that we have more than one monitor. So we tune the parameters to show the data in all of them
|
| 192 |
+
|
| 193 |
+
Returns:
|
| 194 |
+
None
|
| 195 |
+
"""
|
| 196 |
+
acc_dir = ey.create_dir([ey.subjects_dir, f"{num}", ey.ACC])
|
| 197 |
+
if ey.file_existing(acc_dir, 'y_mdf.pickle'):
|
| 198 |
+
[y, y_prd] = ey.load(acc_dir, ['y_mdf', 'y_prd_mdf'])
|
| 199 |
+
if show_in_all_monitors:
|
| 200 |
+
win_names = []
|
| 201 |
+
for (i, m) in enumerate(ey.monitors):
|
| 202 |
+
win_name = f"Calibration-{i}"
|
| 203 |
+
ey.big_win(win_name, i * m.width)
|
| 204 |
+
win_names.append(win_name)
|
| 205 |
+
else:
|
| 206 |
+
win_name = "Calibration"
|
| 207 |
+
ey.big_win(win_name, math.floor(len(ey.monitors) / 2)*ey.monitors[0].width)
|
| 208 |
+
|
| 209 |
+
for (y0, y_prd0) in zip(y, y_prd):
|
| 210 |
+
if show_in_all_monitors:
|
| 211 |
+
y_show = [None] * len(ey.monitors)
|
| 212 |
+
y_prd_show = [None] * len(ey.monitors)
|
| 213 |
+
pw = y0[0] * n_monitors_data
|
| 214 |
+
pw_prd = y_prd0[0] * n_monitors_data
|
| 215 |
+
for (i, _) in enumerate(ey.monitors):
|
| 216 |
+
if (pw > i) and (pw < (i + 1)):
|
| 217 |
+
y_show[i] = y0
|
| 218 |
+
y_show[i][0] = pw - i
|
| 219 |
+
if (pw_prd > i) and (pw_prd < (i + 1)):
|
| 220 |
+
y_prd_show[i] = y_prd0
|
| 221 |
+
y_prd_show[i][0] = pw_prd - i
|
| 222 |
+
ey.show_clb_win(win_names[i], pnt=y_show[i], pnt_prd=y_prd_show[i], win_color=ey.WHITE, pnt_color=ey.RED)
|
| 223 |
+
else:
|
| 224 |
+
ey.show_clb_win(win_name, pnt=y0, pnt_prd=y_prd0, win_color=ey.WHITE, pnt_color=ey.RED)
|
| 225 |
+
|
| 226 |
+
q = cv2.waitKey(50)
|
| 227 |
+
if q == ord('q') or q == ord('Q') or q == 27:
|
| 228 |
+
break
|
| 229 |
+
if not self.running:
|
| 230 |
+
break
|
| 231 |
+
|
| 232 |
+
cv2.destroyAllWindows()
|
| 233 |
+
else:
|
| 234 |
+
print(f"Data does not exist in {acc_dir}")
|
| 235 |
+
|
| 236 |
+
@staticmethod
|
| 237 |
+
def blinks_plot(num, threshold=ey.DEFAULT_BLINKING_THRESHOLD, target_fol="er"):
|
| 238 |
+
"""
|
| 239 |
+
Plotting the eyes aspect ratio (EAR) vector to tune threshold
|
| 240 |
+
|
| 241 |
+
Parameters:
|
| 242 |
+
num: subject number
|
| 243 |
+
threshold: the threshold of ear velocity
|
| 244 |
+
target_fol: targeted folder
|
| 245 |
+
|
| 246 |
+
Returns:
|
| 247 |
+
None
|
| 248 |
+
"""
|
| 249 |
+
sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])
|
| 250 |
+
if target_fol == ey.ER:
|
| 251 |
+
target_dir = ey.create_dir([sbj_dir, ey.ER])
|
| 252 |
+
elif target_fol == ey.CLB:
|
| 253 |
+
target_dir = ey.create_dir([sbj_dir, ey.CLB])
|
| 254 |
+
elif target_fol == ey.SMP:
|
| 255 |
+
target_dir = ey.create_dir([sbj_dir, ey.SMP])
|
| 256 |
+
elif target_fol == ey.ACC:
|
| 257 |
+
target_dir = ey.create_dir([sbj_dir, ey.ACC])
|
| 258 |
+
else:
|
| 259 |
+
print("The folder isn't valid!!")
|
| 260 |
+
quit()
|
| 261 |
+
er_dir = ey.create_dir([sbj_dir, ey.ER])
|
| 262 |
+
|
| 263 |
+
t_mat, eyes_ratio_mat = ey.load(target_dir, [ey.T, ey.ER])
|
| 264 |
+
|
| 265 |
+
threshold = ey.get_threshold(er_dir, threshold)
|
| 266 |
+
|
| 267 |
+
print(f"Blinking threshold is {threshold}")
|
| 268 |
+
eyes_ratio_v_mat, _, eyes_ratio_v_blink_mat = ey.get_blinking(t_mat, eyes_ratio_mat, threshold)
|
| 269 |
+
|
| 270 |
+
if len(eyes_ratio_v_mat) > 1:
|
| 271 |
+
eyes_ratio_v_vec = eyes_ratio_v_mat[0]
|
| 272 |
+
eyes_ratio_v_blink_vec = eyes_ratio_v_blink_mat[0]
|
| 273 |
+
for (i, erv) in enumerate(eyes_ratio_v_mat):
|
| 274 |
+
if i == 0:
|
| 275 |
+
continue
|
| 276 |
+
eyes_ratio_v_vec = np.concatenate([eyes_ratio_v_vec, erv])
|
| 277 |
+
eyes_ratio_v_blink_vec = np.concatenate([eyes_ratio_v_blink_vec, eyes_ratio_v_blink_mat[i]])
|
| 278 |
+
else:
|
| 279 |
+
eyes_ratio_v_vec = eyes_ratio_v_mat[0]
|
| 280 |
+
eyes_ratio_v_blink_vec = eyes_ratio_v_blink_mat[0]
|
| 281 |
+
|
| 282 |
+
# print(eyes_ratio_v_vec)
|
| 283 |
+
plt.figure()
|
| 284 |
+
plt.plot(eyes_ratio_v_vec)
|
| 285 |
+
plt.plot(eyes_ratio_v_blink_vec)
|
| 286 |
+
plt.title(f"Velocity of Eyes Ratio ({target_fol})")
|
| 287 |
+
plt.xlabel("# of Sample")
|
| 288 |
+
plt.ylabel("ER/sec")
|
| 289 |
+
blink_img_dir = target_dir + 'blinking.png'
|
| 290 |
+
plt.savefig(blink_img_dir, dpi=300, bbox_inches='tight')
|
| 291 |
+
blink_img = cv2.imread(blink_img_dir)
|
| 292 |
+
cv2.imshow("Blinking", blink_img)
|
| 293 |
+
cv2.waitKey(0)
|
| 294 |
+
cv2.destroyAllWindows()
|
| 295 |
+
os.remove(blink_img_dir)
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def user_face(self, num, threshold="d", save_threshold=False):
|
| 299 |
+
"""
|
| 300 |
+
Show the user's face to tune blinking threshold.
|
| 301 |
+
|
| 302 |
+
Parameters:
|
| 303 |
+
num: subject number
|
| 304 |
+
threshold: the blinking threshold
|
| 305 |
+
save_threshold: save the tuned threshold
|
| 306 |
+
|
| 307 |
+
Returns:
|
| 308 |
+
None
|
| 309 |
+
"""
|
| 310 |
+
scaling_frame = 5
|
| 311 |
+
sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])
|
| 312 |
+
smp_dir = ey.create_dir([sbj_dir, ey.SMP])
|
| 313 |
+
er_dir = ey.create_dir([sbj_dir, ey.ER])
|
| 314 |
+
|
| 315 |
+
threshold = ey.get_threshold(er_dir, threshold)
|
| 316 |
+
if save_threshold:
|
| 317 |
+
ey.save([threshold], er_dir, ["oth_usr"])
|
| 318 |
+
print(f"Blinking threshold is {threshold}")
|
| 319 |
+
|
| 320 |
+
if ey.file_existing(smp_dir, ey.T+".pickle"):
|
| 321 |
+
t_mat, face_mat, eyes_ratio_mat = ey.load(smp_dir, [ey.T, ey.FV, ey.ER])
|
| 322 |
+
|
| 323 |
+
eyes_ratio_v_mat = ey.get_blinking(t_mat, eyes_ratio_mat)[0]
|
| 324 |
+
|
| 325 |
+
face_vec = face_mat[0]
|
| 326 |
+
vec120_len, fh, fw = face_vec.shape[:-1]
|
| 327 |
+
little_vec_len = int(vec120_len / 10)
|
| 328 |
+
before_len = int(2 * little_vec_len / 3)
|
| 329 |
+
after_len = int(little_vec_len - before_len)
|
| 330 |
+
eyes_ratio_v_vec = eyes_ratio_v_mat[0][:vec120_len]
|
| 331 |
+
min_eyes_ratio_v, max_eyes_ratio_v = eyes_ratio_v_vec.min(), eyes_ratio_v_vec.max()
|
| 332 |
+
new_fw, new_fh = fw*scaling_frame, fh*scaling_frame
|
| 333 |
+
shift_edge = int(new_fh / 90.0)
|
| 334 |
+
red_area_h = int(0.85 * fh)
|
| 335 |
+
red_area_w = int(0.3 * fw)
|
| 336 |
+
|
| 337 |
+
thr_in_img_y = fh - int((fh / (max_eyes_ratio_v - min_eyes_ratio_v)) * (threshold - min_eyes_ratio_v))
|
| 338 |
+
zero_in_img_y = fh - int((fh / (max_eyes_ratio_v - min_eyes_ratio_v)) * (0.0 - min_eyes_ratio_v))
|
| 339 |
+
|
| 340 |
+
for i, fr in enumerate(face_vec):
|
| 341 |
+
fr = cv2.resize(fr, (new_fw, new_fh),interpolation=cv2.INTER_AREA)
|
| 342 |
+
frb = fr[-(fh+shift_edge):, :, :]
|
| 343 |
+
frb[:, :, 0:2] = 200
|
| 344 |
+
for j in range(i-before_len, i+after_len):
|
| 345 |
+
if (j>0) and (j<vec120_len):
|
| 346 |
+
if j != i:
|
| 347 |
+
marker_color = (0, 0, 255)
|
| 348 |
+
marker_size = 5
|
| 349 |
+
else:
|
| 350 |
+
marker_color = (0, 0, 0)
|
| 351 |
+
marker_size = 8
|
| 352 |
+
eye_ratio_in_img_x = int(j / vec120_len * new_fw)
|
| 353 |
+
eye_ratio_in_img_y = fh - int((fh / (max_eyes_ratio_v - min_eyes_ratio_v)) * (eyes_ratio_v_vec[j] - min_eyes_ratio_v))
|
| 354 |
+
frb = cv2.circle(frb, (eye_ratio_in_img_x, eye_ratio_in_img_y+shift_edge), marker_size, marker_color, cv2.FILLED)
|
| 355 |
+
frb = cv2.line(frb, (0, thr_in_img_y+shift_edge), (new_fw, thr_in_img_y+shift_edge), (0, 0, 0), 2)
|
| 356 |
+
frb = cv2.line(frb, (0, 0), (new_fw, 0), (0, 0, 0), 10)
|
| 357 |
+
frb = cv2.line(frb, (0, fh+shift_edge), (new_fw, fh+shift_edge), (0, 0, 0), 10)
|
| 358 |
+
frb = cv2.line(frb, (0, zero_in_img_y+shift_edge), (new_fw, zero_in_img_y+shift_edge), (53, 18, 80), 1)
|
| 359 |
+
frb = cv2.putText(frb, "erv = 0", (10, zero_in_img_y), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 81, 140), 2)
|
| 360 |
+
frb = cv2.putText(frb, f"erv = {threshold}", (10, thr_in_img_y), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 81, 140), 2)
|
| 361 |
+
fr[new_fh-(fh+shift_edge):, :, :] = frb
|
| 362 |
+
|
| 363 |
+
if eyes_ratio_v_vec[i] > threshold:
|
| 364 |
+
fr[-(fh+red_area_h):-fh, :, 2] = 255
|
| 365 |
+
fr[:red_area_h, :, 2] = 255
|
| 366 |
+
fr[:-fh, :red_area_w, 2] = 255
|
| 367 |
+
fr[:-fh, -red_area_w:, 2] = 255
|
| 368 |
+
|
| 369 |
+
win_name = "User"
|
| 370 |
+
|
| 371 |
+
if len(ey.monitors) == 1:
|
| 372 |
+
x_disp = 0
|
| 373 |
+
else:
|
| 374 |
+
x_disp = ey.monitors[0].width
|
| 375 |
+
ey.big_win(win_name, x_disp)
|
| 376 |
+
cv2.imshow(win_name, fr)
|
| 377 |
+
q = cv2.waitKey(100)
|
| 378 |
+
if q == ord('q') or q == ord('Q'):
|
| 379 |
+
break
|
| 380 |
+
i += 1
|
| 381 |
+
cv2.destroyAllWindows()
|
| 382 |
+
else:
|
| 383 |
+
print(f"Data does not exist in {smp_dir}")
|
codes/show.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module is for seeing the camera stream. This module includes Camera class."""
|
| 2 |
+
|
| 3 |
+
import cv2
|
| 4 |
+
import time
|
| 5 |
+
from codes.base import eyeing as ey
|
| 6 |
+
from screeninfo import get_monitors
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
monitors = get_monitors()
|
| 10 |
+
|
| 11 |
+
class Camera(object):
|
| 12 |
+
running = True
|
| 13 |
+
def raw(self, camera_id):
|
| 14 |
+
"""
|
| 15 |
+
See camera stream
|
| 16 |
+
|
| 17 |
+
Parameters:
|
| 18 |
+
camera_id: Camera ID
|
| 19 |
+
|
| 20 |
+
Returns:
|
| 21 |
+
None
|
| 22 |
+
"""
|
| 23 |
+
frame_size, _, _, _ = ey.get_camera_properties(camera_id)
|
| 24 |
+
cap = ey.get_camera(camera_id, frame_size)
|
| 25 |
+
ey.pass_frames(cap, 100)
|
| 26 |
+
i = 0.0
|
| 27 |
+
win_name = "Webcam"
|
| 28 |
+
if len(monitors) == 1:
|
| 29 |
+
x_disp = 0
|
| 30 |
+
else:
|
| 31 |
+
x_disp = monitors[0].width
|
| 32 |
+
ey.big_win(win_name, x_disp)
|
| 33 |
+
t0 = time.perf_counter()
|
| 34 |
+
print("Showing camera..")
|
| 35 |
+
while self.running:
|
| 36 |
+
frame_success, frame, _ = ey.get_frame(cap)
|
| 37 |
+
if frame_success:
|
| 38 |
+
i += 1
|
| 39 |
+
cv2.imshow(win_name, frame)
|
| 40 |
+
q = cv2.waitKey(1)
|
| 41 |
+
if q == ord('q') or q == ord('Q'):
|
| 42 |
+
break
|
| 43 |
+
|
| 44 |
+
cv2.destroyAllWindows()
|
| 45 |
+
|
| 46 |
+
fps = ey.get_time(i, t0, True)
|
| 47 |
+
print(f"FPS : {fps}")
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def features(self, camera_id):
|
| 51 |
+
"""
|
| 52 |
+
See camera stream with landmarks and features
|
| 53 |
+
|
| 54 |
+
Parameters:
|
| 55 |
+
camera_id: Camera ID
|
| 56 |
+
|
| 57 |
+
Returns:
|
| 58 |
+
None
|
| 59 |
+
"""
|
| 60 |
+
some_landmarks_ids = ey.get_some_landmarks_ids()
|
| 61 |
+
|
| 62 |
+
(
|
| 63 |
+
frame_size,
|
| 64 |
+
camera_matrix,
|
| 65 |
+
dst_cof,
|
| 66 |
+
pcf
|
| 67 |
+
) = ey.get_camera_properties(camera_id)
|
| 68 |
+
|
| 69 |
+
face_mesh = ey.get_mesh()
|
| 70 |
+
|
| 71 |
+
cap = ey.get_camera(camera_id, frame_size)
|
| 72 |
+
ey.pass_frames(cap, 100)
|
| 73 |
+
win_name = "Features"
|
| 74 |
+
if len(monitors) == 1:
|
| 75 |
+
x_disp = 0
|
| 76 |
+
else:
|
| 77 |
+
x_disp = monitors[0].width
|
| 78 |
+
ey.big_win(win_name, x_disp)
|
| 79 |
+
t0 = time.perf_counter()
|
| 80 |
+
i = 0
|
| 81 |
+
print("Showing features..")
|
| 82 |
+
while self.running:
|
| 83 |
+
frame_success, frame, frame_rgb = ey.get_frame(cap)
|
| 84 |
+
if frame_success:
|
| 85 |
+
results = face_mesh.process(frame_rgb)
|
| 86 |
+
features_success, frame = ey.get_model_inputs(
|
| 87 |
+
frame,
|
| 88 |
+
frame_rgb,
|
| 89 |
+
results,
|
| 90 |
+
camera_matrix,
|
| 91 |
+
pcf,
|
| 92 |
+
frame_size,
|
| 93 |
+
dst_cof,
|
| 94 |
+
some_landmarks_ids,
|
| 95 |
+
True
|
| 96 |
+
)[:2]
|
| 97 |
+
|
| 98 |
+
if features_success:
|
| 99 |
+
i += 1
|
| 100 |
+
cv2.imshow(win_name, frame)
|
| 101 |
+
q = cv2.waitKey(1)
|
| 102 |
+
if q == ord('q') or q == ord('Q'):
|
| 103 |
+
break
|
| 104 |
+
|
| 105 |
+
cap.release()
|
| 106 |
+
cv2.destroyAllWindows()
|
| 107 |
+
|
| 108 |
+
fps = ey.get_time(i, t0, True)
|
| 109 |
+
print(f"FPS : {fps}")
|
codes/tune_models_params.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module is for retraining the base (et) models. It contains the Tuning class."""
|
| 2 |
+
|
| 3 |
+
from tensorflow.keras.callbacks import EarlyStopping
|
| 4 |
+
from tensorflow.keras.models import load_model
|
| 5 |
+
from tensorflow.keras.utils import to_categorical
|
| 6 |
+
from sklearn.utils import shuffle
|
| 7 |
+
from joblib import load as j_load
|
| 8 |
+
from joblib import dump as j_dump
|
| 9 |
+
import pickle
|
| 10 |
+
import numpy as np
|
| 11 |
+
import os
|
| 12 |
+
from codes.base import eyeing as ey
|
| 13 |
+
from openpyxl import Workbook
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
PATH2ROOT_ABS = os.path.dirname(__file__) + "/../"
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class Tuning(object):
    """Retrains the base eye-tracking (et) models on a subject's calibration data."""

    @staticmethod
    def et_mdl(
        subjects,
        models_list=None,
        r_train_list=None,
        n_epochs_patience=None,
        trainable_layers=None,
        shift_samples=None,
        blinking_threshold='uo',
        show_model=False,
        delete_files=False
    ):
        """
        You can retrain the base (et) models using this method. You have the possibility to retrain the models with various hyper parameters
        to see which one has better performance. So you can enter lists as inputs.

        Parameters:
            subjects: list of subjects
            models_list: a list of models' number (default [1])
            r_train_list: the ratio of the data that you want for training (default [0.99])
            n_epochs_patience: number of epochs and patience, as [epochs, patience] pairs (default [[3, 3]])
            trainable_layers: the number of trainable layers (ending layers of the network) (default [1])
            shift_samples: shift the inputs to align with outputs. This is because of the delay.
            blinking_threshold: blinking threshold for removing the samples that are during blink
            show_model: show the structure of the model
            delete_files: delete the dataset after retraining

        Returns:
            None
        """
        # Fix: resolve defaults at call time instead of using mutable default
        # arguments (same values, same behavior for all existing callers).
        models_list = [1] if models_list is None else models_list
        r_train_list = [0.99] if r_train_list is None else r_train_list
        n_epochs_patience = [[3, 3]] if n_epochs_patience is None else n_epochs_patience
        trainable_layers = [1] if trainable_layers is None else trainable_layers

        print("\nStarting to retrain eye_tracking model...")
        x1_scaler, x2_scaler, y_scaler = j_load(ey.scalers_dir + f"scalers_et_main.bin")  # Loading the scaler

        # Going through each subject's folder
        kk = 0  # index into shift_samples (one entry per subject)
        for num in subjects:
            print(f"Subject number {num} in process...")
            sbj_dir = ey.create_dir([ey.subjects_dir, f"{num}"])

            # ### Retraining 'eye_tracking' model with subject calibration data
            clb_dir = ey.create_dir([sbj_dir, ey.CLB])
            if ey.file_existing(clb_dir, ey.X1 + ".pickle"):
                print(f"Loading subject data in {clb_dir}")
                (
                    x1_load0,
                    x2_load0,
                    y_load0,
                    t_mat,
                    eyes_ratio
                ) = ey.load(clb_dir, [ey.X1, ey.X2, ey.Y, ey.T, ey.ER])
                if shift_samples:
                    if shift_samples[kk]:
                        # Align inputs with outputs: drop the first
                        # shift_samples[kk] inputs and the last
                        # shift_samples[kk] outputs of each recording.
                        ii = 0
                        for (x11, x21, y1, t1, eyr1) in zip(x1_load0, x2_load0, y_load0, t_mat, eyes_ratio):
                            t_mat[ii] = t1[:-shift_samples[kk]]
                            x1_load0[ii] = x11[shift_samples[kk]:]
                            x2_load0[ii] = x21[shift_samples[kk]:]
                            y_load0[ii] = y1[:-shift_samples[kk]]
                            eyes_ratio[ii] = eyr1[shift_samples[kk]:]
                            ii += 1
                    kk += 1
                er_dir = ey.create_dir([sbj_dir, ey.ER])
                # NOTE(review): blinking_threshold is rebound to the resolved
                # value here, so later subjects receive the resolved value
                # instead of the original string — confirm get_threshold
                # accepts both forms.
                blinking_threshold = ey.get_threshold(er_dir, blinking_threshold)

                blinking = ey.get_blinking(t_mat, eyes_ratio, blinking_threshold)[1]

                # Removing the samples that are during blink
                x1_load = []
                x2_load = []
                y_load = []
                k1 = 0  # kept (non-blinking) samples
                k2 = 0  # all samples
                for (x11, x21, y1, b1) in zip(x1_load0, x2_load0, y_load0, blinking):
                    for (x10, x20, y0, b0) in zip(x11, x21, y1, b1):
                        k2 += 1
                        if not b0:
                            k1 += 1
                            x1_load.append(x10)
                            x2_load.append(x20)
                            y_load.append(y0)

                print(f"All samples of subjects: {k2}, Not blinking: {k1}")
                x1_load = np.array(x1_load)
                x2_load = np.array(x2_load)
                y_load = np.array(y_load)
                n_smp, frame_h, frame_w = x1_load.shape[:-1]
                print(f"Samples number: {n_smp}")

                # Displaying data

                # ### Preparing modified calibration data to feeding in eye_tracking model. Going through each model to predict the output
                print("Normalizing modified calibration data to feeding in eye_tracking model...")
                for mdl_num in models_list:
                    print("Loading public eye_tracking models...")
                    mdl_name = ey.MDL + f"{mdl_num}"
                    info = ey.load(ey.et_trained_dir, [mdl_name])[0]
                    x2_chosen_features = info["x2_chosen_features"]
                    x2_new = x2_load[:, x2_chosen_features]

                    x1 = x1_load / x1_scaler
                    x2 = x2_scaler.transform(x2_new)

                    # Shuffling and splitting data to train and val
                    x1_shf, x2_shf, y_hrz_shf, y_vrt_shf = shuffle(x1, x2, y_load[:, 0], y_load[:, 1])

                    # Going through each training ratio in the ratio list
                    for rt in r_train_list:
                        n_train = int(rt * n_smp)
                        x1_train, x2_train = x1_shf[:n_train], x2_shf[:n_train]
                        x1_val, x2_val = x1_shf[n_train:], x2_shf[n_train:]
                        y_hrz_train, y_vrt_train = y_hrz_shf[:n_train], y_vrt_shf[:n_train]
                        y_hrz_val, y_vrt_val = y_hrz_shf[n_train:], y_vrt_shf[n_train:]

                        x_train = [x1_train, x2_train]
                        x_val = [x1_val, x2_val]

                        print(x1_train.shape, x1_val.shape, y_hrz_train.shape, y_hrz_val.shape,
                              x2_train.shape, x2_val.shape, y_vrt_train.shape, y_vrt_val.shape)

                        # Callback for training. Going through each epoch and patience in epochs list
                        for nep in n_epochs_patience:
                            cb = EarlyStopping(patience=nep[1], verbose=1, restore_best_weights=True)

                            # Going through each number in trainable_layers list
                            for tl in trainable_layers:
                                # Retraining the models and saving them.
                                # Models are reloaded per configuration so each
                                # run starts from the pristine base weights.
                                model_hrz = load_model(ey.et_trained_dir + mdl_name + "-hrz.h5")
                                model_vrt = load_model(ey.et_trained_dir + mdl_name + "-vrt.h5")
                                info["trained_mdl_num"] = mdl_num
                                info["r_retrain"] = rt
                                info["n_epochs_patience_retrain"] = nep
                                info["trainable_layers"] = tl
                                # Freeze everything except the last tl layers.
                                for (layer_hrz, layer_vrt) in zip(model_hrz.layers[:-tl], model_vrt.layers[:-tl]):
                                    layer_hrz.trainable = False
                                    layer_vrt.trainable = False

                                if show_model:
                                    print(model_hrz.summary())

                                sbj_mdl_dir = ey.create_dir([sbj_dir, ey.MDL])
                                retrained_mdl_num = ey.find_max_mdl(sbj_mdl_dir, b=-7) + 1

                                print(f"\n<<<<<<< {retrained_mdl_num}-sbj:{num}-model-hrz:{mdl_num}-r_train:{rt}-epoch_patience:{nep}-trainable_layers:{tl} >>>>>>>>")
                                model_hrz.fit(x_train,
                                              y_hrz_train * y_scaler,
                                              validation_data=(x_val, y_hrz_val * y_scaler),
                                              epochs=nep[0],
                                              callbacks=cb)
                                hrz_train_loss = model_hrz.evaluate(x_train, y_hrz_train * y_scaler)
                                hrz_val_loss = model_hrz.evaluate(x_val, y_hrz_val * y_scaler)
                                info["hrz_retrain_train_loss"] = hrz_train_loss
                                info["hrz_retrain_val_loss"] = hrz_val_loss
                                retrained_mdl_name = ey.MDL + f"{retrained_mdl_num}"
                                mdl_hrz_dir = sbj_mdl_dir + retrained_mdl_name + "-hrz.h5"
                                model_hrz.save(mdl_hrz_dir)
                                print("Saving model-et-hrz in " + mdl_hrz_dir)

                                print(f"\n<<<<<<< {retrained_mdl_num}-sbj:{num}-model-vrt:{mdl_num}-r_train:{rt}-epoch_patience:{nep}-trainable_layers:{tl} >>>>>>>>")
                                model_vrt.fit(x_train,
                                              y_vrt_train * y_scaler,
                                              validation_data=(x_val, y_vrt_val * y_scaler),
                                              epochs=nep[0],
                                              callbacks=cb)
                                vrt_train_loss = model_vrt.evaluate(x_train, y_vrt_train * y_scaler)
                                vrt_val_loss = model_vrt.evaluate(x_val, y_vrt_val * y_scaler)
                                info["vrt_retrain_train_loss"] = vrt_train_loss
                                info["vrt_retrain_val_loss"] = vrt_val_loss
                                mdl_vrt_dir = sbj_mdl_dir + retrained_mdl_name + "-vrt.h5"
                                model_vrt.save(mdl_vrt_dir)
                                print("Saving model-et-vrt in " + mdl_vrt_dir)

                                ey.save([info], sbj_mdl_dir, [retrained_mdl_name])

                if delete_files:
                    ey.remove(clb_dir)
            else:
                print(f"Data does not exist in {clb_dir}")
|
| 197 |
+
|
| 198 |
+
|
codes/work.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from PyQt5.QtCore import pyqtSignal, QObject
|
| 2 |
+
from codes.show import Camera
|
| 3 |
+
from codes.calibration import Clb
|
| 4 |
+
from codes.sampling import Smp
|
| 5 |
+
from codes.tune_models_params import Tuning
|
| 6 |
+
from codes.eye_track import EyeTrack
|
| 7 |
+
from codes.see_data import See
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# Change parameters use_io, clb_grd for accuracy and del_files, nep
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Worker(QObject, Camera, Clb, Smp, Tuning, EyeTrack, See):
    """Qt worker that runs the selected eye-tracking pipeline steps in order.

    The concrete step implementations come from the mixed-in classes
    (camera preview, calibration, sampling, retraining, prediction,
    visualization).  The GUI sets the attributes below on the instance,
    then calls do_work on a worker thread.
    """

    # --- parameters set by the GUI before do_work is called ---
    num = 0         # subject number
    camera_id = 0   # webcam id
    thb = 0.0       # blinking threshold
    ss = 0          # shift samples (input/output alignment)
    # NOTE(review): mfr is indexed as mfr[0]/mfr[1] in the gfx step, so the
    # GUI is expected to replace this default with an (x, y) pair — confirm.
    mfr = 0.0       # merged fixations ratio
    dft = 0.0       # discard fixation time
    st = 0.0        # saccade threshold

    # --- step-selection flags (checkboxes in the GUI) ---
    cam = False   # show camera with features
    clb = False   # calibration
    smp = False   # sampling
    acc = False   # accuracy testing
    tbt = False   # tune blinking threshold
    mdl = False   # retrain eye-tracking model
    uio = False   # use the in/out model when getting pixels
    gps = False   # get pixels (sampling data)
    gpa = False   # get pixels (testing data)
    sps = False   # see pixels (sampling data)
    spa = False   # see pixels (testing data)
    gfx = False   # get fixations

    # Cleared externally to abort the remaining steps.
    running = True

    # One "started" signal per step, plus a final "finished" signal.
    cam_started = pyqtSignal()
    clb_started = pyqtSignal()
    smp_started = pyqtSignal()
    acc_started = pyqtSignal()
    tbt_started = pyqtSignal()
    mdl_started = pyqtSignal()
    gps_started = pyqtSignal()
    gpa_started = pyqtSignal()
    sps_started = pyqtSignal()
    spa_started = pyqtSignal()
    gfx_started = pyqtSignal()
    finished = pyqtSignal()

    def __init__(self):
        super().__init__()

    def do_work(self):
        """Run every enabled step in sequence, emitting per-step signals.

        Each step is skipped once ``running`` has been cleared; ``finished``
        is always emitted at the end and ``running`` is reset for the next run.
        """
        if self.cam and self.running:
            print("\nCamera")
            self.cam_started.emit()
            self.features(camera_id=self.camera_id)
        if self.clb and self.running:
            print("\nCalibration")
            self.clb_started.emit()
            # NOTE(review): self.info and self.clb_grid are expected to be
            # assigned by the GUI before starting — they have no class default.
            self.et(
                num=self.num,
                camera_id=self.camera_id,
                info=self.info,
                clb_grid=self.clb_grid
            )
        if self.smp and self.running:
            print("\nSampling")
            self.smp_started.emit()
            self.sampling(
                num=self.num,
                camera_id=self.camera_id,
                gui=True
            )
        if self.acc and self.running:
            print("\nTesting")
            self.acc_started.emit()
            self.accuracy(
                num=self.num,
                camera_id=self.camera_id,
                clb_grid=(5, 7, 20)
            )
        if self.tbt and self.running:
            print("\nSee user blinking")
            # Fix: tbt_started was declared but never emitted, unlike every
            # other step's "started" signal.
            self.tbt_started.emit()
            self.user_face(
                num=self.num,
                threshold=self.thb,
                save_threshold=True
            )
        if self.mdl and self.running:
            print("\nTuning params")
            self.mdl_started.emit()
            self.et_mdl(
                subjects=[self.num],
                shift_samples=[self.ss],
                delete_files=False
            )
        if self.gps and self.running:
            print("\nGetting pixels")
            self.gps_started.emit()
            self.get_pixels(
                subjects=[self.num],
                shift_samples=[self.ss],
                use_io=self.uio,
                delete_files=False
            )
        if self.gpa and self.running:
            print("\nGetting test pixels")
            self.gpa_started.emit()
            self.get_pixels(
                subjects=[self.num],
                target_fol="acc",
                shift_samples=[self.ss],
                use_io=True,
                delete_files=False
            )
        if self.sps and self.running:
            print("\nSeeing sampling data")
            self.sps_started.emit()
            self.pixels_smp(num=self.num, show_in_all_monitors=True)
        if self.spa and self.running:
            print("\nSeeing testing data")
            self.spa_started.emit()
            self.pixels_acc(
                num=self.num,
                show_in_all_monitors=True
            )
        if self.gfx and self.running:
            print("\nGetting fixations")
            self.gfx_started.emit()
            self.get_fixations(
                subjects=[self.num],
                t_discard=self.dft,
                x_merge=self.mfr[0],
                y_merge=self.mfr[1],
                vx_thr=self.st,
                vy_thr=self.st
            )

        print("\nEye Tracking finished!")
        self.finished.emit()
        self.running = True
|
crt_exe_gui.spec
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- mode: python ; coding: utf-8 -*-
# Activate environment, then type "pyinstaller crt_exe_gui.spec" in command line
# to generate .exe file for gui
# NOTE: Analysis, PYZ and EXE are injected into this file's namespace by
# PyInstaller when the spec is executed; they are not Python imports.


block_cipher = None


# Analyze the GUI entry script; the datas list bundles non-code resources
# that the app loads at runtime (mediapipe model files, trained models,
# docs/images, misc other_files) into the executable.
a = Analysis(['main_gui.py'],
             pathex=[],
             binaries=[],
             datas=[('env/Lib/site-packages/mediapipe/modules', 'mediapipe/modules'),
                    ('models', 'models'),
                    ('docs', 'docs'),
                    ('other_files', 'other_files')],
             hiddenimports=[],
             hookspath=[],
             hooksconfig={},
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)
# Archive of the collected pure-Python modules.
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)

# Build a single-file executable ("Owleye") with a console window kept open
# (console=True) so the program's print output remains visible.
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='Owleye',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          upx_exclude=[],
          runtime_tmpdir=None,
          console=True,
          icon='docs/images/logo.ico',
          disable_windowed_traceback=False,
          target_arch=None,
          codesign_identity=None,
          entitlements_file=None )
|
docs/USE_APP.md
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
## This document shows you how to use Owleye application
|
| 2 |
+
|
| 3 |
+
When you run the program, a window like the one below will appear:
|
| 4 |
+
|
| 5 |
+

|
| 6 |
+
|
| 7 |
+
Also, after running, a folder called "subjects" will be created. In this folder, a folder will be created for each subject based on the subject number in the UI. Then, you can adjust the items that you need, and finally "start" the program. In following, the items will be explained.
|
| 8 |
+
|
| 9 |
+
**1. Subject number:** A specific number that you enter as the ID of the subject.
|
| 10 |
+
|
| 11 |
+
**2. Camera ID:** Usually it is 0, but you can try other numbers if you have several webcams on your system.
|
| 12 |
+
|
| 13 |
+
**3. Camera:** by activating this checkbox, after clicking on the "start" button, the webcam stream will be shown. So you can see yourself and the landmarks that already are detected on your face.
|
| 14 |
+
|
| 15 |
+
**4. Calibration:** By activating this checkbox, after clicking on the "start" button, the program creates a folder with the name of the subject number. Then, it will be ready to collect data (inputs and outputs of the models of [the sixth block in owleye's structure](https://github.com/owleye/Owleye/blob/main/docs/images/Owleye%20structure.png)) from the user. So, a white point will appear on the screen. As soon as you press the "SPACE" key on the keyboard, the program starts collecting data for a particular time. During this time, the background becomes black and the user should look at the white point. When the data collection ends, the screen will become gray and the point will go to another location. The user can look anywhere and actually rest in this situation. Again, the "SPACE" key should be pressed for data collection, and the whole process described above is repeated. Also, a folder called "clb" (calibration) will be created in the user's folder. In this folder, 5 ".pickle" files will be created which were collected:
|
| 16 |
+
|
| 17 |
+
- t.pickle: Time
|
| 18 |
+
- x1.pickle: Eyes images
|
| 19 |
+
- x2.pickle: Face vectors
|
| 20 |
+
- y.pickle: White point locations
|
| 21 |
+
- er.pickle: Eye aspect ratio vector (Go to [11th section](https://github.com/owleye/Owleye/blob/main/docs/USE_APP.md#:~:text=from%20the%20default.-,11.%20Threshold,-%3A%20To%20detect) to know about this)
|
| 22 |
+
|
| 23 |
+
**5. Subject name:** This item is arbitrary. you can enter the user's name. This is activated when you want to do calibration.
|
| 24 |
+
|
| 25 |
+
**6. Description:** This section is arbitrary too. You can enter any information that your user has. This is activated when you want to do calibration.
|
| 26 |
+
|
| 27 |
+
**7. Calibration grid:** This item can have three types of integer numbers.
|
| 28 |
+
|
| 29 |
+
- 2 numbers (n, c): The white point starts to move just horizontally in the screen in n rows. Each row contains c locations that the point goes through.
|
| 30 |
+
- 3 numbers (n, m , c): The white point does not move. just in a grid by size of n x m (like a matrix) remains fixed in each location.
|
| 31 |
+
- 4 numbers (n, c, m, d): The white point moves both horizontally and vertically, in n rows with c locations and m columns with d locations.
|
| 32 |
+
|
| 33 |
+
**8. Sampling:** If you activate this checkbox, the program will start collecting data from you while you are looking in the screen. So, this item is for using the program for your goal. Also a folder called "smp" (sampling) will be created in the user's folder. In this folder these files will be made:
|
| 34 |
+
- t.pickle
|
| 35 |
+
- sys_time.pickle
|
| 36 |
+
- x1.pickle
|
| 37 |
+
- x2.pickle
|
| 38 |
+
- er.pickle
|
| 39 |
+
- fv.pickle: face vector, this is a series of face images for seeing the face during the sampling, to choose the blinking threshold.
|
| 40 |
+
|
| 41 |
+
**9. Testing:** This checkbox is for seeing how well Owleye works. If you activate this checkbox, after starting the program, it creates a folder named "acc" (accuracy). The program will start showing you a white point that you must look at that. Actually, the user is looking in the white point. So, it is clear that what should be the best possible prediction of Owleye (The position of white points). Also, you can see mean squared error (MSE). In this folder these files will be made:
|
| 42 |
+
- t.pickle
|
| 43 |
+
- x1.pickle
|
| 44 |
+
- x2.pickle
|
| 45 |
+
- y.pickle
|
| 46 |
+
- er.pickle
|
| 47 |
+
|
| 48 |
+
**10. Tune blinking threshold:** By activating this checkbox, you can change the blinking threshold (11) from the default.
|
| 49 |
+
|
| 50 |
+
**11. Threshold:** To detect blinks, the eye aspect ratio (EAR) method is used. EAR is calculated for the samples collected during sampling, giving a vector over time. Then, using that vector, the velocity of EAR is calculated as a vector. Values above the determined threshold are considered a blink. The default threshold value was obtained by trial and error with my face; every face can have a different threshold.
|
| 51 |
+
|
| 52 |
+
**12. Tune eye tracking model:** By activating this checkbox, after pressing "start" button, the program starts retraining the two base models of [the sixth block in the Owleye's structure](https://github.com/owleye/Owleye/blob/main/docs/images/Owleye%20structure.png) which already are in the models folder in root. After this, a folder called "mdl" (modeling) will be created in the subject's folder. In this folder, there are 3 files:
|
| 53 |
+
- mdl1.pickle: scaler of the face vector
|
| 54 |
+
- mdl1-hrz.h5: model for predicting the horizontal direction
|
| 55 |
+
- mdl1-vrt.h5: model for predicting the vertical direction
|
| 56 |
+
|
| 57 |
+
**13. SS:** Abbreviation of shift samples. While the white point is moving during calibration, the inputs and outputs (point locations) are not exactly aligned. Because of processing problems, the images (inputs) are a little later than the outputs. For example, if this parameter is equal to 20, it means you want to shift inputs in 20 samples. So, the 21th input will be aligned with 1st output.
|
| 58 |
+
|
| 59 |
+
**14. Sampling data:** It means the later calculations are for the sampling data, not testing data.
|
| 60 |
+
|
| 61 |
+
**15. Test data:** It means the later calculations are for the testing data, not sampling data.
|
| 62 |
+
|
| 63 |
+
**16. Use IO model:** If the user activates this checkbox, after the program predicted the sampling data, it will remove the samples that are out of the screens range.
|
| 64 |
+
|
| 65 |
+
**17. Get pixels:** If the user activates this checkbox, the program will predict the sampling or testing data. If the sampling data is chosen, these data will be created in the "smp" folder:
|
| 66 |
+
- y_prd.pickle
|
| 67 |
+
- eye_track.xlsx: An excel file containing the eye movements and times during sampling. This is the file that you should use for your future analysis.
|
| 68 |
+
- fixations.xlsx: An excel file containing the fixations during sampling. This is the file that you should use for your future analysis.
|
| 69 |
+
|
| 70 |
+
Otherwise, if the test data is chosen, these data will be created:
|
| 71 |
+
- y_mdf.pickle: This is same as the actual y, but the points that are detected during a blink, are removed.
|
| 72 |
+
- y_prd_mdf.pickle: This is the predictions of Owleye, while the blink moments are removed.
|
| 73 |
+
|
| 74 |
+
**18. See pixels:** If the user activates this checkbox, the program will show the locations that the user has looked.
|
| 75 |
+
|
| 76 |
+
**19. Get fixations:** The program will calculate the fixations of the user by three parameters of 20, 21, and 23.
|
| 77 |
+
|
| 78 |
+
**20. ST:** Abbreviation for saccade threshold. To separate fixations, it is needed to firstly calculate the velocity of eye movement. So, by putting a threshold on the velocity, it's possible to compute the moments that the user changed their viewpoint.
|
| 79 |
+
|
| 80 |
+
**21. DFT:** Abbreviation for discard fixation time. Fixations that last less than this time, will be removed.
|
| 81 |
+
|
| 82 |
+
**22. MFR:** Abbreviation for merged fixations ratio. Fixations that are close together, will be added. Two numbers are for two directions.
|
| 83 |
+
|
| 84 |
+
**23. Start:** This button starts the program. If you activate one of the checkboxes, the program just does that specific one. If you select several checkboxes, the program runs all of the selected ones, one after another.
|
| 85 |
+
|
| 86 |
+
**24. Stop:** This button stops the program in every step that it's running.
|
docs/images/468_landmarks.jpg
ADDED
|
|
docs/images/Owleye structure.png
ADDED
|
|
docs/images/logo.ico
ADDED
|
|
main.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""The project "Owleye" turns your webcam into an eye tracker. You can use it to know which point on the screen you are looking at.

The project has several parts that you can get familiar with, using the documentation that I've provided in the README.md and docs/USE_APP.md files.
Before using this project, make sure that you have read that documentation. main.py is like main_gui.py with some advantages.
Using main.py you can retrain the models for several subjects all together.
Also, you can do predictions for all subjects together. You can train the models using this file, but with main_gui.py you can just retrain
the models. In addition, you can test the latency of Owleye with main.py. To sum up, use main_gui.py to collect calibration data and sampling data
or for seeing data, but use main.py for training, retraining, and predictions for a group of subjects whose data you already have.
In the following, you can uncomment each section (between star signs **) to do your work.
Also, for a faster understanding of the code, it is recommended to print the variables' shapes. There are a lot of lists and lists of lists
that may confuse you.
"""


from codes.show import Camera
from codes.calibration import Clb
from codes.sampling import Smp
from codes.see_data import See
from codes.crt_train_models import Modeling
from codes.tune_models_params import Tuning
from codes.eye_track import EyeTrack

# *********************** PARAMETERS ***********************
NUMBER = 6  # The subject number that the single-subject sections below operate on

# Subject numbers used for the group operations: you can retrain the models and
# predict the sampling data for several users at the same time.
TRAINING_SUBJECTS = [71, 72, 73, 74, 81, 82, 83, 84, 85, 86, 122, 123,
                     124, 125, 126, 144, 145, 146, 147, 201, 203, 204, 206, 207, 211,
                     212, 213, 214, 215, 216, 217, 221, 222, 224]
CAMERA_ID = 0  # Check the camera id by uncommenting Camera().raw()
SHIFT_SAMPLES = 0  # Because of the delay that the sampling has, you can shift inputs to reach the appropriate output

# # *********************** SEE CAMERA ***********************
# Camera().raw(camera_id=CAMERA_ID)  # You can see the webcam stream
# Camera().features(camera_id=CAMERA_ID)
# """You can see the webcam stream with the detected landmarks. You can check whether Mediapipe and Opencv work properly or not."""

# # *********************** CALIBRATION **********************
NAME = "Mostafa Lotfi"
Descriptions = "Test for shifting"
INFO = [NAME, Descriptions]
# NOTE(review): presumably (rows, <param>, columns, <param>) of the calibration
# grid -- confirm against Clb().et's clb_grid handling.
CALIBRATION_GRID = 4, 200, 6, 100

# Clb().et(num=NUMBER, camera_id=CAMERA_ID, info=INFO, clb_grid=CALIBRATION_GRID)
# """This method collects data (input and output) for eye tracking"""
# Clb().out(num=NUMBER, camera_id=CAMERA_ID, n_smp_in_cls=100)
# """This method collects data of the subjects while looking out of the screen.
# This is used for the in-out model, to see whether the user is looking inside the screen or outside of it. This is not in main_gui.py"""

# Clb().calculate_threshold(num=NUMBER, camera_id=CAMERA_ID)
# """This method collects data for calculation of the blink threshold. This is not in main_gui.py"""

# # *********************** SAMPLING *************************
# Smp().sampling(num=NUMBER, camera_id=CAMERA_ID, gui=False)  # The method collects inputs during sampling time

# # *********************** ACCURACY **************************
# Smp().accuracy(num=NUMBER, camera_id=CAMERA_ID, clb_grid=(5, 7, 30))
# """The method collects data (input and output). This is for testing Owleye's performance."""

# # *********************** LATENCY **************************
# Smp().latency(num=NUMBER, camera_id=CAMERA_ID)
# """This method collects data to calculate the delay of Owleye. When it's run, you should look at the left and right side of the screen
# based on the color. This method is not in main_gui.py"""

# # ********************* SEE FEATURES ***********************
# """This method is to see the data collected in the previous sections. It isn't in main_gui.py"""
# See().data_features(num=NUMBER, target_fol="clb")
# See().data_features(num=NUMBER, target_fol="io")
# See().data_features(num=NUMBER, target_fol="smp")
# See().data_features(num=NUMBER, target_fol="acc")
# See().data_features(num=NUMBER, target_fol="ltn")

# See().user_face(num=NUMBER, threshold=5, save_threshold=True)  # See the user's face during sampling, to tune the eye ratio threshold

# """This method is for plotting the eye aspect ratio in various data."""
# See().blinks_plot(num=NUMBER, target_fol="er")
# See().blinks_plot(num=NUMBER, threshold="ao", target_fol="clb")
# See().blinks_plot(num=NUMBER, threshold=9, target_fol="smp")
# See().blinks_plot(num=NUMBER, threshold=4.5, target_fol="acc")

# # ***************** CREATE BASE MODELS *******************
# """This section is just for creation of the eye tracking and in-out models (not training). You can change the structure in the method.
# This method is not in main_gui.py"""
# Modeling().create_io()
# Modeling().get_models_information(show_model=True)
# Modeling().create_et()
# Modeling().get_models_information(io=False, show_model=True)

# # ****************** TRAIN BASE MODELS *******************
# """You can train the base models in this section. You should enter the list of subjects whose data is used to create the model.
# This method is not in main_gui.py"""
# Modeling().train_io(
#     subjects=[1, 2],
#     models_list=[1, 2],
#     min_max_brightness_ratio=[[0.65, 1.45], [0.6, 1.5]],
#     r_train_list=[0.8, 0.9],
#     n_epochs_patience=[[2, 1], [3, 2]],
#     save_scaler=False,
#     show_model=False)
# Modeling().get_models_information(io=True, raw=False, show_model=False)
# Modeling().train_et(subjects=TRAINING_SUBJECTS,
#     models_list=[1],
#     min_max_brightness_ratio=[[0.65, 1.45]],
#     r_train_list=[0.8],
#     n_epochs_patience=[[2, 1]],
#     shift_samples=[SHIFT_SAMPLES] * len(TRAINING_SUBJECTS),
#     blinking_threshold="d",
#     save_scaler=False,
#     show_model=False)
# Modeling().get_models_information(io=False, raw=False, show_model=False)

# # *********************** Tuning *************************
# # To retrain the base models
# Tuning().et_mdl(subjects=TRAINING_SUBJECTS,
#     models_list=[1],
#     r_train_list=[0.99],
#     n_epochs_patience=[[3, 3]],
#     trainable_layers=[1],
#     shift_samples=[SHIFT_SAMPLES] * len(TRAINING_SUBJECTS),
#     blinking_threshold='uo',
#     show_model=False,
#     delete_files=False)

# # *********************** GET PIXELS-Sampling ***********************
# # To predict the sampling pixels
# EyeTrack().get_pixels(
#     subjects=TRAINING_SUBJECTS,
#     models_list=[1],
#     target_fol="smp",
#     shift_samples=[SHIFT_SAMPLES] * len(TRAINING_SUBJECTS),
#     blinking_threshold="uo"
# )

# # ******************* GET PIXELS-Accuracy *******************
# # To predict testing pixels and calculate the loss
# EyeTrack().get_pixels(
#     subjects=[NUMBER],
#     models_list=[1],
#     target_fol="acc",
#     shift_samples=[SHIFT_SAMPLES],
#     blinking_threshold="uo"
# )
# EyeTrack().get_models_information(show_model=False)

# # ******************** GET PIXELS-Latency *****************************
# # To calculate the delay of Owleye
# EyeTrack().get_pixels(subjects=[NUMBER], models_list=[1], target_fol="ltn", shift_samples=[1])

# # ******************** GET FIXATIONS ***********************
# # To calculate fixations
# EyeTrack().get_fixations(subjects=TRAINING_SUBJECTS, n_monitors_data=3, x_merge=0.15/2, y_merge=0.18/2, vx_thr=0.8, vy_thr=0.8, t_discard=0.1)

# # ***************** SEE SAMPLING PIXELS ********************
# # To see the predictions of sampling data
# See().pixels_smp(num=NUMBER, n_monitors_data=3, show_in_all_monitors=False, win_size=(3 * 1280, 720), show_fixations=True)

# # ***************** SEE ACCURACY PIXELS *********************
# # To see the predictions of testing data
# See().pixels_acc(num=NUMBER, n_monitors_data=1)
main_gui.py
ADDED
|
@@ -0,0 +1,514 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""The project "Owleye" turns your webcam into an eye tracker. You can use it to know which point on the screen you are looking at.

The project has several parts that you can get familiar with, using the documentation that I've provided in the README.md and docs/USE_APP.md files.
Before using this project, make sure that you have read that documentation.
This file contains the code for a GUI. There are some points that you should know about a PyQt5 GUI to understand the following code.
The GUI is connected to the modules in the codes folder, using a worker; the worker gives the ability of multithreading.
For understanding the code of the eye tracker itself, it is suggested to just visit the modules in the codes folder and see how they are used in main.py.
Also, for a faster understanding of the code, it is recommended to print the variables' shapes. There are a lot of lists and lists of lists
that may confuse you.
"""


from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QThread
from codes.work import Worker
import os

# Absolute path of the directory containing this file (with a trailing "/");
# used to locate bundled resources such as the window icon regardless of the
# current working directory.
PATH2ROOT_ABS = os.path.dirname(__file__) + "/"
+
class Ui_MainWindow(object):
|
| 22 |
+
    def setupUi(self, MainWindow):
        """Build the Owleye main window.

        Creates every widget and places it in a single QGridLayout on the
        central widget, then installs the menu bar and status bar.

        NOTE(review): this is Qt-Designer-style generated code; widget
        creation order and the grid coordinates (row, col, rowSpan, colSpan)
        are kept exactly as generated.
        """
        # --- top-level window -------------------------------------------
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(362, 462)
        MainWindow.setAcceptDrops(True)
        MainWindow.setWindowIcon(QtGui.QIcon(PATH2ROOT_ABS + "docs/images/logo.ico"))
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        # --- row 0: subject number and camera id ------------------------
        self.l_num = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_num.setFont(font)
        self.l_num.setObjectName("l_num")
        self.gridLayout.addWidget(self.l_num, 0, 0, 1, 2)
        self.le_num = QtWidgets.QLineEdit(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_num.setFont(font)
        self.le_num.setObjectName("le_num")
        self.gridLayout.addWidget(self.le_num, 0, 3, 1, 2)
        self.l_cam = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_cam.setFont(font)
        self.l_cam.setObjectName("l_cam")
        self.gridLayout.addWidget(self.l_cam, 0, 5, 1, 4)
        self.le_cam = QtWidgets.QLineEdit(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_cam.setFont(font)
        self.le_cam.setObjectName("le_cam")
        self.gridLayout.addWidget(self.le_cam, 0, 9, 1, 1)
        # --- rows 1-2: "Camera" and "Calibration" task check boxes ------
        self.chb_cam = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_cam.setFont(font)
        self.chb_cam.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_cam.setObjectName("chb_cam")
        self.gridLayout.addWidget(self.chb_cam, 1, 0, 1, 2)
        self.chb_clb = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_clb.setFont(font)
        self.chb_clb.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_clb.setObjectName("chb_clb")
        self.gridLayout.addWidget(self.chb_clb, 2, 0, 1, 2)
        # --- rows 3-4: subject name and description (calibration inputs;
        #     start disabled, presumably enabled when Calibration is checked
        #     -- confirm in the slot code) --------------------------------
        self.l_name = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_name.setFont(font)
        self.l_name.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.l_name.setObjectName("l_name")
        self.gridLayout.addWidget(self.l_name, 3, 0, 1, 2)
        self.le_name = QtWidgets.QLineEdit(self.centralwidget)
        self.le_name.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(8)
        self.le_name.setFont(font)
        self.le_name.setObjectName("le_name")
        self.gridLayout.addWidget(self.le_name, 3, 3, 1, 4)
        self.l_dcp = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_dcp.setFont(font)
        self.l_dcp.setObjectName("l_dcp")
        self.gridLayout.addWidget(self.l_dcp, 4, 0, 1, 2)
        self.te_dcp = QtWidgets.QTextEdit(self.centralwidget)
        self.te_dcp.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.te_dcp.setFont(font)
        self.te_dcp.setObjectName("te_dcp")
        self.gridLayout.addWidget(self.te_dcp, 4, 3, 1, 7)
        # --- row 5: calibration grid ------------------------------------
        self.l_clg_grd = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_clg_grd.setFont(font)
        self.l_clg_grd.setObjectName("l_clg_grd")
        self.gridLayout.addWidget(self.l_clg_grd, 5, 0, 1, 3)
        self.le_clb_grd = QtWidgets.QLineEdit(self.centralwidget)
        self.le_clb_grd.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_clb_grd.setFont(font)
        self.le_clb_grd.setObjectName("le_clb_grd")
        self.gridLayout.addWidget(self.le_clb_grd, 5, 3, 1, 3)
        # --- row 6: sampling / testing task check boxes -----------------
        self.chb_smp = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_smp.setFont(font)
        self.chb_smp.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_smp.setObjectName("chb_smp")
        self.gridLayout.addWidget(self.chb_smp, 6, 0, 1, 2)
        self.chb_tst = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_tst.setFont(font)
        self.chb_tst.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_tst.setObjectName("chb_tst")
        self.gridLayout.addWidget(self.chb_tst, 6, 6, 1, 3)
        # --- row 7: blink-threshold tuning ------------------------------
        self.chb_blink = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_blink.setFont(font)
        self.chb_blink.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_blink.setObjectName("chb_blink")
        self.gridLayout.addWidget(self.chb_blink, 7, 0, 1, 5)
        self.l_blink = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_blink.setFont(font)
        self.l_blink.setObjectName("l_blink")
        self.gridLayout.addWidget(self.l_blink, 7, 6, 1, 3)
        self.le_blink = QtWidgets.QLineEdit(self.centralwidget)
        self.le_blink.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_blink.setFont(font)
        self.le_blink.setObjectName("le_blink")
        self.gridLayout.addWidget(self.le_blink, 7, 9, 1, 1)
        # --- row 8: model tuning and sample shift ("SS") ----------------
        self.chb_tune_mdl = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_tune_mdl.setFont(font)
        self.chb_tune_mdl.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_tune_mdl.setObjectName("chb_tune_mdl")
        self.gridLayout.addWidget(self.chb_tune_mdl, 8, 0, 1, 5)
        self.l_shift = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_shift.setFont(font)
        self.l_shift.setObjectName("l_shift")
        self.gridLayout.addWidget(self.l_shift, 8, 6, 1, 1)
        self.le_shift = QtWidgets.QLineEdit(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_shift.setFont(font)
        self.le_shift.setObjectName("le_shift")
        self.gridLayout.addWidget(self.le_shift, 8, 7, 1, 3)
        # --- row 9: data-source radio buttons + IO-model check box ------
        self.rb_smp = QtWidgets.QRadioButton(self.centralwidget)
        self.rb_smp.setChecked(True)  # "Sampling data" is the default source
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.rb_smp.setFont(font)
        self.rb_smp.setObjectName("rb_smp")
        self.gridLayout.addWidget(self.rb_smp, 9, 0, 1, 3)
        self.rb_tst = QtWidgets.QRadioButton(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.rb_tst.setFont(font)
        self.rb_tst.setObjectName("rb_tst")
        self.gridLayout.addWidget(self.rb_tst, 9, 3, 1, 2)
        self.chb_io = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_io.setFont(font)
        self.chb_io.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_io.setObjectName("chb_io")
        self.gridLayout.addWidget(self.chb_io, 9, 6, 1, 4)
        # --- row 10: pixel/fixation post-processing check boxes ---------
        self.chb_pxl = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_pxl.setFont(font)
        self.chb_pxl.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_pxl.setObjectName("chb_pxl")
        self.gridLayout.addWidget(self.chb_pxl, 10, 0, 1, 2)
        self.chb_see_pxl = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_see_pxl.setFont(font)
        self.chb_see_pxl.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_see_pxl.setObjectName("chb_see_pxl")
        self.gridLayout.addWidget(self.chb_see_pxl, 10, 3, 1, 2)
        self.chb_fix = QtWidgets.QCheckBox(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.chb_fix.setFont(font)
        self.chb_fix.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chb_fix.setObjectName("chb_fix")
        self.gridLayout.addWidget(self.chb_fix, 10, 6, 1, 4)
        # --- row 11: fixation parameters ST / DFT / MFR (disabled until
        #     the relevant task is selected) ------------------------------
        self.l_st = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_st.setFont(font)
        self.l_st.setObjectName("l_st")
        self.gridLayout.addWidget(self.l_st, 11, 0, 1, 1)
        self.le_st = QtWidgets.QLineEdit(self.centralwidget)
        self.le_st.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(8)
        self.le_st.setFont(font)
        self.le_st.setObjectName("le_st")
        self.gridLayout.addWidget(self.le_st, 11, 1, 1, 2)
        self.l_dft = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_dft.setFont(font)
        self.l_dft.setObjectName("l_dft")
        self.gridLayout.addWidget(self.l_dft, 11, 3, 1, 1)
        self.le_dft = QtWidgets.QLineEdit(self.centralwidget)
        self.le_dft.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(8)
        self.le_dft.setFont(font)
        self.le_dft.setObjectName("le_dft")
        self.gridLayout.addWidget(self.le_dft, 11, 4, 1, 1)
        self.l_mfr = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(10)
        self.l_mfr.setFont(font)
        self.l_mfr.setObjectName("l_mfr")
        self.gridLayout.addWidget(self.l_mfr, 11, 6, 1, 2)
        self.le_mfr = QtWidgets.QLineEdit(self.centralwidget)
        self.le_mfr.setEnabled(False)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        self.le_mfr.setFont(font)
        self.le_mfr.setObjectName("le_mfr")
        self.gridLayout.addWidget(self.le_mfr, 11, 8, 1, 2)
        # --- rows 12-13: Start / Stop buttons and the status label ------
        self.pb_start = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.pb_start.setFont(font)
        self.pb_start.setObjectName("pb_start")
        self.gridLayout.addWidget(self.pb_start, 12, 0, 1, 2)
        self.l_monitor = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(9)
        self.l_monitor.setFont(font)
        self.l_monitor.setObjectName("l_monitor")
        self.gridLayout.addWidget(self.l_monitor, 12, 2, 2, 3)
        self.pb_stop = QtWidgets.QPushButton(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.pb_stop.setFont(font)
        self.pb_stop.setObjectName("pb_stop")
        self.gridLayout.addWidget(self.pb_stop, 13, 0, 1, 2)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- menu bar and status bar ------------------------------------
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 362, 22))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)

        # Apply all user-visible texts, then auto-connect slots by name.
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
|
| 300 |
+
    def retranslateUi(self, MainWindow):
        """Set every user-visible string of the window.

        Standard Qt-Designer-generated method: routes all literals through
        QCoreApplication.translate so they can be localised. The QLineEdit
        texts double as the default parameter values (subject number 1,
        camera 0, blink threshold 4.5, etc.).
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Owleye"))
        self.l_num.setText(_translate("MainWindow", "Subject Number :"))
        self.le_num.setText(_translate("MainWindow", "1"))
        self.l_cam.setText(_translate("MainWindow", "Camera ID :"))
        self.le_cam.setText(_translate("MainWindow", "0"))
        self.chb_cam.setText(_translate("MainWindow", "Camera"))
        self.chb_clb.setText(_translate("MainWindow", "Calibration"))
        self.l_name.setText(_translate("MainWindow", "Subject Name :"))
        self.le_name.setText(_translate("MainWindow", ""))
        self.l_dcp.setText(_translate("MainWindow", "Descriptions :"))
        # Empty rich-text body for the description editor (Designer boilerplate).
        self.te_dcp.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Times New Roman\'; font-size:8.1pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'MS Shell Dlg 2\';\"></span></p></body></html>"))
        self.l_clg_grd.setText(_translate("MainWindow", "Calibration Grid :"))
        self.le_clb_grd.setText(_translate("MainWindow", "4, 200, 6, 100"))
        self.chb_smp.setText(_translate("MainWindow", "Sampling"))
        self.chb_tst.setText(_translate("MainWindow", "Testing"))
        self.chb_blink.setText(_translate("MainWindow", "Tune Blinking Threshold"))
        self.l_blink.setText(_translate("MainWindow", "Threshold :"))
        self.le_blink.setText(_translate("MainWindow", "4.5"))
        self.chb_tune_mdl.setText(_translate("MainWindow", "Tune Eye Tracking Model"))
        # SS = shift samples; ST / DFT / MFR are fixation-detection parameters
        # (abbreviations as used elsewhere in the project -- see b_start_action).
        self.l_shift.setText(_translate("MainWindow", "SS :"))
        self.le_shift.setText(_translate("MainWindow", "0"))
        self.rb_smp.setText(_translate("MainWindow", "Sampling data"))
        self.rb_tst.setText(_translate("MainWindow", "Test data"))
        self.chb_io.setText(_translate("MainWindow", "Use IO Model"))
        self.chb_pxl.setText(_translate("MainWindow", "Get Pixels"))
        self.chb_see_pxl.setText(_translate("MainWindow", "See Pixels"))
        self.chb_fix.setText(_translate("MainWindow", "Get Fixations"))
        self.l_st.setText(_translate("MainWindow", "ST :"))
        self.le_st.setText(_translate("MainWindow", "2.5"))
        self.l_dft.setText(_translate("MainWindow", "DFT :"))
        self.le_dft.setText(_translate("MainWindow", "0.3"))
        self.l_mfr.setText(_translate("MainWindow", "MFR :"))
        self.le_mfr.setText(_translate("MainWindow", "0.125, 0.165"))
        self.pb_start.setText(_translate("MainWindow", "Start"))
        self.l_monitor.setText(_translate("MainWindow", "Not Running..."))
        self.pb_stop.setText(_translate("MainWindow", "Stop"))
|
| 343 |
+
|
| 344 |
+
def do(self):
|
| 345 |
+
self.pb_start.clicked.connect(self.b_start_action)
|
| 346 |
+
self.pb_start.clicked.connect(lambda: self.pb_start.setEnabled(False))
|
| 347 |
+
self.pb_stop.clicked.connect(self.b_stop_action)
|
| 348 |
+
self.chb_clb.clicked.connect(self.clb_uncheck)
|
| 349 |
+
self.chb_blink.clicked.connect(self.blink_uncheck)
|
| 350 |
+
self.rb_smp.clicked.connect(self.smp_uncheck)
|
| 351 |
+
self.rb_tst.clicked.connect(self.tst_uncheck)
|
| 352 |
+
self.chb_fix.clicked.connect(self.fix_uncheck)
|
| 353 |
+
|
| 354 |
+
def b_start_action(self):
    """Read the user's settings from the form, configure a Worker with
    them and run it on a dedicated QThread.

    The worker runs off the GUI thread so the window stays responsive
    and the Stop button can interrupt the run.
    """
    # --- plain numeric / text fields ------------------------------------
    self.num = int(self.le_num.text())
    self.cam_id = int(self.le_cam.text())
    self.name = self.le_name.text()
    self.dcp = self.te_dcp.toPlainText()

    # Calibration grid is entered as comma-separated integers (e.g. "3, 5");
    # int() tolerates the surrounding whitespace, so a plain split suffices.
    self.clb_grid = [int(part) for part in self.le_clb_grd.text().split(",")]

    self.thb = float(self.le_blink.text())   # blink "Threshold" field
    self.ss = int(self.le_shift.text())      # "SS" field
    self.st = float(self.le_st.text())       # "ST" field
    self.dft = float(self.le_dft.text())     # "DFT" field
    # "MFR" field holds two comma-separated floats, e.g. "0.125, 0.165".
    low, high = self.le_mfr.text().split(",", 1)
    self.mfr = float(low), float(high)

    self.worker = Worker()
    # Worker gives us multithreading; without it the program could not be
    # stopped while running.

    # --- hand the collected settings to the worker ----------------------
    self.worker.num = self.num
    self.worker.camera_id = self.cam_id
    self.worker.info = (self.name, self.dcp)
    self.worker.clb_grid = self.clb_grid
    self.worker.thb = self.thb
    self.worker.ss = self.ss
    self.worker.st = self.st
    self.worker.dft = self.dft
    self.worker.mfr = self.mfr

    # Checkbox states (2 == Qt.Checked) select which pipeline stages run.
    if self.chb_cam.checkState() == 2:
        self.worker.cam = True
    if self.chb_clb.checkState() == 2:
        self.worker.clb = True
    if self.chb_smp.checkState() == 2:
        self.worker.smp = True
    if self.chb_tst.checkState() == 2:
        self.worker.acc = True
    if self.chb_blink.checkState() == 2:
        self.worker.tbt = True
    if self.chb_tune_mdl.checkState() == 2:
        self.worker.mdl = True
    if self.chb_io.checkState() == 2:
        self.worker.uio = True
    # Pixel options additionally depend on which data set is selected.
    get_pixels = self.chb_pxl.checkState() == 2
    see_pixels = self.chb_see_pxl.checkState() == 2
    if get_pixels and self.rb_smp.isChecked():
        self.worker.gps = True
    if get_pixels and self.rb_tst.isChecked():
        self.worker.gpa = True
    if see_pixels and self.rb_smp.isChecked():
        self.worker.sps = True
    if see_pixels and self.rb_tst.isChecked():
        self.worker.spa = True
    if self.chb_fix.checkState() == 2:
        self.worker.gfx = True

    # --- run the worker on its own thread -------------------------------
    self.thread = QThread()
    self.worker.moveToThread(self.thread)
    self.thread.started.connect(self.worker.do_work)

    # BUGFIX: connect every worker signal *before* starting the thread.
    # Previously these connections were made after thread.start(), so a
    # fast worker could emit before the connections existed and the GUI
    # would miss the updates (and thread.quit would never fire).
    self.worker.finished.connect(self.thread.quit)
    self.worker.cam_started.connect(lambda: self.monitor("Camera"))
    self.worker.clb_started.connect(lambda: self.monitor("Calibration"))
    self.worker.smp_started.connect(lambda: self.monitor("Sampling"))
    self.worker.acc_started.connect(lambda: self.monitor("Testing"))
    self.worker.tbt_started.connect(lambda: self.monitor("Seeing Blinking"))
    self.worker.mdl_started.connect(lambda: self.monitor("Tuning params"))
    self.worker.gps_started.connect(lambda: self.monitor("Getting sampling pixels"))
    self.worker.gpa_started.connect(lambda: self.monitor("Getting test pixels"))
    self.worker.sps_started.connect(lambda: self.monitor("Seeing sampling pixels"))
    self.worker.spa_started.connect(lambda: self.monitor("Seeing test pixels"))
    self.worker.gfx_started.connect(lambda: self.monitor("Getting fixations"))
    self.worker.finished.connect(lambda: self.monitor("Eye Tracking finished!"))
    self.worker.finished.connect(lambda: self.pb_start.setEnabled(True))

    self.thread.start()
def b_stop_action(self):
    """Ask the running worker to stop.

    BUGFIX: guard against Stop being clicked before Start ever created
    a worker — previously that raised AttributeError on self.worker.
    """
    worker = getattr(self, "worker", None)
    if worker is not None:
        # The worker's loop polls this flag and exits when it goes False.
        worker.running = False
def monitor(self, txt):
    """Display *txt* in the status label (current pipeline stage)."""
    self.l_monitor.setText(txt)
def clb_uncheck(self):
    """Enable the calibration inputs only while 'Calibration' is checked."""
    enable = self.chb_clb.checkState() == 2  # 2 == Qt.Checked
    for field in (self.le_name, self.te_dcp, self.le_clb_grd):
        field.setEnabled(enable)
def blink_uncheck(self):
    """The blink-threshold field is editable only when blink detection is on."""
    self.le_blink.setEnabled(self.chb_blink.checkState() == 2)  # 2 == Qt.Checked
def smp_uncheck(self):
    """React to the 'Sampling data' radio button.

    The fixation and IO-model options only apply to sampling data; when
    fixations were already requested, their parameter fields come back
    on too. (The parameter fields are deliberately left alone otherwise.)
    """
    sampling = self.rb_smp.isChecked()
    self.chb_fix.setEnabled(sampling)
    self.chb_io.setEnabled(sampling)
    if sampling and self.chb_fix.checkState() == 2:  # 2 == Qt.Checked
        for field in (self.le_st, self.le_dft, self.le_mfr):
            field.setEnabled(True)
def tst_uncheck(self):
    """React to the 'Test data' radio button.

    Fixation options and their parameter fields do not apply to test
    data, so they are all disabled; deselecting re-enables only the two
    checkboxes (the parameter fields follow the fixation checkbox).
    """
    if self.rb_tst.isChecked():
        for widget in (self.chb_fix, self.chb_io,
                       self.le_st, self.le_dft, self.le_mfr):
            widget.setEnabled(False)
    else:
        self.chb_fix.setEnabled(True)
        self.chb_io.setEnabled(True)
def fix_uncheck(self):
    """Fixation parameter fields follow the 'Get Fixations' checkbox,
    and are only available while 'Sampling data' is selected."""
    want_params = self.rb_smp.isChecked() and self.chb_fix.checkState() == 2
    for field in (self.le_st, self.le_dft, self.le_mfr):
        field.setEnabled(want_params)
if __name__ == "__main__":
    import sys

    # Build the Qt application, attach the generated UI to a main window,
    # wire the handlers, then hand control to the Qt event loop until the
    # window is closed.
    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(main_window)
    ui.do()
    main_window.show()
    sys.exit(app.exec_())
|
requirements.txt
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
absl-py==2.0.0
|
| 2 |
+
altgraph==0.17.4
|
| 3 |
+
astunparse==1.6.3
|
| 4 |
+
attrs==23.1.0
|
| 5 |
+
cachetools==5.3.1
|
| 6 |
+
certifi==2023.7.22
|
| 7 |
+
cffi==1.16.0
|
| 8 |
+
charset-normalizer==3.3.0
|
| 9 |
+
contourpy==1.1.1
|
| 10 |
+
cycler==0.12.1
|
| 11 |
+
dill==0.3.8
|
| 12 |
+
et-xmlfile==1.1.0
|
| 13 |
+
flatbuffers==23.5.26
|
| 14 |
+
fonttools==4.43.1
|
| 15 |
+
gast==0.5.4
|
| 16 |
+
google-auth==2.23.2
|
| 17 |
+
google-auth-oauthlib==1.0.0
|
| 18 |
+
google-pasta==0.2.0
|
| 19 |
+
grpcio==1.59.0
|
| 20 |
+
h5py==3.9.0
|
| 21 |
+
idna==3.4
|
| 22 |
+
importlib-metadata==6.8.0
|
| 23 |
+
importlib-resources==6.1.0
|
| 24 |
+
joblib==1.3.2
|
| 25 |
+
keras==2.14.0
|
| 26 |
+
kiwisolver==1.4.5
|
| 27 |
+
libclang==16.0.6
|
| 28 |
+
Markdown==3.5
|
| 29 |
+
MarkupSafe==2.1.3
|
| 30 |
+
matplotlib==3.8.0
|
| 31 |
+
mediapipe==0.10.5
|
| 32 |
+
ml-dtypes==0.2.0
|
| 33 |
+
numpy==1.26.0
|
| 34 |
+
oauthlib==3.2.2
|
| 35 |
+
opencv-contrib-python==4.8.1.78
|
| 36 |
+
opencv-python==4.8.1.78
|
| 37 |
+
openpyxl==3.1.2
|
| 38 |
+
opt-einsum==3.3.0
|
| 39 |
+
packaging==23.2
|
| 40 |
+
pefile==2023.2.7
|
| 41 |
+
Pillow==10.0.1
|
| 42 |
+
protobuf==3.20.3
|
| 43 |
+
pyasn1==0.5.0
|
| 44 |
+
pyasn1-modules==0.3.0
|
| 45 |
+
pycparser==2.21
|
| 46 |
+
pyinstaller==6.5.0
|
| 47 |
+
pyinstaller-hooks-contrib==2024.3
|
| 48 |
+
pyparsing==3.1.1
|
| 49 |
+
PyQt5==5.15.9
|
| 50 |
+
PyQt5-Qt5==5.15.2
|
| 51 |
+
PyQt5-sip==12.12.2
|
| 52 |
+
python-dateutil==2.8.2
|
| 53 |
+
pywin32-ctypes==0.2.2
|
| 54 |
+
requests==2.31.0
|
| 55 |
+
requests-oauthlib==1.3.1
|
| 56 |
+
rsa==4.9
|
| 57 |
+
scikit-learn==1.3.1
|
| 58 |
+
scipy==1.11.3
|
| 59 |
+
screeninfo==0.8.1
|
| 60 |
+
six==1.16.0
|
| 61 |
+
sounddevice==0.4.6
|
| 62 |
+
tensorboard==2.14.1
|
| 63 |
+
tensorboard-data-server==0.7.1
|
| 64 |
+
tensorflow==2.14.0
|
| 65 |
+
tensorflow-estimator==2.14.0
|
| 66 |
+
tensorflow-intel==2.14.0
|
| 67 |
+
tensorflow-io-gcs-filesystem==0.31.0
|
| 68 |
+
termcolor==2.3.0
|
| 69 |
+
threadpoolctl==3.2.0
|
| 70 |
+
typing_extensions==4.8.0
|
| 71 |
+
urllib3==2.0.6
|
| 72 |
+
Werkzeug==3.0.0
|
| 73 |
+
wrapt==1.14.1
|
| 74 |
+
zipp==3.17.0
|