Spaces:
Runtime error
Runtime error
Upload 136 files
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +70 -0
- .gitignore +2 -0
- README.md +107 -10
- ball_tracking_train.py +9 -0
- data.yaml +6 -0
- images/predicting_ball_path.png +3 -0
- modelSave.py +7 -0
- predict.py +293 -0
- requirements.txt +0 -0
- runs/detect/train/F1_curve.png +3 -0
- runs/detect/train/PR_curve.png +0 -0
- runs/detect/train/P_curve.png +0 -0
- runs/detect/train/R_curve.png +3 -0
- runs/detect/train/args.yaml +105 -0
- runs/detect/train/confusion_matrix.png +0 -0
- runs/detect/train/confusion_matrix_normalized.png +0 -0
- runs/detect/train/events.out.tfevents.1710619176.LAPTOP-02FVE3SQ.27180.0 +3 -0
- runs/detect/train/labels.jpg +3 -0
- runs/detect/train/labels_correlogram.jpg +3 -0
- runs/detect/train/results.csv +31 -0
- runs/detect/train/results.png +3 -0
- runs/detect/train/train_batch0.jpg +3 -0
- runs/detect/train/train_batch1.jpg +3 -0
- runs/detect/train/train_batch2.jpg +3 -0
- runs/detect/train/train_batch680.jpg +3 -0
- runs/detect/train/train_batch681.jpg +3 -0
- runs/detect/train/train_batch682.jpg +3 -0
- runs/detect/train/val_batch0_labels.jpg +3 -0
- runs/detect/train/val_batch0_pred.jpg +3 -0
- runs/detect/train/val_batch1_labels.jpg +3 -0
- runs/detect/train/val_batch1_pred.jpg +3 -0
- runs/detect/train/weights/best.pt +3 -0
- runs/detect/train/weights/last.pt +3 -0
- runs/detect/train2/F1_curve.png +0 -0
- runs/detect/train2/PR_curve.png +0 -0
- runs/detect/train2/P_curve.png +0 -0
- runs/detect/train2/R_curve.png +0 -0
- runs/detect/train2/args.yaml +105 -0
- runs/detect/train2/confusion_matrix.png +0 -0
- runs/detect/train2/confusion_matrix_normalized.png +0 -0
- runs/detect/train2/events.out.tfevents.1710659328.LAPTOP-02FVE3SQ.23948.0 +3 -0
- runs/detect/train2/labels.jpg +3 -0
- runs/detect/train2/labels_correlogram.jpg +3 -0
- runs/detect/train2/results.csv +41 -0
- runs/detect/train2/results.png +3 -0
- runs/detect/train2/train_batch0.jpg +3 -0
- runs/detect/train2/train_batch1.jpg +3 -0
- runs/detect/train2/train_batch2.jpg +3 -0
- runs/detect/train2/train_batch2040.jpg +3 -0
- runs/detect/train2/train_batch2041.jpg +3 -0
.gitattributes
CHANGED
|
@@ -36,3 +36,73 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 36 |
6773201.jpg filter=lfs diff=lfs merge=lfs -text
|
| 37 |
static/6773201.jpg filter=lfs diff=lfs merge=lfs -text
|
| 38 |
test2.mp4 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
6773201.jpg filter=lfs diff=lfs merge=lfs -text
|
| 37 |
static/6773201.jpg filter=lfs diff=lfs merge=lfs -text
|
| 38 |
test2.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
images/predicting_ball_path.png filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
runs/detect/train/F1_curve.png filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
runs/detect/train/labels_correlogram.jpg filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
runs/detect/train/labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
runs/detect/train/R_curve.png filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
runs/detect/train/results.png filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
runs/detect/train/train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
runs/detect/train/train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
runs/detect/train/train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
runs/detect/train/train_batch680.jpg filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
runs/detect/train/train_batch681.jpg filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
runs/detect/train/train_batch682.jpg filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
runs/detect/train/val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
runs/detect/train/val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
runs/detect/train/val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 54 |
+
runs/detect/train/val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
runs/detect/train2/labels_correlogram.jpg filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
runs/detect/train2/labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
runs/detect/train2/results.png filter=lfs diff=lfs merge=lfs -text
|
| 58 |
+
runs/detect/train2/train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
|
| 59 |
+
runs/detect/train2/train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
runs/detect/train2/train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
runs/detect/train2/train_batch2040.jpg filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
runs/detect/train2/train_batch2041.jpg filter=lfs diff=lfs merge=lfs -text
|
| 63 |
+
runs/detect/train2/train_batch2042.jpg filter=lfs diff=lfs merge=lfs -text
|
| 64 |
+
runs/detect/train2/val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 65 |
+
runs/detect/train2/val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 66 |
+
runs/detect/train2/val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 67 |
+
runs/detect/train2/val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 68 |
+
runs/detect/train3/labels_correlogram.jpg filter=lfs diff=lfs merge=lfs -text
|
| 69 |
+
runs/detect/train3/labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 70 |
+
runs/detect/train3/results.png filter=lfs diff=lfs merge=lfs -text
|
| 71 |
+
runs/detect/train3/train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
|
| 72 |
+
runs/detect/train3/train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
|
| 73 |
+
runs/detect/train3/train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
|
| 74 |
+
runs/detect/train3/train_batch4080.jpg filter=lfs diff=lfs merge=lfs -text
|
| 75 |
+
runs/detect/train3/train_batch4081.jpg filter=lfs diff=lfs merge=lfs -text
|
| 76 |
+
runs/detect/train3/train_batch4082.jpg filter=lfs diff=lfs merge=lfs -text
|
| 77 |
+
runs/detect/train3/val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 78 |
+
runs/detect/train3/val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 79 |
+
runs/detect/train3/val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 80 |
+
runs/detect/train3/val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 81 |
+
runs/detect/train4/labels_correlogram.jpg filter=lfs diff=lfs merge=lfs -text
|
| 82 |
+
runs/detect/train4/labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 83 |
+
runs/detect/train4/results.png filter=lfs diff=lfs merge=lfs -text
|
| 84 |
+
runs/detect/train4/train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
|
| 85 |
+
runs/detect/train4/train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
|
| 86 |
+
runs/detect/train4/train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
|
| 87 |
+
runs/detect/train4/train_batch7140.jpg filter=lfs diff=lfs merge=lfs -text
|
| 88 |
+
runs/detect/train4/train_batch7141.jpg filter=lfs diff=lfs merge=lfs -text
|
| 89 |
+
runs/detect/train4/train_batch7142.jpg filter=lfs diff=lfs merge=lfs -text
|
| 90 |
+
runs/detect/train4/val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 91 |
+
runs/detect/train4/val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 92 |
+
runs/detect/train4/val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 93 |
+
runs/detect/train4/val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 94 |
+
runs/detect/train5/labels_correlogram.jpg filter=lfs diff=lfs merge=lfs -text
|
| 95 |
+
runs/detect/train5/labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 96 |
+
runs/detect/train5/results.png filter=lfs diff=lfs merge=lfs -text
|
| 97 |
+
runs/detect/train5/train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
|
| 98 |
+
runs/detect/train5/train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
|
| 99 |
+
runs/detect/train5/train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
|
| 100 |
+
runs/detect/train5/train_batch9180.jpg filter=lfs diff=lfs merge=lfs -text
|
| 101 |
+
runs/detect/train5/train_batch9181.jpg filter=lfs diff=lfs merge=lfs -text
|
| 102 |
+
runs/detect/train5/train_batch9182.jpg filter=lfs diff=lfs merge=lfs -text
|
| 103 |
+
runs/detect/train5/val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 104 |
+
runs/detect/train5/val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 105 |
+
runs/detect/train5/val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
|
| 106 |
+
runs/detect/train5/val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
|
| 107 |
+
videos/test.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 108 |
+
videos/test1.mp4 filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cricket_ball_data
|
| 2 |
+
myenv
|
README.md
CHANGED
|
@@ -1,14 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
---
|
| 11 |
|
| 12 |
-
|
|
|
|
| 13 |
|
| 14 |
-
|
|
|
|
| 1 |
+
# Cricket Ball Trajectory Prediction🏏
|
| 2 |
+
Revolutionizing Cricket Analytics: Predicting Ball Trajectories with Precision and Innovation
|
| 3 |
+
## Overview
|
| 4 |
+
This project aims to predict the trajectory of a cricket ball in videos using advanced object detection and tracking techniques powered by YOLOv8. The system extracts frames from cricket videos, detects the cricket ball, and predicts its future trajectory, providing detailed insights into its motion.
|
| 5 |
+
|
| 6 |
+
---
|
| 7 |
+
|
| 8 |
+
## Features
|
| 9 |
+
- **Cricket Ball Detection**: Utilizes YOLOv8 for precise detection of cricket balls in diverse scenarios.
|
| 10 |
+
- **Trajectory Prediction**: Predicts the future positions of the ball based on its current trajectory.
|
| 11 |
+
- **Angle Calculation**: Calculates the angle of the ball's motion, identifying key events like bounces.
|
| 12 |
+
- **Dataset Creation**: Custom dataset with 1778 annotated images created from YouTube videos for robust model training.
|
| 13 |
+
- **Real-time Tracking**: Processes live or pre-recorded cricket videos for instant ball tracking.
|
| 14 |
+
|
| 15 |
+
---
|
| 16 |
+
|
| 17 |
+
## Directory Structure
|
| 18 |
+
```
|
| 19 |
+
└── kushagra3204-Cricket-Ball-Trajectory-Prediction/
|
| 20 |
+
├── runs/
|
| 21 |
+
│ └── detect/ # YOLOv8 training outputs
|
| 22 |
+
├── youtube_video_image_extractor.py # Extract frames from YouTube videos
|
| 23 |
+
├── yolov8s.pt # YOLOv8 small model
|
| 24 |
+
├── predict.py # Ball detection and trajectory prediction
|
| 25 |
+
├── modelSave.py # Save trained model to ONNX format
|
| 26 |
+
├── yolov8m.pt # YOLOv8 medium model
|
| 27 |
+
├── videos/ # Directory to store test videos
|
| 28 |
+
├── requirements.txt # Python dependencies
|
| 29 |
+
├── yolov8l.pt # YOLOv8 large model
|
| 30 |
+
├── ball_tracking_train.py # YOLOv8 training script
|
| 31 |
+
├── README.md # Project documentation
|
| 32 |
+
├── yolov8n.pt # YOLOv8 nano model
|
| 33 |
+
└── data.yaml # Dataset configuration for YOLOv8
|
| 34 |
+
```
|
| 35 |
+
|
| 36 |
---
|
| 37 |
+
|
| 38 |
+
## Installation
|
| 39 |
+
1. **Clone the Repository**
|
| 40 |
+
```bash
|
| 41 |
+
git clone https://github.com/kushagra3204/Cricket-Ball-Trajectory-Prediction.git
|
| 42 |
+
cd Cricket-Ball-Trajectory-Prediction
|
| 43 |
+
```
|
| 44 |
+
|
| 45 |
+
2. **Install Dependencies**
|
| 46 |
+
```bash
|
| 47 |
+
pip install -r requirements.txt
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
3. **Download Pre-trained Models**
|
| 51 |
+
Place the YOLOv8 models (`yolov8n.pt`, `yolov8s.pt`, etc.) in the project directory.
|
| 52 |
+
|
| 53 |
+
---
|
| 54 |
+
|
| 55 |
+
## Usage
|
| 56 |
+
### Extract Frames from Videos
|
| 57 |
+
Use `youtube_video_image_extractor.py` to extract frames from a YouTube video:
|
| 58 |
+
```bash
|
| 59 |
+
python youtube_video_image_extractor.py
|
| 60 |
+
```
|
| 61 |
+
### Train the Model
|
| 62 |
+
Train the YOLOv8 model with the custom dataset:
|
| 63 |
+
```bash
|
| 64 |
+
python ball_tracking_train.py
|
| 65 |
+
```
|
| 66 |
+
|
| 67 |
+
### Run Ball Detection and Prediction
|
| 68 |
+
Detect the ball and predict its trajectory:
|
| 69 |
+
```bash
|
| 70 |
+
python predict.py
|
| 71 |
+
```
|
| 72 |
+
|
| 73 |
+
### Save Model to ONNX Format
|
| 74 |
+
Export the trained model to ONNX:
|
| 75 |
+
```bash
|
| 76 |
+
python modelSave.py
|
| 77 |
+
```
|
| 78 |
+
|
| 79 |
+
---
|
| 80 |
+
|
| 81 |
+
## <a href="https://www.kaggle.com/datasets/kushagra3204/cricket-ball-dataset-for-yolo" target="_blank">Dataset</a>
|
| 82 |
+
```
|
| 83 |
+
https://www.kaggle.com/datasets/kushagra3204/cricket-ball-dataset-for-yolo
|
| 84 |
+
```
|
| 85 |
+
The dataset includes 1778 annotated images in YOLOv8 format, created by extracting frames from cricket videos using LabelImg.
|
| 86 |
+
|
| 87 |
+
### Key Features:
|
| 88 |
+
- **Diverse Conditions**: Includes images under various lighting and background conditions.
|
| 89 |
+
- **Real-world Scenarios**: Captures cricket balls in motion, both in gameplay and practice settings.
|
| 90 |
+
- **Multiple Ball States**: Covers new and worn cricket balls for comprehensive detection.
|
| 91 |
+
|
| 92 |
+
---
|
| 93 |
+
|
| 94 |
+
## Applications
|
| 95 |
+
- **Real-time Cricket Analysis**: Player performance analysis, ball trajectory tracking, and umpiring.
|
| 96 |
+
- **Broadcasting Enhancements**: Real-time overlays, highlights, and ball tracking.
|
| 97 |
+
- **Automated Summarization**: Key moment extraction for highlight reels.
|
| 98 |
+
|
| 99 |
+
---
|
| 100 |
+
|
| 101 |
+
## Images of the Working System
|
| 102 |
+
Below are some visual examples showcasing the system in action:
|
| 103 |
+
|
| 104 |
+

|
| 105 |
+
|
| 106 |
---
|
| 107 |
|
| 108 |
+
## Contributing
|
| 109 |
+
We warmly invite researchers, developers, and enthusiasts from around the world to join us in making this project even more impactful. Your unique skills and ideas can help elevate this work to new heights, revolutionizing the field of sports analytics and cricket ball trajectory prediction.
|
| 110 |
|
| 111 |
+
Whether you're contributing code, refining models, expanding datasets, or sharing feedback, your collaboration is invaluable in advancing this mission. Together, we can create a groundbreaking tool that enhances cricket analysis, assists players, and engages fans globally.
|
ball_tracking_train.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Train a YOLOv8 detector on the cricket-ball dataset described by data.yaml."""
from ultralytics import YOLO
import tensorflow as tf

# Enable on-demand GPU memory growth so TensorFlow does not reserve the whole
# card up front and the YOLO run can share the GPU with other processes.
for gpu_device in tf.config.list_physical_devices('GPU'):
    tf.config.experimental.set_memory_growth(gpu_device, True)

model = YOLO("yolov8s.pt")
results = model.train(data="data.yaml", epochs=100)
|
data.yaml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
path: C:/Users/Kushagra Agarwal/Desktop/Image-Processing/Ball Tracking/cricket_ball_data
|
| 2 |
+
train: train/images
|
| 3 |
+
val: valid/images
|
| 4 |
+
|
| 5 |
+
names:
|
| 6 |
+
0: cricketBall
|
images/predicting_ball_path.png
ADDED
|
Git LFS Details
|
modelSave.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Export the best checkpoint from the third training run to ONNX format."""
from ultralytics import YOLO
import cv2
import os

# Weights written by YOLOv8 during the 'train3' run.
model_path = os.path.join('runs', 'detect', 'train3', 'weights', 'best.pt')
model = YOLO(model_path)
# Produce best.onnx next to the .pt file for deployment outside PyTorch.
model.export(format='onnx')
|
predict.py
ADDED
|
@@ -0,0 +1,293 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import deque
|
| 2 |
+
from ultralytics import YOLO
|
| 3 |
+
import math
|
| 4 |
+
import time
|
| 5 |
+
import cv2
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
def angle_between_lines(m1, m2=1):
    """Return the acute angle, in degrees, between two lines given by slope.

    Parameters
    ----------
    m1 : float
        Slope of the first line (the ball's current direction of travel).
    m2 : float, optional
        Slope of the reference line; defaults to 1 (a 45-degree line).

    Returns
    -------
    float
        The angle between the lines, in [0, 90] degrees. Perpendicular
        lines (m1 * m2 == -1) yield exactly 90.0.
    """
    # Perpendicularity test written as a product so it never divides by m2.
    # The original `m1 != -1/m2` form raised ZeroDivisionError for m2 == 0;
    # this is equivalent for m2 != 0 and handles a horizontal reference line.
    if 1 + m1 * m2 == 0:
        return 90.0
    return math.degrees(math.atan(abs((m2 - m1) / (1 + m1 * m2))))
|
| 14 |
+
|
| 15 |
+
class FixedSizeQueue:
    """Bounded FIFO of tracked ball centroids.

    Once ``max_size`` items are held, adding another silently evicts the
    oldest one (delegated to ``collections.deque(maxlen=...)``).
    """

    def __init__(self, max_size):
        # deque with maxlen drops the oldest element automatically on overflow.
        self.queue = deque(maxlen=max_size)

    def add(self, item):
        """Append *item*, evicting the oldest entry if the queue is full."""
        self.queue.append(item)

    def pop(self):
        """Remove and return the oldest item.

        Raises IndexError when the queue is empty. (Fix: the original
        discarded ``popleft()``'s result and implicitly returned None;
        returning the removed element is backward-compatible.)
        """
        return self.queue.popleft()

    def clear(self):
        """Discard all items."""
        self.queue.clear()

    def get_queue(self):
        """Return the underlying deque (a live reference, not a copy)."""
        return self.queue

    def __len__(self):
        return len(self.queue)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
# --- Ball tracking / trajectory-prediction main loop ----------------------
# Loads the trained detector, reads the test video frame by frame, tracks
# the ball centroid, draws its recent path plus a linear extrapolation of
# the next four positions, and overlays the motion angle and the FPS.
# NOTE(review): indentation of the original was reconstructed; verify the
# angle/extrapolation nesting against the intended behavior.
model_path = os.path.join('runs', 'detect', 'train5', 'weights', 'best.pt')
model = YOLO(model_path)

video_path = os.path.join('videos', 'test1.mp4')
cap = cv2.VideoCapture(video_path)
ret = True
centroid_history = FixedSizeQueue(10)   # last 10 ball centroids
start_time = time.time()
interval = 0.6                          # seconds between history trims
paused = False
angle = 0
prev_frame_time = 0
new_frame_time = 0
quit_requested = False                  # set when 'q' is pressed while paused

while ret and not quit_requested:
    ret, frame = cap.read()
    if ret:
        # FPS from the wall-clock delta between consecutive frames.
        new_frame_time = time.time()
        delta = new_frame_time - prev_frame_time
        # Guard against a zero delta (the original divided unconditionally
        # and could raise ZeroDivisionError on back-to-back reads).
        fps = str(int(1 / delta)) if delta > 0 else "0"
        prev_frame_time = new_frame_time
        print(list(centroid_history.queue))

        # Periodically drop the oldest centroid so stale points fade out.
        current_time = time.time()
        if current_time - start_time >= interval and len(centroid_history) > 0:
            centroid_history.pop()
            start_time = current_time

        # Detection + tracking on the current frame.
        results = model.track(frame, persist=True, conf=0.35, verbose=False)
        boxes = results[0].boxes
        box = boxes.xyxy
        rows, cols = box.shape
        if len(box) != 0:
            for i in range(rows):
                x1, y1, x2, y2 = box[i]
                x1, y1, x2, y2 = x1.item(), y1.item(), x2.item(), y2.item()

                centroid_x = int((x1 + x2) / 2)
                centroid_y = int((y1 + y2) / 2)

                centroid_history.add((centroid_x, centroid_y))
                cv2.circle(frame, (centroid_x, centroid_y), radius=3, color=(0, 0, 255), thickness=-1)
                cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 0, 255), 2)

        # Draw the tracked path through the recorded centroids.
        if len(centroid_history) > 1:
            pts = list(centroid_history.get_queue())
            for i in range(1, len(pts)):
                cv2.line(frame, pts[i - 1], pts[i], (255, 0, 0), 4)

        # Estimate the motion angle and extrapolate the next 4 positions
        # linearly from the two most recent centroids.
        if len(centroid_history) > 1:
            centroid_list = list(centroid_history.get_queue())
            x_diff = centroid_list[-1][0] - centroid_list[-2][0]
            y_diff = centroid_list[-1][1] - centroid_list[-2][1]
            if x_diff != 0:
                m1 = y_diff / x_diff
                if m1 == 1:
                    angle = 90
                elif m1 != 0:
                    angle = 90 - angle_between_lines(m1)
                    if angle >= 45:
                        print("ball bounced")
            future_positions = [centroid_list[-1]]
            for i in range(1, 5):
                future_positions.append(
                    (
                        centroid_list[-1][0] + x_diff * i,
                        centroid_list[-1][1] + y_diff * i
                    )
                )
            print("Future Positions: ", future_positions)
            for i in range(1, len(future_positions)):
                cv2.line(frame, future_positions[i - 1], future_positions[i], (0, 255, 0), 4)
                cv2.circle(frame, future_positions[i], radius=3, color=(0, 0, 255), thickness=-1)

        text = "Angle: {:.2f} degrees".format(angle)
        cv2.putText(frame, text, (20, 20), cv2.FONT_HERSHEY_PLAIN, 1, (255, 0, 0), 2)
        cv2.putText(frame, f'FPS: {fps}', (20, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2)
        frame_resized = cv2.resize(frame, (1000, 600))
        cv2.imshow('frame', frame_resized)

        key = cv2.waitKey(1)
        if key & 0xFF == ord('q'):
            break
        elif key & 0xFF == ord(' '):
            paused = True
            # Block here until space resumes playback or q quits entirely.
            while paused:
                key = cv2.waitKey(30) & 0xFF
                if key == ord(' '):
                    paused = False
                elif key == ord('q'):
                    # Fix: previously 'q' only left the pause loop and
                    # playback resumed; now it terminates the program.
                    paused = False
                    quit_requested = True

cap.release()
cv2.destroyAllWindows()
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
# import numpy as np
|
| 137 |
+
# import cv2
|
| 138 |
+
# import time
|
| 139 |
+
# import os
|
| 140 |
+
# import math
|
| 141 |
+
# from collections import deque
|
| 142 |
+
# from ultralytics import YOLO
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
# def angle_between_lines(m1, m2=1):
|
| 146 |
+
# if m1 != -1/m2:
|
| 147 |
+
# angle = math.degrees(math.atan(abs((m2 - m1) / (1 + m1 * m2))))
|
| 148 |
+
# return angle
|
| 149 |
+
# else:
|
| 150 |
+
# return 90.0
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
# class FixedSizeQueue:
|
| 154 |
+
# def __init__(self, max_size):
|
| 155 |
+
# self.queue = deque(maxlen=max_size)
|
| 156 |
+
|
| 157 |
+
# def add(self, item):
|
| 158 |
+
# self.queue.append(item)
|
| 159 |
+
|
| 160 |
+
# def pop(self):
|
| 161 |
+
# self.queue.popleft()
|
| 162 |
+
|
| 163 |
+
# def clear(self):
|
| 164 |
+
# self.queue.clear()
|
| 165 |
+
|
| 166 |
+
# def get_queue(self):
|
| 167 |
+
# return self.queue
|
| 168 |
+
|
| 169 |
+
# def __len__(self):
|
| 170 |
+
# return len(self.queue)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
# model_path = os.path.join('runs', 'detect', 'train5', 'weights', 'best.pt')
|
| 174 |
+
# model = YOLO(model_path)
|
| 175 |
+
|
| 176 |
+
# video_path = os.path.join('videos', 'test1.mp4')
|
| 177 |
+
# cap = cv2.VideoCapture(video_path)
|
| 178 |
+
# ret = True
|
| 179 |
+
# prevTime = 0
|
| 180 |
+
# centroid_history = FixedSizeQueue(10)
|
| 181 |
+
# start_time = time.time()
|
| 182 |
+
# interval = 0.6
|
| 183 |
+
# paused = False
|
| 184 |
+
# angle = 0
|
| 185 |
+
# prev_frame_time = 0
|
| 186 |
+
# new_frame_time = 0
|
| 187 |
+
|
| 188 |
+
# # Smoothing function for lines (Bezier curve)
|
| 189 |
+
# def create_bezier_curve(points, smoothness=50):
|
| 190 |
+
# t = np.linspace(0, 1, smoothness)
|
| 191 |
+
# curve = []
|
| 192 |
+
# for i in range(smoothness):
|
| 193 |
+
# x = (1 - t[i]) ** 2 * points[0][0] + 2 * (1 - t[i]) * t[i] * points[1][0] + t[i] ** 2 * points[2][0]
|
| 194 |
+
# y = (1 - t[i]) ** 2 * points[0][1] + 2 * (1 - t[i]) * t[i] * points[1][1] + t[i] ** 2 * points[2][1]
|
| 195 |
+
# curve.append([int(x), int(y)])
|
| 196 |
+
# return np.array(curve, dtype=np.int32)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
# while ret:
|
| 200 |
+
# ret, frame = cap.read()
|
| 201 |
+
# if ret:
|
| 202 |
+
# new_frame_time = time.time()
|
| 203 |
+
# fps = 1/(new_frame_time-prev_frame_time)
|
| 204 |
+
# prev_frame_time = new_frame_time
|
| 205 |
+
# fps = int(fps)
|
| 206 |
+
# fps = str(fps)
|
| 207 |
+
|
| 208 |
+
# current_time = time.time()
|
| 209 |
+
# if current_time - start_time >= interval and len(centroid_history) > 0:
|
| 210 |
+
# centroid_history.pop()
|
| 211 |
+
# start_time = current_time
|
| 212 |
+
|
| 213 |
+
# results = model.track(frame, persist=True, conf=0.35, verbose=False)
|
| 214 |
+
# boxes = results[0].boxes
|
| 215 |
+
# box = boxes.xyxy
|
| 216 |
+
# rows, cols = box.shape
|
| 217 |
+
# if len(box) != 0:
|
| 218 |
+
# for i in range(rows):
|
| 219 |
+
# x1, y1, x2, y2 = box[i]
|
| 220 |
+
# x1, y1, x2, y2 = x1.item(), y1.item(), x2.item(), y2.item()
|
| 221 |
+
|
| 222 |
+
# centroid_x = int((x1 + x2) / 2)
|
| 223 |
+
# centroid_y = int((y1 + y2) / 2)
|
| 224 |
+
|
| 225 |
+
# centroid_history.add((centroid_x, centroid_y))
|
| 226 |
+
# cv2.circle(frame, (centroid_x, centroid_y), radius=3, color=(0, 0, 255), thickness=-1)
|
| 227 |
+
# cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 0, 255), 2)
|
| 228 |
+
|
| 229 |
+
# # Smoothly connecting centroids using Bezier curve
|
| 230 |
+
# if len(centroid_history) > 2:
|
| 231 |
+
# centroid_list = list(centroid_history.get_queue())
|
| 232 |
+
# curve_points = []
|
| 233 |
+
# for i in range(1, len(centroid_history)):
|
| 234 |
+
# mid_point = (
|
| 235 |
+
# int((centroid_list[i - 1][0] + centroid_list[i][0]) / 2),
|
| 236 |
+
# int((centroid_list[i - 1][1] + centroid_list[i][1]) / 2)
|
| 237 |
+
# )
|
| 238 |
+
# curve_points.append(mid_point)
|
| 239 |
+
|
| 240 |
+
# bezier_curve = create_bezier_curve([centroid_list[0], curve_points[0], centroid_list[-1]])
|
| 241 |
+
# cv2.polylines(frame, [bezier_curve], isClosed=False, color=(255, 0, 0), thickness=3)
|
| 242 |
+
|
| 243 |
+
# # Calculate angle and future positions
|
| 244 |
+
# if len(centroid_history) > 1:
|
| 245 |
+
# centroid_list = list(centroid_history.get_queue())
|
| 246 |
+
# x_diff = centroid_list[-1][0] - centroid_list[-2][0]
|
| 247 |
+
# y_diff = centroid_list[-1][1] - centroid_list[-2][1]
|
| 248 |
+
|
| 249 |
+
# if x_diff != 0:
|
| 250 |
+
# m1 = y_diff / x_diff
|
| 251 |
+
# if m1 == 1:
|
| 252 |
+
# angle = 90
|
| 253 |
+
# elif m1 != 0:
|
| 254 |
+
# angle = 90 - angle_between_lines(m1)
|
| 255 |
+
|
| 256 |
+
# future_positions = [centroid_list[-1]]
|
| 257 |
+
# for i in range(1, 5):
|
| 258 |
+
# future_positions.append(
|
| 259 |
+
# (
|
| 260 |
+
# centroid_list[-1][0] + x_diff * i,
|
| 261 |
+
# centroid_list[-1][1] + y_diff * i
|
| 262 |
+
# )
|
| 263 |
+
# )
|
| 264 |
+
|
| 265 |
+
# # Smoothly connect future positions
|
| 266 |
+
# bezier_curve_future = create_bezier_curve([future_positions[0], future_positions[1], future_positions[-1]])
|
| 267 |
+
# cv2.polylines(frame, [bezier_curve_future], isClosed=False, color=(0, 255, 0), thickness=3)
|
| 268 |
+
|
| 269 |
+
# for pos in future_positions:
|
| 270 |
+
# cv2.circle(frame, pos, radius=3, color=(0, 0, 255), thickness=-1)
|
| 271 |
+
|
| 272 |
+
# text = "Angle: {:.2f} degrees".format(angle)
|
| 273 |
+
# cv2.putText(frame, text, (20, 20), cv2.FONT_HERSHEY_PLAIN, 1, (255, 0, 0), 2)
|
| 274 |
+
# cv2.putText(frame, f'FPS: {fps}', (20, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2)
|
| 275 |
+
|
| 276 |
+
# frame_resized = cv2.resize(frame, (1000, 600))
|
| 277 |
+
# cv2.imshow('frame', frame_resized)
|
| 278 |
+
|
| 279 |
+
# key = cv2.waitKey(1)
|
| 280 |
+
# if key & 0xFF == ord('q'):
|
| 281 |
+
# break
|
| 282 |
+
# elif key & 0xFF == ord(' '):
|
| 283 |
+
# paused = not paused
|
| 284 |
+
|
| 285 |
+
# while paused:
|
| 286 |
+
# key = cv2.waitKey(30) & 0xFF
|
| 287 |
+
# if key == ord(' '):
|
| 288 |
+
# paused = not paused
|
| 289 |
+
# elif key == ord('q'):
|
| 290 |
+
# break
|
| 291 |
+
|
| 292 |
+
# cap.release()
|
| 293 |
+
# cv2.destroyAllWindows()
|
requirements.txt
CHANGED
|
Binary files a/requirements.txt and b/requirements.txt differ
|
|
|
runs/detect/train/F1_curve.png
ADDED
|
Git LFS Details
|
runs/detect/train/PR_curve.png
ADDED
|
runs/detect/train/P_curve.png
ADDED
|
runs/detect/train/R_curve.png
ADDED
|
Git LFS Details
|
runs/detect/train/args.yaml
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
task: detect
|
| 2 |
+
mode: train
|
| 3 |
+
model: yolov8n.yaml
|
| 4 |
+
data: data.yaml
|
| 5 |
+
epochs: 30
|
| 6 |
+
time: null
|
| 7 |
+
patience: 100
|
| 8 |
+
batch: 16
|
| 9 |
+
imgsz: 640
|
| 10 |
+
save: true
|
| 11 |
+
save_period: -1
|
| 12 |
+
cache: false
|
| 13 |
+
device: null
|
| 14 |
+
workers: 8
|
| 15 |
+
project: null
|
| 16 |
+
name: train
|
| 17 |
+
exist_ok: false
|
| 18 |
+
pretrained: true
|
| 19 |
+
optimizer: auto
|
| 20 |
+
verbose: true
|
| 21 |
+
seed: 0
|
| 22 |
+
deterministic: true
|
| 23 |
+
single_cls: false
|
| 24 |
+
rect: false
|
| 25 |
+
cos_lr: false
|
| 26 |
+
close_mosaic: 10
|
| 27 |
+
resume: false
|
| 28 |
+
amp: true
|
| 29 |
+
fraction: 1.0
|
| 30 |
+
profile: false
|
| 31 |
+
freeze: null
|
| 32 |
+
multi_scale: false
|
| 33 |
+
overlap_mask: true
|
| 34 |
+
mask_ratio: 4
|
| 35 |
+
dropout: 0.0
|
| 36 |
+
val: true
|
| 37 |
+
split: val
|
| 38 |
+
save_json: false
|
| 39 |
+
save_hybrid: false
|
| 40 |
+
conf: null
|
| 41 |
+
iou: 0.7
|
| 42 |
+
max_det: 300
|
| 43 |
+
half: false
|
| 44 |
+
dnn: false
|
| 45 |
+
plots: true
|
| 46 |
+
source: null
|
| 47 |
+
vid_stride: 1
|
| 48 |
+
stream_buffer: false
|
| 49 |
+
visualize: false
|
| 50 |
+
augment: false
|
| 51 |
+
agnostic_nms: false
|
| 52 |
+
classes: null
|
| 53 |
+
retina_masks: false
|
| 54 |
+
embed: null
|
| 55 |
+
show: false
|
| 56 |
+
save_frames: false
|
| 57 |
+
save_txt: false
|
| 58 |
+
save_conf: false
|
| 59 |
+
save_crop: false
|
| 60 |
+
show_labels: true
|
| 61 |
+
show_conf: true
|
| 62 |
+
show_boxes: true
|
| 63 |
+
line_width: null
|
| 64 |
+
format: torchscript
|
| 65 |
+
keras: false
|
| 66 |
+
optimize: false
|
| 67 |
+
int8: false
|
| 68 |
+
dynamic: false
|
| 69 |
+
simplify: false
|
| 70 |
+
opset: null
|
| 71 |
+
workspace: 4
|
| 72 |
+
nms: false
|
| 73 |
+
lr0: 0.01
|
| 74 |
+
lrf: 0.01
|
| 75 |
+
momentum: 0.937
|
| 76 |
+
weight_decay: 0.0005
|
| 77 |
+
warmup_epochs: 3.0
|
| 78 |
+
warmup_momentum: 0.8
|
| 79 |
+
warmup_bias_lr: 0.1
|
| 80 |
+
box: 7.5
|
| 81 |
+
cls: 0.5
|
| 82 |
+
dfl: 1.5
|
| 83 |
+
pose: 12.0
|
| 84 |
+
kobj: 1.0
|
| 85 |
+
label_smoothing: 0.0
|
| 86 |
+
nbs: 64
|
| 87 |
+
hsv_h: 0.015
|
| 88 |
+
hsv_s: 0.7
|
| 89 |
+
hsv_v: 0.4
|
| 90 |
+
degrees: 0.0
|
| 91 |
+
translate: 0.1
|
| 92 |
+
scale: 0.5
|
| 93 |
+
shear: 0.0
|
| 94 |
+
perspective: 0.0
|
| 95 |
+
flipud: 0.0
|
| 96 |
+
fliplr: 0.5
|
| 97 |
+
mosaic: 1.0
|
| 98 |
+
mixup: 0.0
|
| 99 |
+
copy_paste: 0.0
|
| 100 |
+
auto_augment: randaugment
|
| 101 |
+
erasing: 0.4
|
| 102 |
+
crop_fraction: 1.0
|
| 103 |
+
cfg: null
|
| 104 |
+
tracker: botsort.yaml
|
| 105 |
+
save_dir: runs\detect\train
|
runs/detect/train/confusion_matrix.png
ADDED
|
runs/detect/train/confusion_matrix_normalized.png
ADDED
|
runs/detect/train/events.out.tfevents.1710619176.LAPTOP-02FVE3SQ.27180.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:67b88de34fcbc29ccf98ee24ce1b780d0b22608b118b967c2dc1f3c8e4438d19
|
| 3 |
+
size 205654
|
runs/detect/train/labels.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/labels_correlogram.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/results.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
|
| 2 |
+
1, 2.9785, 4.1122, 4.2092, 0.00367, 0.69231, 0.00987, 0.00335, 2.2443, 3.7599, 4.1418, 0.00064706, 0.00064706, 0.00064706
|
| 3 |
+
2, 2.9129, 3.7433, 3.9319, 0.00374, 0.70513, 0.01234, 0.00421, 2.283, 3.678, 4.1773, 0.0012704, 0.0012704, 0.0012704
|
| 4 |
+
3, 2.9648, 3.3952, 3.68, 0.02594, 0.19231, 0.01447, 0.00483, 2.5276, 6.5918, 4.163, 0.0018497, 0.0018497, 0.0018497
|
| 5 |
+
4, 2.922, 3.2448, 3.5678, 0.00772, 0.53846, 0.00639, 0.00255, 2.6098, 282.49, 7.8106, 0.001802, 0.001802, 0.001802
|
| 6 |
+
5, 2.9106, 3.0068, 3.4563, 0.01747, 0.4359, 0.01008, 0.00403, 2.564, 310.45, 7.2578, 0.001802, 0.001802, 0.001802
|
| 7 |
+
6, 2.7071, 2.7859, 3.3269, 0.29031, 0.29487, 0.23053, 0.07973, 2.4015, 6.7424, 3.6955, 0.001736, 0.001736, 0.001736
|
| 8 |
+
7, 2.6661, 2.6738, 3.2578, 0.15741, 0.28205, 0.13283, 0.04128, 2.2222, 3.0908, 3.1609, 0.00167, 0.00167, 0.00167
|
| 9 |
+
8, 2.5845, 2.636, 3.2224, 0.36488, 0.30769, 0.24992, 0.10982, 2.1164, 3.4204, 3.0675, 0.001604, 0.001604, 0.001604
|
| 10 |
+
9, 2.5313, 2.6114, 3.1696, 0.24841, 0.28205, 0.15686, 0.07529, 2.0368, 4.524, 2.9618, 0.001538, 0.001538, 0.001538
|
| 11 |
+
10, 2.3675, 2.441, 3.05, 0.14012, 0.24359, 0.09419, 0.03074, 2.133, 3.814, 3.1085, 0.001472, 0.001472, 0.001472
|
| 12 |
+
11, 2.3652, 2.4484, 2.9704, 0.39739, 0.41026, 0.33251, 0.16884, 1.9064, 2.4546, 2.8835, 0.001406, 0.001406, 0.001406
|
| 13 |
+
12, 2.2555, 2.2918, 2.9004, 0.5257, 0.49738, 0.40078, 0.2241, 1.7378, 3.1788, 2.9381, 0.00134, 0.00134, 0.00134
|
| 14 |
+
13, 2.19, 2.2912, 2.8598, 0.65678, 0.41026, 0.46223, 0.28516, 1.7775, 2.6816, 3.0436, 0.001274, 0.001274, 0.001274
|
| 15 |
+
14, 2.1442, 2.4015, 2.8046, 0.38038, 0.3542, 0.25775, 0.11504, 1.9477, 2.8981, 2.8937, 0.001208, 0.001208, 0.001208
|
| 16 |
+
15, 2.1018, 2.3133, 2.7443, 0.77093, 0.47436, 0.61998, 0.38832, 1.5382, 2.2977, 2.6553, 0.001142, 0.001142, 0.001142
|
| 17 |
+
16, 2.0587, 2.1454, 2.6787, 0.67569, 0.57692, 0.64534, 0.38138, 1.5314, 2.0685, 2.532, 0.001076, 0.001076, 0.001076
|
| 18 |
+
17, 1.9946, 2.0214, 2.619, 0.68447, 0.37179, 0.53519, 0.32004, 1.5374, 2.2123, 2.4177, 0.00101, 0.00101, 0.00101
|
| 19 |
+
18, 1.892, 2.0068, 2.4925, 0.88956, 0.48718, 0.61594, 0.39418, 1.5834, 2.0166, 2.3996, 0.000944, 0.000944, 0.000944
|
| 20 |
+
19, 1.899, 1.9704, 2.5089, 0.78939, 0.52862, 0.63163, 0.3909, 1.4241, 1.9696, 2.3185, 0.000878, 0.000878, 0.000878
|
| 21 |
+
20, 1.7673, 1.8728, 2.3625, 0.84029, 0.57692, 0.72422, 0.48179, 1.3059, 1.7369, 2.1165, 0.000812, 0.000812, 0.000812
|
| 22 |
+
21, 1.5035, 2.0678, 2.2751, 0.9374, 0.5641, 0.6884, 0.48403, 1.3065, 1.8132, 2.139, 0.000746, 0.000746, 0.000746
|
| 23 |
+
22, 1.4341, 1.8763, 2.1984, 0.87803, 0.57692, 0.73991, 0.51666, 1.2492, 1.8004, 2.0635, 0.00068, 0.00068, 0.00068
|
| 24 |
+
23, 1.3808, 1.7775, 2.0582, 0.966, 0.58974, 0.78011, 0.52467, 1.2326, 1.6171, 1.9621, 0.000614, 0.000614, 0.000614
|
| 25 |
+
24, 1.3441, 1.6567, 2.0348, 0.91807, 0.66667, 0.82138, 0.59602, 1.1664, 1.4552, 1.9257, 0.000548, 0.000548, 0.000548
|
| 26 |
+
25, 1.2599, 1.5807, 1.9404, 0.80975, 0.66667, 0.78333, 0.55559, 1.2357, 1.5361, 2.0033, 0.000482, 0.000482, 0.000482
|
| 27 |
+
26, 1.3012, 1.5666, 1.9369, 0.57749, 0.70096, 0.67983, 0.41625, 1.3787, 1.8761, 2.1761, 0.000416, 0.000416, 0.000416
|
| 28 |
+
27, 1.3047, 1.6818, 1.9253, 0.94117, 0.64103, 0.84781, 0.59356, 1.1485, 1.3088, 1.8654, 0.00035, 0.00035, 0.00035
|
| 29 |
+
28, 1.2549, 1.4966, 1.9067, 0.81571, 0.66667, 0.80591, 0.6337, 0.9911, 1.2801, 1.7223, 0.000284, 0.000284, 0.000284
|
| 30 |
+
29, 1.1618, 1.4299, 1.7644, 0.96502, 0.74359, 0.90157, 0.67221, 1.0147, 1.2228, 1.7514, 0.000218, 0.000218, 0.000218
|
| 31 |
+
30, 1.1168, 1.3897, 1.7266, 0.96674, 0.74525, 0.92046, 0.71501, 0.93414, 1.0617, 1.6553, 0.000152, 0.000152, 0.000152
|
runs/detect/train/results.png
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch0.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch1.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch2.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch680.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch681.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/train_batch682.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/val_batch0_labels.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/val_batch0_pred.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/val_batch1_labels.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/val_batch1_pred.jpg
ADDED
|
Git LFS Details
|
runs/detect/train/weights/best.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:8296083809be6a6c7e5a692478b2668726ed3fd8d6d0dd4595f5c74b359e9d29
|
| 3 |
+
size 6249241
|
runs/detect/train/weights/last.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4f072e377dc78d2df9195c37348e9ea4191297c3511a47e5ee2cbd0e9bce79c0
|
| 3 |
+
size 6249241
|
runs/detect/train2/F1_curve.png
ADDED
|
runs/detect/train2/PR_curve.png
ADDED
|
runs/detect/train2/P_curve.png
ADDED
|
runs/detect/train2/R_curve.png
ADDED
|
runs/detect/train2/args.yaml
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
task: detect
|
| 2 |
+
mode: train
|
| 3 |
+
model: yolov8n.pt
|
| 4 |
+
data: data.yaml
|
| 5 |
+
epochs: 40
|
| 6 |
+
time: null
|
| 7 |
+
patience: 100
|
| 8 |
+
batch: 16
|
| 9 |
+
imgsz: 640
|
| 10 |
+
save: true
|
| 11 |
+
save_period: -1
|
| 12 |
+
cache: false
|
| 13 |
+
device: null
|
| 14 |
+
workers: 8
|
| 15 |
+
project: null
|
| 16 |
+
name: train2
|
| 17 |
+
exist_ok: false
|
| 18 |
+
pretrained: true
|
| 19 |
+
optimizer: auto
|
| 20 |
+
verbose: true
|
| 21 |
+
seed: 0
|
| 22 |
+
deterministic: true
|
| 23 |
+
single_cls: false
|
| 24 |
+
rect: false
|
| 25 |
+
cos_lr: false
|
| 26 |
+
close_mosaic: 10
|
| 27 |
+
resume: false
|
| 28 |
+
amp: true
|
| 29 |
+
fraction: 1.0
|
| 30 |
+
profile: false
|
| 31 |
+
freeze: null
|
| 32 |
+
multi_scale: false
|
| 33 |
+
overlap_mask: true
|
| 34 |
+
mask_ratio: 4
|
| 35 |
+
dropout: 0.0
|
| 36 |
+
val: true
|
| 37 |
+
split: val
|
| 38 |
+
save_json: false
|
| 39 |
+
save_hybrid: false
|
| 40 |
+
conf: null
|
| 41 |
+
iou: 0.7
|
| 42 |
+
max_det: 300
|
| 43 |
+
half: false
|
| 44 |
+
dnn: false
|
| 45 |
+
plots: true
|
| 46 |
+
source: null
|
| 47 |
+
vid_stride: 1
|
| 48 |
+
stream_buffer: false
|
| 49 |
+
visualize: false
|
| 50 |
+
augment: false
|
| 51 |
+
agnostic_nms: false
|
| 52 |
+
classes: null
|
| 53 |
+
retina_masks: false
|
| 54 |
+
embed: null
|
| 55 |
+
show: false
|
| 56 |
+
save_frames: false
|
| 57 |
+
save_txt: false
|
| 58 |
+
save_conf: false
|
| 59 |
+
save_crop: false
|
| 60 |
+
show_labels: true
|
| 61 |
+
show_conf: true
|
| 62 |
+
show_boxes: true
|
| 63 |
+
line_width: null
|
| 64 |
+
format: torchscript
|
| 65 |
+
keras: false
|
| 66 |
+
optimize: false
|
| 67 |
+
int8: false
|
| 68 |
+
dynamic: false
|
| 69 |
+
simplify: false
|
| 70 |
+
opset: null
|
| 71 |
+
workspace: 4
|
| 72 |
+
nms: false
|
| 73 |
+
lr0: 0.01
|
| 74 |
+
lrf: 0.01
|
| 75 |
+
momentum: 0.937
|
| 76 |
+
weight_decay: 0.0005
|
| 77 |
+
warmup_epochs: 3.0
|
| 78 |
+
warmup_momentum: 0.8
|
| 79 |
+
warmup_bias_lr: 0.1
|
| 80 |
+
box: 7.5
|
| 81 |
+
cls: 0.5
|
| 82 |
+
dfl: 1.5
|
| 83 |
+
pose: 12.0
|
| 84 |
+
kobj: 1.0
|
| 85 |
+
label_smoothing: 0.0
|
| 86 |
+
nbs: 64
|
| 87 |
+
hsv_h: 0.015
|
| 88 |
+
hsv_s: 0.7
|
| 89 |
+
hsv_v: 0.4
|
| 90 |
+
degrees: 0.0
|
| 91 |
+
translate: 0.1
|
| 92 |
+
scale: 0.5
|
| 93 |
+
shear: 0.0
|
| 94 |
+
perspective: 0.0
|
| 95 |
+
flipud: 0.0
|
| 96 |
+
fliplr: 0.5
|
| 97 |
+
mosaic: 1.0
|
| 98 |
+
mixup: 0.0
|
| 99 |
+
copy_paste: 0.0
|
| 100 |
+
auto_augment: randaugment
|
| 101 |
+
erasing: 0.4
|
| 102 |
+
crop_fraction: 1.0
|
| 103 |
+
cfg: null
|
| 104 |
+
tracker: botsort.yaml
|
| 105 |
+
save_dir: runs\detect\train2
|
runs/detect/train2/confusion_matrix.png
ADDED
|
runs/detect/train2/confusion_matrix_normalized.png
ADDED
|
runs/detect/train2/events.out.tfevents.1710659328.LAPTOP-02FVE3SQ.23948.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6618fb154e0996a4bd9bb3c732d3908fd9362a322f93fa22d156d299e6b76825
|
| 3 |
+
size 212274
|
runs/detect/train2/labels.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/labels_correlogram.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/results.csv
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
|
| 2 |
+
1, 1.1047, 2.1871, 1.1409, 0.94021, 0.80641, 0.92202, 0.70802, 0.68163, 2.0052, 1.1296, 0.00065686, 0.00065686, 0.00065686
|
| 3 |
+
2, 1.1981, 1.658, 1.1814, 0.81402, 0.74359, 0.85621, 0.59959, 1.126, 1.6208, 1.58, 0.0012908, 0.0012908, 0.0012908
|
| 4 |
+
3, 1.2421, 1.484, 1.2134, 0.82597, 0.69231, 0.81507, 0.57657, 1.0131, 2.2553, 1.3548, 0.0018917, 0.0018917, 0.0018917
|
| 5 |
+
4, 1.2737, 1.3645, 1.2336, 0.83055, 0.84615, 0.88788, 0.65913, 0.94423, 1.3135, 1.3936, 0.0018515, 0.0018515, 0.0018515
|
| 6 |
+
5, 1.2179, 1.1735, 1.1846, 0.83089, 0.81888, 0.84902, 0.64307, 0.88787, 1.8545, 1.2309, 0.0018515, 0.0018515, 0.0018515
|
| 7 |
+
6, 1.2473, 1.0709, 1.1908, 0.95542, 0.82435, 0.94637, 0.74702, 0.78608, 1.1094, 1.1351, 0.001802, 0.001802, 0.001802
|
| 8 |
+
7, 1.1497, 1.0202, 1.1376, 0.9799, 0.88462, 0.96624, 0.78833, 0.78475, 0.65293, 1.1231, 0.0017525, 0.0017525, 0.0017525
|
| 9 |
+
8, 1.1195, 0.95065, 1.1263, 0.93462, 0.91635, 0.97274, 0.80499, 0.73964, 0.70569, 1.0964, 0.001703, 0.001703, 0.001703
|
| 10 |
+
9, 1.0833, 0.90107, 1.1154, 0.87598, 0.92308, 0.94684, 0.77389, 0.67181, 0.74382, 1.0358, 0.0016535, 0.0016535, 0.0016535
|
| 11 |
+
10, 1.091, 0.89814, 1.1145, 0.96231, 0.89744, 0.96086, 0.8318, 0.59603, 0.59705, 0.98855, 0.001604, 0.001604, 0.001604
|
| 12 |
+
11, 1.0791, 0.90118, 1.1031, 0.95898, 0.97436, 0.98281, 0.79127, 0.75318, 0.56472, 1.075, 0.0015545, 0.0015545, 0.0015545
|
| 13 |
+
12, 1.0669, 0.80623, 1.0949, 0.97373, 0.9505, 0.9837, 0.86486, 0.57065, 0.54864, 0.95182, 0.001505, 0.001505, 0.001505
|
| 14 |
+
13, 1.0369, 0.78291, 1.0761, 0.97421, 0.96867, 0.98834, 0.84417, 0.58932, 0.48974, 0.98404, 0.0014555, 0.0014555, 0.0014555
|
| 15 |
+
14, 1.0019, 0.73216, 1.0622, 0.97399, 0.96, 0.98572, 0.85716, 0.60326, 0.48429, 0.97399, 0.001406, 0.001406, 0.001406
|
| 16 |
+
15, 1.0059, 0.77476, 1.0685, 0.93631, 0.98718, 0.98162, 0.84255, 0.61238, 0.47218, 0.98011, 0.0013565, 0.0013565, 0.0013565
|
| 17 |
+
16, 0.96494, 0.7081, 1.0437, 0.97188, 0.98718, 0.99011, 0.87996, 0.5301, 0.42242, 0.92216, 0.001307, 0.001307, 0.001307
|
| 18 |
+
17, 0.98096, 0.69407, 1.0545, 0.98684, 0.96133, 0.98596, 0.84604, 0.59702, 0.47641, 0.96197, 0.0012575, 0.0012575, 0.0012575
|
| 19 |
+
18, 0.97623, 0.70072, 1.0289, 0.97335, 0.98718, 0.99188, 0.86789, 0.55849, 0.43352, 0.94431, 0.001208, 0.001208, 0.001208
|
| 20 |
+
19, 0.97241, 0.67529, 1.0513, 0.99929, 1, 0.995, 0.87601, 0.50267, 0.38779, 0.90575, 0.0011585, 0.0011585, 0.0011585
|
| 21 |
+
20, 0.95114, 0.63805, 1.0299, 0.99764, 0.98718, 0.99407, 0.8399, 0.56451, 0.38259, 0.92673, 0.001109, 0.001109, 0.001109
|
| 22 |
+
21, 0.93095, 0.64765, 1.0385, 0.98192, 0.98718, 0.99387, 0.90415, 0.49723, 0.36752, 0.908, 0.0010595, 0.0010595, 0.0010595
|
| 23 |
+
22, 0.91245, 0.60299, 1.0146, 0.99864, 0.98718, 0.99487, 0.86743, 0.55957, 0.36439, 0.96013, 0.00101, 0.00101, 0.00101
|
| 24 |
+
23, 0.91872, 0.59216, 1.0131, 0.99622, 0.98718, 0.99386, 0.89465, 0.51427, 0.33628, 0.90227, 0.0009605, 0.0009605, 0.0009605
|
| 25 |
+
24, 0.89617, 0.59292, 1.0118, 0.97228, 0.98718, 0.99367, 0.90222, 0.47766, 0.33715, 0.88725, 0.000911, 0.000911, 0.000911
|
| 26 |
+
25, 0.85862, 0.57106, 1.0013, 0.99699, 0.97436, 0.99374, 0.89424, 0.48901, 0.31365, 0.89452, 0.0008615, 0.0008615, 0.0008615
|
| 27 |
+
26, 0.87391, 0.55319, 1.0043, 0.98578, 1, 0.99487, 0.89354, 0.50188, 0.30709, 0.89996, 0.000812, 0.000812, 0.000812
|
| 28 |
+
27, 0.86391, 0.53506, 1.0036, 1, 0.99919, 0.995, 0.87404, 0.58339, 0.32838, 0.93729, 0.0007625, 0.0007625, 0.0007625
|
| 29 |
+
28, 0.86012, 0.53592, 0.98804, 0.99877, 1, 0.995, 0.90056, 0.50144, 0.30495, 0.90022, 0.000713, 0.000713, 0.000713
|
| 30 |
+
29, 0.85647, 0.53517, 0.99247, 0.97222, 1, 0.99462, 0.91023, 0.46925, 0.29517, 0.87565, 0.0006635, 0.0006635, 0.0006635
|
| 31 |
+
30, 0.81294, 0.51002, 0.98658, 0.98728, 0.99493, 0.99475, 0.9044, 0.50788, 0.30693, 0.89236, 0.000614, 0.000614, 0.000614
|
| 32 |
+
31, 0.87108, 0.51121, 0.97877, 0.99788, 0.98718, 0.99487, 0.89498, 0.49344, 0.29819, 0.88282, 0.0005645, 0.0005645, 0.0005645
|
| 33 |
+
32, 0.85448, 0.48646, 0.96261, 0.9991, 1, 0.995, 0.92544, 0.43247, 0.26777, 0.86531, 0.000515, 0.000515, 0.000515
|
| 34 |
+
33, 0.84107, 0.47227, 0.95817, 0.98731, 0.99786, 0.99487, 0.88484, 0.50347, 0.29199, 0.89606, 0.0004655, 0.0004655, 0.0004655
|
| 35 |
+
34, 0.829, 0.46833, 0.96283, 0.99929, 1, 0.995, 0.91089, 0.46188, 0.26732, 0.86755, 0.000416, 0.000416, 0.000416
|
| 36 |
+
35, 0.80208, 0.45, 0.94028, 1, 0.99912, 0.995, 0.91552, 0.42164, 0.26805, 0.84837, 0.0003665, 0.0003665, 0.0003665
|
| 37 |
+
36, 0.79391, 0.44258, 0.93629, 0.99929, 1, 0.995, 0.93306, 0.43542, 0.25278, 0.85318, 0.000317, 0.000317, 0.000317
|
| 38 |
+
37, 0.79675, 0.42337, 0.94194, 0.99923, 1, 0.995, 0.92608, 0.43251, 0.2404, 0.85534, 0.0002675, 0.0002675, 0.0002675
|
| 39 |
+
38, 0.7831, 0.42416, 0.9195, 0.99931, 1, 0.995, 0.92414, 0.4224, 0.24287, 0.84754, 0.000218, 0.000218, 0.000218
|
| 40 |
+
39, 0.77622, 0.42281, 0.93631, 0.99919, 1, 0.995, 0.93545, 0.42191, 0.23646, 0.85075, 0.0001685, 0.0001685, 0.0001685
|
| 41 |
+
40, 0.76306, 0.40916, 0.93092, 0.99904, 1, 0.995, 0.93037, 0.41183, 0.23433, 0.84507, 0.000119, 0.000119, 0.000119
|
runs/detect/train2/results.png
ADDED
|
Git LFS Details
|
runs/detect/train2/train_batch0.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/train_batch1.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/train_batch2.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/train_batch2040.jpg
ADDED
|
Git LFS Details
|
runs/detect/train2/train_batch2041.jpg
ADDED
|
Git LFS Details
|