Datasets:
pax-synetic
committed on
Commit
·
1ae42c8
0
Parent(s):
initial commit
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- .gitignore +209 -0
- FileCrawler.py +134 -0
- LICENSE +21 -0
- PrepareDatasets.py +89 -0
- README.md +110 -0
- applesm5-train-det.py +108 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_0.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_1.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_10.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_12.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_13.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_2.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_3.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_4.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_6.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_7.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_8.jpg +3 -0
- real-original/yolos/images/trains/DSC_1042_17kv1r16k_9.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_0.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_10.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_12.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_2.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_3.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_4.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_5.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_6.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_7.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_8.jpg +3 -0
- real-original/yolos/images/trains/DSC_1043_17kv1r17k_9.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_0.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_1.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_10.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_11.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_12.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_13.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_14.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_2.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_3.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_5.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_6.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_7.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_8.jpg +3 -0
- real-original/yolos/images/trains/DSC_1047_17kv1r21k_9.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_0.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_1.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_10.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_11.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_12.jpg +3 -0
- real-original/yolos/images/trains/DSC_1048_17kv1r22k_13.jpg +3 -0
.gitattributes
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.png filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[codz]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
share/python-wheels/
|
| 24 |
+
*.egg-info/
|
| 25 |
+
.installed.cfg
|
| 26 |
+
*.egg
|
| 27 |
+
MANIFEST
|
| 28 |
+
|
| 29 |
+
# PyInstaller
|
| 30 |
+
# Usually these files are written by a python script from a template
|
| 31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 32 |
+
*.manifest
|
| 33 |
+
*.spec
|
| 34 |
+
|
| 35 |
+
# Installer logs
|
| 36 |
+
pip-log.txt
|
| 37 |
+
pip-delete-this-directory.txt
|
| 38 |
+
|
| 39 |
+
# Unit test / coverage reports
|
| 40 |
+
htmlcov/
|
| 41 |
+
.tox/
|
| 42 |
+
.nox/
|
| 43 |
+
.coverage
|
| 44 |
+
.coverage.*
|
| 45 |
+
.cache
|
| 46 |
+
nosetests.xml
|
| 47 |
+
coverage.xml
|
| 48 |
+
*.cover
|
| 49 |
+
*.py.cover
|
| 50 |
+
.hypothesis/
|
| 51 |
+
.pytest_cache/
|
| 52 |
+
cover/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
.pybuilder/
|
| 76 |
+
target/
|
| 77 |
+
|
| 78 |
+
# Jupyter Notebook
|
| 79 |
+
.ipynb_checkpoints
|
| 80 |
+
|
| 81 |
+
# IPython
|
| 82 |
+
profile_default/
|
| 83 |
+
ipython_config.py
|
| 84 |
+
|
| 85 |
+
# pyenv
|
| 86 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 88 |
+
# .python-version
|
| 89 |
+
|
| 90 |
+
# pipenv
|
| 91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 94 |
+
# install all needed dependencies.
|
| 95 |
+
#Pipfile.lock
|
| 96 |
+
|
| 97 |
+
# UV
|
| 98 |
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
| 99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 100 |
+
# commonly ignored for libraries.
|
| 101 |
+
#uv.lock
|
| 102 |
+
|
| 103 |
+
# poetry
|
| 104 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 105 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 106 |
+
# commonly ignored for libraries.
|
| 107 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 108 |
+
#poetry.lock
|
| 109 |
+
#poetry.toml
|
| 110 |
+
|
| 111 |
+
# pdm
|
| 112 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 113 |
+
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
| 114 |
+
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
| 115 |
+
#pdm.lock
|
| 116 |
+
#pdm.toml
|
| 117 |
+
.pdm-python
|
| 118 |
+
.pdm-build/
|
| 119 |
+
|
| 120 |
+
# pixi
|
| 121 |
+
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
| 122 |
+
#pixi.lock
|
| 123 |
+
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
| 124 |
+
# in the .venv directory. It is recommended not to include this directory in version control.
|
| 125 |
+
.pixi
|
| 126 |
+
|
| 127 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 128 |
+
__pypackages__/
|
| 129 |
+
|
| 130 |
+
# Celery stuff
|
| 131 |
+
celerybeat-schedule
|
| 132 |
+
celerybeat.pid
|
| 133 |
+
|
| 134 |
+
# SageMath parsed files
|
| 135 |
+
*.sage.py
|
| 136 |
+
|
| 137 |
+
# Environments
|
| 138 |
+
.env
|
| 139 |
+
.envrc
|
| 140 |
+
.venv
|
| 141 |
+
env/
|
| 142 |
+
venv/
|
| 143 |
+
ENV/
|
| 144 |
+
env.bak/
|
| 145 |
+
venv.bak/
|
| 146 |
+
|
| 147 |
+
# Spyder project settings
|
| 148 |
+
.spyderproject
|
| 149 |
+
.spyproject
|
| 150 |
+
|
| 151 |
+
# Rope project settings
|
| 152 |
+
.ropeproject
|
| 153 |
+
|
| 154 |
+
# mkdocs documentation
|
| 155 |
+
/site
|
| 156 |
+
|
| 157 |
+
# mypy
|
| 158 |
+
.mypy_cache/
|
| 159 |
+
.dmypy.json
|
| 160 |
+
dmypy.json
|
| 161 |
+
|
| 162 |
+
# Pyre type checker
|
| 163 |
+
.pyre/
|
| 164 |
+
|
| 165 |
+
# pytype static type analyzer
|
| 166 |
+
.pytype/
|
| 167 |
+
|
| 168 |
+
# Cython debug symbols
|
| 169 |
+
cython_debug/
|
| 170 |
+
|
| 171 |
+
# PyCharm
|
| 172 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 173 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 174 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 175 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 176 |
+
#.idea/
|
| 177 |
+
|
| 178 |
+
# Abstra
|
| 179 |
+
# Abstra is an AI-powered process automation framework.
|
| 180 |
+
# Ignore directories containing user credentials, local state, and settings.
|
| 181 |
+
# Learn more at https://abstra.io/docs
|
| 182 |
+
.abstra/
|
| 183 |
+
|
| 184 |
+
# Visual Studio Code
|
| 185 |
+
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
| 186 |
+
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
| 187 |
+
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
| 188 |
+
# you could uncomment the following to ignore the entire vscode folder
|
| 189 |
+
# .vscode/
|
| 190 |
+
|
| 191 |
+
# Ruff stuff:
|
| 192 |
+
.ruff_cache/
|
| 193 |
+
|
| 194 |
+
# PyPI configuration file
|
| 195 |
+
.pypirc
|
| 196 |
+
|
| 197 |
+
# Cursor
|
| 198 |
+
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
|
| 199 |
+
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
|
| 200 |
+
# refer to https://docs.cursor.com/context/ignore-files
|
| 201 |
+
.cursorignore
|
| 202 |
+
.cursorindexingignore
|
| 203 |
+
|
| 204 |
+
# Marimo
|
| 205 |
+
marimo/_static/
|
| 206 |
+
marimo/_lsp/
|
| 207 |
+
__marimo__/
|
| 208 |
+
|
| 209 |
+
ai/
|
FileCrawler.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
from django.core.serializers.json import DjangoJSONEncoder
|
| 4 |
+
import json
|
| 5 |
+
json.JSONEncoder.default = DjangoJSONEncoder
|
| 6 |
+
|
| 7 |
+
from tqdm import tqdm
|
| 8 |
+
|
| 9 |
+
import random
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Directory(object):
    """A crawled directory: normalized path, display name, and its files."""

    def __init__(self, path, name):
        # Normalize to forward slashes and collapse doubled separators.
        self._path = path.replace('\\', '/').replace('//', '/')
        self._name = name
        self._files = {}      # normalized file path -> File
        self._filesArr = []   # File objects in discovery order
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class File(object):
    """A crawled file plus cached metadata derived from its path."""

    def __init__(self, directory, path, name, extension):
        self._directory = directory
        # Normalize to forward slashes and collapse doubled separators.
        self._path = path.replace('\\', '/').replace('//', '/')
        self._name = name
        self._extension = extension
        self._pathDir = os.path.dirname(self._path)
        self._dirName = os.path.basename(self._pathDir)
        # Not computed here; presumably filled in by a caller when needed.
        self._fileSize = None
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class FileCrawler(object):
    """Recursively collects files under a root folder, with optional filters.

    Filters (each disabled when None or empty):
      * p_directoryNameContainsFilterSet -- keep a file only if any of these
        substrings appears in its (normalized) directory path.
      * p_fileNameContainsFilterSet -- keep a file only if any of these
        substrings appears in its lower-cased stem.
      * p_extensionFilterSet -- keep a file only if its lower-cased extension
        (with leading dot, e.g. '.jpg') is in the set.
    """

    def __init__(self, p_rootFolder, p_directoryNameContainsFilterSet, p_fileNameContainsFilterSet, p_extensionFilterSet):
        self._rootFolder = p_rootFolder

        # Normalize each filter to either None (disabled) or a non-empty set.
        self._directoryNameContainsFilterSet = None
        if p_directoryNameContainsFilterSet is not None and len(p_directoryNameContainsFilterSet) > 0:
            self._directoryNameContainsFilterSet = p_directoryNameContainsFilterSet

        self._fileNameContainsFilterSet = None
        if p_fileNameContainsFilterSet is not None and len(p_fileNameContainsFilterSet) > 0:
            self._fileNameContainsFilterSet = p_fileNameContainsFilterSet

        self._extensionFilterSet = None
        if p_extensionFilterSet is not None and len(p_extensionFilterSet) > 0:
            self._extensionFilterSet = p_extensionFilterSet

        self._pathFiles = []

        self._directories = {}  # normalized dir path -> Directory
        self._files = {}        # normalized file path -> File
        self._filesArr = []     # File objects in discovery order

        self.Crawl(p_rootFolder)

        self._lenDirectories = len(self._directories)
        self._lenFiles = len(self._files)
        self._pathFiles = sorted(self._pathFiles)

    def Decimate(self, decimateCount=None, isRandomSample=True):
        """Return a dict of 'stem + extension' -> File for a subset of files.

        decimateCount: number of files to keep (all crawled files when None).
        isRandomSample: sample randomly when True, otherwise take the first N.

        Raises ValueError (from random.sample) when decimateCount exceeds the
        number of crawled files.

        NOTE: files with the same 'stem + extension' in different directories
        collapse to a single dict entry.
        """
        if decimateCount is not None:
            if isRandomSample:
                files = random.sample(self._filesArr, decimateCount)
            else:
                files = self._filesArr[:decimateCount]
        else:
            files = self._files.values()

        namesExtensionsFiles = {}
        for file in tqdm(files, desc="Decimate"):
            namesExtensionsFiles[file._name + file._extension] = file

        return namesExtensionsFiles

    def Crawl(self, p_rootFolder, p_indent=''):
        """Walk p_rootFolder and register every file that passes the filters.

        p_indent is retained for backward compatibility; it is no longer used.

        BUG FIX: the previous implementation used os.walk (which already
        visits every subdirectory) AND also recursed manually into each
        dirName, so each subtree was crawled multiple times (multiplying
        with depth) and duplicate entries accumulated in _pathFiles and
        _filesArr.  The redundant manual recursion has been removed.
        """
        for root, _dirNames, fileNames in os.walk(p_rootFolder):
            # Normalize once so directory filtering and Directory keys are
            # consistent for every file in this folder (the old code rebound
            # `root` mid-loop, so the first file was filtered against the raw
            # path and subsequent ones against the normalized path).
            rootNorm = root.replace('\\', '/').replace('//', '/')

            for fileName in fileNames:
                fF, fFe = os.path.splitext(fileName)          # original-case stem/ext
                ff, ffe = os.path.splitext(fileName.lower())  # lower-cased for filtering

                if self._extensionFilterSet is not None:
                    if ffe not in self._extensionFilterSet:
                        continue

                if self._directoryNameContainsFilterSet is not None:
                    if not any(directoryNameFilter in rootNorm
                               for directoryNameFilter in self._directoryNameContainsFilterSet):
                        continue

                if self._fileNameContainsFilterSet is not None:
                    if not any(fileNameFilter in ff
                               for fileNameFilter in self._fileNameContainsFilterSet):
                        continue

                pathFile = os.path.join(rootNorm, fileName)
                pathFile = pathFile.replace('\\', '/').replace('//', '/')
                self._pathFiles.append(pathFile)

                if rootNorm not in self._directories:
                    directory = Directory(rootNorm, os.path.basename(rootNorm))
                    self._directories[directory._path] = directory
                else:
                    directory = self._directories[rootNorm]

                file = File(directory, pathFile, fF, fFe)
                self._files[file._path] = file
                self._filesArr.append(file)
                directory._files[file._path] = file
                directory._filesArr.append(file)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
if __name__ == '__main__':
    # The module is meant to be imported for its classes; running it
    # directly only confirms it loads.
    print('done.')
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2025 Syneticai
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
PrepareDatasets.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
|
| 5 |
+
from tqdm import tqdm
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def CreateYaml(newDatasetFolder):
    """Write the Ultralytics dataset YAML for *newDatasetFolder*.

    Creates ``<newDatasetFolder>/<newDatasetFolder>.yaml`` pointing at the
    dataset's train/val image folders.  The folder must already exist and
    *newDatasetFolder* is assumed to be a relative folder name (the path is
    built as ``name/name.yaml``).
    """
    pathYaml = f'{newDatasetFolder}/{newDatasetFolder}.yaml'

    # Literal '{user}' placeholder is intentionally written into the YAML;
    # end users substitute their own account name in the generated paths.
    userName = '{user}'

    # Opening with mode 'w' truncates an existing file, so the previous
    # explicit os.path.exists/os.remove step was redundant and was removed;
    # likewise f.flush() inside the with-block (close() flushes).
    with open(pathYaml, 'w', newline='\n') as f:
        f.write(f'''
train: /home/{userName}/datasets/ApplesM5/{newDatasetFolder}/yolos/images/trains
val: /home/{userName}/datasets/ApplesM5/{newDatasetFolder}/yolos/images/vals

nc: 1
names:
  0: 'Apple'

''')
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def IgnorePlusHash(src, names):
    """shutil.copytree ignore-callback: skip entries whose name contains '+#'."""
    skipped = []
    for entry in names:
        if '+#' in entry:
            skipped.append(entry)
    return skipped
|
| 31 |
+
|
| 32 |
+
if __name__ == "__main__":

    # Source dataset folders expected to already exist on disk; each gets its
    # own Ultralytics YAML written next to its data.
    originsFolders = [
        'synetic-bg',
        'real',
        'real-original'
    ]

    # NOTE(review): this list is never read by the script (the authoritative
    # configuration is newDatasetsFoldersAndTrainsValsOrigins below); it
    # appears to document the folders produced -- kept as-is.
    newFolders = [
        'synetic',

        'synetic-bg-train+real-val',
        'synetic-train+real-val',
        'synetic+real',

        'synetic-bg-train+real-original-val',
        'synetic-train+real-original-val',
        'synetic+real-original',
    ]

    for originFolder in originsFolders:
        CreateYaml(originFolder)

    # Each entry: [new dataset folder, train-split origin folders, val-split
    # origin folders, drop files whose names contain '+#' while copying].
    # NOTE(review): 'synetic-train+real-val' and
    # 'synetic-train+real-original-val' source their train split from
    # 'synetic-bg' rather than 'synetic' -- confirm this is intentional.
    newDatasetsFoldersAndTrainsValsOrigins = [
        [ 'synetic', ['synetic-bg'], ['synetic-bg'], True ],

        [ 'synetic-bg-train+real-val', ['synetic-bg'], ['real'], False ],
        [ 'synetic-train+real-val', ['synetic-bg'], ['real'], False ],
        [ 'synetic+real', ['synetic', 'real'], ['synetic', 'real'], False ],

        [ 'synetic-bg-train+real-original-val', ['synetic-bg'], ['real-original'], False ],
        [ 'synetic-train+real-original-val', ['synetic-bg'], ['real-original'], False ],
        [ 'synetic+real-original', ['synetic', 'real-original'], ['synetic', 'real-original'], False ],
    ]

    # YOLO directory layout: <dataset>/yolos/{images,labels}/{trains,vals}
    datasFormats = ['images', 'labels']

    datasOriginsTypes = ['trains', 'vals']

    for (newDatasetFolder, trainsOrigins, valsOrigins, ignorePlusHash) in newDatasetsFoldersAndTrainsValsOrigins:
        print(f'Processing: {newDatasetFolder}..')

        # Rebuild the target dataset folder from scratch on every run.
        if os.path.exists(newDatasetFolder):
            shutil.rmtree(newDatasetFolder)
        os.makedirs(newDatasetFolder)

        CreateYaml(newDatasetFolder)

        # Copy images and labels for the train and val splits from every
        # configured origin folder into the new dataset, merging into any
        # already-copied content (dirs_exist_ok=True).
        for dataOriginType, datasOrigins in tqdm(zip(datasOriginsTypes, [trainsOrigins, valsOrigins])):
            for dataOrigin in datasOrigins:
                for dataFormat in datasFormats:
                    src = f'./{dataOrigin}/yolos/{dataFormat}/{dataOriginType}/'
                    dst = f'{newDatasetFolder}/yolos/{dataFormat}/{dataOriginType}/'

                    if ignorePlusHash:
                        # Drop entries whose names contain '+#' (see IgnorePlusHash).
                        shutil.copytree(src, dst, dirs_exist_ok=True, ignore=IgnorePlusHash)
                    else:
                        shutil.copytree(src, dst, dirs_exist_ok=True)
|
README.md
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ApplesM5
|
| 2 |
+
## Breaking the Bottleneck: Synthetic Data as the New Foundation for Vision AI
|
| 3 |
+
|
| 4 |
+
This repository contains training images and scripts for the Synetic AI **ApplesM5** object detection project that was used in the **Breaking the Bottleneck: Synthetic Data as the New Foundation for Vision AI** white paper, using **Ultralytics YOLO12**. The core scripts allow you to train models using custom YAML datasets and evaluate the results using the provided train_metrics.py script.
|
| 5 |
+
|
| 6 |
+
The paper is available for download at https://synetic.ai/white-paper/breaking/benchmark .
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
## 📂 Repository Structure (Key Files)
|
| 11 |
+
|
| 12 |
+
| File | Purpose |
|
| 13 |
+
| -------------------------- | ------------------------------------------------------------------------------------------- |
|
| 14 |
+
| `PrepareDatasets.py` | Produces the individual datasets used for training various combinations. |
|
| 15 |
+
| `applesm5-train-det.py` | Trains YOLO12 detection models using specified datasets and hyperparameters. |
|
| 16 |
+
| `FileCrawler.py` | Recursively crawls directories to find image and label files. Used for evaluating datasets. |
|
| 17 |
+
| `train_metrics.py` | Runs evaluations on trained YOLO12 models and computes mAP, precision, and recall metrics. |
|
| 18 |
+
| `*.yaml` (dataset configs) | Define dataset splits, including training, validation, and test image directories. |
|
| 19 |
+
|
| 20 |
+
---
|
| 21 |
+
|
| 22 |
+
## ⚙️ Setup
|
| 23 |
+
|
| 24 |
+
### 1. Install Dependencies
|
| 25 |
+
|
| 26 |
+
```bash
|
| 27 |
+
pip install ultralytics tqdm
|
| 28 |
+
```
|
| 29 |
+
|
| 30 |
+
Your environment should have **PyTorch** and GPU drivers properly configured.
|
| 31 |
+
|
| 32 |
+
---
|
| 33 |
+
|
| 34 |
+
## 🚀 Usage
|
| 35 |
+
|
| 36 |
+
### 0. Prepare Datasets (`PrepareDatasets.py`)
|
| 37 |
+
|
| 38 |
+
It produces multiple dataset folders that combine the synthetic and real data in different ways, built from the real and synetic source folders.
|
| 39 |
+
|
| 40 |
+
```bash
|
| 41 |
+
python PrepareDatasets.py
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
### A. Training Models (`applesm5-train-det.py`)
|
| 45 |
+
|
| 46 |
+
To train object detection models using YOLO12:
|
| 47 |
+
|
| 48 |
+
```bash
|
| 49 |
+
python applesm5-train-det.py
|
| 50 |
+
```
|
| 51 |
+
|
| 52 |
+
Key things to configure:
|
| 53 |
+
|
| 54 |
+
- Edit the `dataNames` list to point to your dataset YAML files (e.g., `real`, `synetic+real`, etc.).
|
| 55 |
+
- YAML files should be placed at `/home/user/datasets/ApplesM5/`.
|
| 56 |
+
- Adjust `hyperparams`, `epochs`, and GPU `devices` as needed.
|
| 57 |
+
- The script trains multiple model variants (`yolo12n.yaml`, etc.) and saves results to the Ultralytics default `runs/detect/` folder.
|
| 58 |
+
|
| 59 |
+
---
|
| 60 |
+
|
| 61 |
+
### B. Dataset YAML Files
|
| 62 |
+
|
| 63 |
+
Example dataset YAML (`real.yaml`):
|
| 64 |
+
|
| 65 |
+
```yaml
|
| 66 |
+
path: /path/to/your/dataset
|
| 67 |
+
train: images/train
|
| 68 |
+
val: images/val
|
| 69 |
+
test: images/test
|
| 70 |
+
names:
|
| 71 |
+
0: apple
|
| 72 |
+
```
|
| 73 |
+
|
| 74 |
+
Modify the paths in your YAML files to point to your dataset locations.
|
| 75 |
+
|
| 76 |
+
---
|
| 77 |
+
|
| 78 |
+
### C. Evaluating Models (`train_metrics.py`)
|
| 79 |
+
|
| 80 |
+
After training, you can evaluate your models on a validation dataset:
|
| 81 |
+
|
| 82 |
+
```bash
|
| 83 |
+
python train_metrics.py
|
| 84 |
+
```
|
| 85 |
+
|
| 86 |
+
Make sure to adjust the following in the script:
|
| 87 |
+
|
| 88 |
+
- `modelPaths`: list of trained YOLO12 model `.pt` files to evaluate.
|
| 89 |
+
- `pathValsDataset`: path to your validation images (`.png`/`.jpg`).
|
| 90 |
+
|
| 91 |
+
This will compute **mAP50**, **mAP50-95**, **precision**, and **recall** scores and print them to the console.
|
| 92 |
+
|
| 93 |
+
---
|
| 94 |
+
|
| 95 |
+
## ✅ Example Workflow
|
| 96 |
+
|
| 97 |
+
1. Prepare datasets and YAML config files.
|
| 98 |
+
2. Train detection models with `applesm5-train-det.py`.
|
| 99 |
+
3. Run `train_metrics.py` to benchmark models.
|
| 100 |
+
4. Iterate on your datasets and training parameters to improve performance.
|
| 101 |
+
|
| 102 |
+
---
|
| 103 |
+
|
| 104 |
+
## 🔧 Notes
|
| 105 |
+
|
| 106 |
+
- The training script assumes a multi-GPU setup (adjust the `devices` list if needed).
|
| 107 |
+
- The repo is tuned for an NVIDIA DGX or similar system with 8 GPUs but can be modified for single-GPU setups.
|
| 108 |
+
- Dataset YAML and trained model `.pt` files follow the **Ultralytics YOLO12** conventions.
|
| 109 |
+
|
| 110 |
+
---
|
applesm5-train-det.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import ultralytics

'''
Train object-detection models (YOLO v3/v5/v6/v8/11/12 and RT-DETR) on the
ApplesM5 dataset variants produced by PrepareDatasets.py.

NOTE:
adjust this code as needed - the below are parameters for a 8xB200 system
'''

if __name__ == "__main__":
    ultralytics.checks()

    # TODO: replace this placeholder with your actual login name
    # (e.g. via getpass.getuser()); '{user}' is a literal here, so the
    # dataset paths below will not resolve until it is set.
    userName = '{user}'

    epochs = 100

    # pick and choose what datasets to train - make sure to first run PrepareDatasets.py
    dataNames = [
        'synetic-train+real-val',
        'synetic-train+real-original-val',

        'synetic-bg-train+real-val',
        'synetic+real',

        'synetic-bg-train+real-original-val',
        'synetic+real-original',

        'real',
        'real-original',

        'synetic-bg',
        'synetic',
    ]

    # (model version, model size suffix, dataset root) per training configuration.
    hyperparams = [
        ('12', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('11', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('v8', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('v6', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('v5', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('v3', 'n', f'/home/{userName}/datasets/ApplesM5'),
        ('rtdetr', '-l', f'/home/{userName}/datasets/ApplesM5'),
    ]

    # Device/batch configuration is loop-invariant: hoisted out of the loops.
    # (The original recomputed these every iteration and applied a redundant
    # int() cast to an already-integer product.)
    devices = [0, 1, 2, 3, 4, 5, 6, 7]
    batchSize = len(devices) * 30 * 2  # 60 images per GPU

    for dataName in dataNames:
        for modelVersion, modelSize, datasetRoot in hyperparams:
            pathDataYaml = f'{datasetRoot}/{dataName}/{dataName}.yaml'

            projectName = f'ApplesM5_{modelVersion}{modelSize}'
            taskName = 'detect'

            # RT-DETR loads pretrained .pt weights; the YOLO variants are
            # built from a model-architecture YAML (training from scratch).
            if 'rtdetr' in modelVersion:
                modelName = f"{modelVersion}{modelSize}.pt"
                modelDet = ultralytics.RTDETR(modelName)
            else:
                modelName = f"yolo{modelVersion}{modelSize}.yaml"
                modelDet = ultralytics.YOLO(modelName)

            trainName = f'{projectName}-{taskName}-{epochs}_{dataName}_0'

            results = modelDet.train(
                imgsz=640,

                name=trainName,
                data=pathDataYaml,
                task=taskName,
                epochs=epochs,
                device=devices,
                batch=batchSize,
                workers=28,

                cache='disk',

                # geometric / photometric augmentation
                flipud=0.5,
                fliplr=0.5,

                hsv_h=0.1,
                hsv_s=0.1,
                hsv_v=0.1,

                mosaic=0.75,
                close_mosaic=0,

                degrees=45.0,
                shear=15.0,
                perspective=0.0005,
                translate=0.3,
                mixup=0.1,  # image mixup (probability)
                copy_paste=0.1,  # segment copy-paste (probability)
                auto_augment='randaugment',  # (str) auto augmentation policy for classification (randaugment, autoaugment, augmix)
                augment=True,

                val=True,
            )
real-original/yolos/images/trains/DSC_1042_17kv1r16k_0.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_1.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_10.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_12.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_13.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_2.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_3.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_4.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_6.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_7.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_8.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1042_17kv1r16k_9.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_0.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_10.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_12.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_2.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_3.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_4.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_5.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_6.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_7.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_8.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1043_17kv1r17k_9.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_0.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_1.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_10.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_11.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_12.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_13.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_14.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_2.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_3.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_5.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_6.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_7.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_8.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1047_17kv1r21k_9.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_0.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_1.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_10.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_11.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_12.jpg
ADDED
|
Git LFS Details
|
real-original/yolos/images/trains/DSC_1048_17kv1r22k_13.jpg
ADDED
|
Git LFS Details
|