Commit ·
fbb3a5d
0
Parent(s):
Argus v1.0
Browse files- .gitattributes +160 -0
- LICENSE +127 -0
- README.md +153 -0
- argus.py +1017 -0
- config.json +2022 -0
- model.safetensors +3 -0
.gitattributes
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
quickstart/data/000002.jpg filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
quickstart/data/000008.jpg filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
quickstart/data/000031.jpg filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
quickstart/data/000058.jpg filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
quickstart/data/000083.jpg filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
quickstart/data/000089.jpg filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
quickstart/data/000191.jpg filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
quickstart/data/000400.jpg filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
quickstart/data/000436.jpg filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
quickstart/data/000452.jpg filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
quickstart/data/000496.jpg filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
quickstart/data/000557.jpg filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
quickstart/data/000575.jpg filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
quickstart/data/000591.jpg filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
quickstart/data/000600.jpg filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
quickstart/data/000665.jpg filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
quickstart/data/000696.jpg filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
quickstart/data/000773.jpg filter=lfs diff=lfs merge=lfs -text
|
| 54 |
+
quickstart/data/000781.jpg filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
quickstart/data/000793.jpg filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
quickstart/data/000868.jpg filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
quickstart/data/000880.jpg filter=lfs diff=lfs merge=lfs -text
|
| 58 |
+
quickstart/data/000889.jpg filter=lfs diff=lfs merge=lfs -text
|
| 59 |
+
quickstart/data/000939.jpg filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
quickstart/data/000957.jpg filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
quickstart/data/000998.jpg filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
quickstart/data/001057.jpg filter=lfs diff=lfs merge=lfs -text
|
| 63 |
+
quickstart/data/001078.jpg filter=lfs diff=lfs merge=lfs -text
|
| 64 |
+
quickstart/data/001118.jpg filter=lfs diff=lfs merge=lfs -text
|
| 65 |
+
quickstart/data/001191.jpg filter=lfs diff=lfs merge=lfs -text
|
| 66 |
+
quickstart/data/001289.jpg filter=lfs diff=lfs merge=lfs -text
|
| 67 |
+
quickstart/data/001348.jpg filter=lfs diff=lfs merge=lfs -text
|
| 68 |
+
quickstart/data/001394.jpg filter=lfs diff=lfs merge=lfs -text
|
| 69 |
+
quickstart/data/001430.jpg filter=lfs diff=lfs merge=lfs -text
|
| 70 |
+
quickstart/data/001586.jpg filter=lfs diff=lfs merge=lfs -text
|
| 71 |
+
quickstart/data/001587.jpg filter=lfs diff=lfs merge=lfs -text
|
| 72 |
+
quickstart/data/001599.jpg filter=lfs diff=lfs merge=lfs -text
|
| 73 |
+
quickstart/data/001624.jpg filter=lfs diff=lfs merge=lfs -text
|
| 74 |
+
quickstart/data/001631.jpg filter=lfs diff=lfs merge=lfs -text
|
| 75 |
+
quickstart/data/001634.jpg filter=lfs diff=lfs merge=lfs -text
|
| 76 |
+
quickstart/data/001685.jpg filter=lfs diff=lfs merge=lfs -text
|
| 77 |
+
quickstart/data/001741.jpg filter=lfs diff=lfs merge=lfs -text
|
| 78 |
+
quickstart/data/001763.jpg filter=lfs diff=lfs merge=lfs -text
|
| 79 |
+
quickstart/data/001851.jpg filter=lfs diff=lfs merge=lfs -text
|
| 80 |
+
quickstart/data/001934.jpg filter=lfs diff=lfs merge=lfs -text
|
| 81 |
+
quickstart/data/001949.jpg filter=lfs diff=lfs merge=lfs -text
|
| 82 |
+
quickstart/data/001951.jpg filter=lfs diff=lfs merge=lfs -text
|
| 83 |
+
quickstart/data/001983.jpg filter=lfs diff=lfs merge=lfs -text
|
| 84 |
+
quickstart/data/002022.jpg filter=lfs diff=lfs merge=lfs -text
|
| 85 |
+
quickstart/data/002070.jpg filter=lfs diff=lfs merge=lfs -text
|
| 86 |
+
quickstart/data/002086.jpg filter=lfs diff=lfs merge=lfs -text
|
| 87 |
+
quickstart/data/002186.jpg filter=lfs diff=lfs merge=lfs -text
|
| 88 |
+
quickstart/data/002284.jpg filter=lfs diff=lfs merge=lfs -text
|
| 89 |
+
quickstart/data/002334.jpg filter=lfs diff=lfs merge=lfs -text
|
| 90 |
+
quickstart/data/002450.jpg filter=lfs diff=lfs merge=lfs -text
|
| 91 |
+
quickstart/data/002468.jpg filter=lfs diff=lfs merge=lfs -text
|
| 92 |
+
quickstart/data/002489.jpg filter=lfs diff=lfs merge=lfs -text
|
| 93 |
+
quickstart/data/002497.jpg filter=lfs diff=lfs merge=lfs -text
|
| 94 |
+
quickstart/data/002514.jpg filter=lfs diff=lfs merge=lfs -text
|
| 95 |
+
quickstart/data/002538.jpg filter=lfs diff=lfs merge=lfs -text
|
| 96 |
+
quickstart/data/002553.jpg filter=lfs diff=lfs merge=lfs -text
|
| 97 |
+
quickstart/data/002586.jpg filter=lfs diff=lfs merge=lfs -text
|
| 98 |
+
quickstart/data/002592.jpg filter=lfs diff=lfs merge=lfs -text
|
| 99 |
+
quickstart/data/002598.jpg filter=lfs diff=lfs merge=lfs -text
|
| 100 |
+
quickstart/data/002640.jpg filter=lfs diff=lfs merge=lfs -text
|
| 101 |
+
quickstart/data/002660.jpg filter=lfs diff=lfs merge=lfs -text
|
| 102 |
+
quickstart/data/002671.jpg filter=lfs diff=lfs merge=lfs -text
|
| 103 |
+
quickstart/data/002799.jpg filter=lfs diff=lfs merge=lfs -text
|
| 104 |
+
quickstart/data/002905.jpg filter=lfs diff=lfs merge=lfs -text
|
| 105 |
+
quickstart/data/002939.jpg filter=lfs diff=lfs merge=lfs -text
|
| 106 |
+
quickstart/data/002953.jpg filter=lfs diff=lfs merge=lfs -text
|
| 107 |
+
quickstart/data/003084.jpg filter=lfs diff=lfs merge=lfs -text
|
| 108 |
+
quickstart/data/003132.jpg filter=lfs diff=lfs merge=lfs -text
|
| 109 |
+
quickstart/data/003148.jpg filter=lfs diff=lfs merge=lfs -text
|
| 110 |
+
quickstart/data/003254.jpg filter=lfs diff=lfs merge=lfs -text
|
| 111 |
+
quickstart/data/003344.jpg filter=lfs diff=lfs merge=lfs -text
|
| 112 |
+
quickstart/data/003391.jpg filter=lfs diff=lfs merge=lfs -text
|
| 113 |
+
quickstart/data/003420.jpg filter=lfs diff=lfs merge=lfs -text
|
| 114 |
+
quickstart/data/003502.jpg filter=lfs diff=lfs merge=lfs -text
|
| 115 |
+
quickstart/data/003541.jpg filter=lfs diff=lfs merge=lfs -text
|
| 116 |
+
quickstart/data/003614.jpg filter=lfs diff=lfs merge=lfs -text
|
| 117 |
+
quickstart/data/003665.jpg filter=lfs diff=lfs merge=lfs -text
|
| 118 |
+
quickstart/data/003713.jpg filter=lfs diff=lfs merge=lfs -text
|
| 119 |
+
quickstart/data/003754.jpg filter=lfs diff=lfs merge=lfs -text
|
| 120 |
+
quickstart/data/003805.jpg filter=lfs diff=lfs merge=lfs -text
|
| 121 |
+
quickstart/data/003871.jpg filter=lfs diff=lfs merge=lfs -text
|
| 122 |
+
quickstart/data/003880.jpg filter=lfs diff=lfs merge=lfs -text
|
| 123 |
+
quickstart/data/003911.jpg filter=lfs diff=lfs merge=lfs -text
|
| 124 |
+
quickstart/data/003978.jpg filter=lfs diff=lfs merge=lfs -text
|
| 125 |
+
quickstart/data/004039.jpg filter=lfs diff=lfs merge=lfs -text
|
| 126 |
+
quickstart/data/004066.jpg filter=lfs diff=lfs merge=lfs -text
|
| 127 |
+
quickstart/data/004082.jpg filter=lfs diff=lfs merge=lfs -text
|
| 128 |
+
quickstart/data/004096.jpg filter=lfs diff=lfs merge=lfs -text
|
| 129 |
+
quickstart/data/004131.jpg filter=lfs diff=lfs merge=lfs -text
|
| 130 |
+
quickstart/data/004170.jpg filter=lfs diff=lfs merge=lfs -text
|
| 131 |
+
quickstart/data/004172.jpg filter=lfs diff=lfs merge=lfs -text
|
| 132 |
+
quickstart/data/004263.jpg filter=lfs diff=lfs merge=lfs -text
|
| 133 |
+
quickstart/data/004304.jpg filter=lfs diff=lfs merge=lfs -text
|
| 134 |
+
quickstart/data/004315.jpg filter=lfs diff=lfs merge=lfs -text
|
| 135 |
+
quickstart/data/004329.jpg filter=lfs diff=lfs merge=lfs -text
|
| 136 |
+
quickstart/data/004371.jpg filter=lfs diff=lfs merge=lfs -text
|
| 137 |
+
quickstart/data/004431.jpg filter=lfs diff=lfs merge=lfs -text
|
| 138 |
+
quickstart/data/004510.jpg filter=lfs diff=lfs merge=lfs -text
|
| 139 |
+
quickstart/data/004514.jpg filter=lfs diff=lfs merge=lfs -text
|
| 140 |
+
quickstart/data/004517.jpg filter=lfs diff=lfs merge=lfs -text
|
| 141 |
+
quickstart/data/004525.jpg filter=lfs diff=lfs merge=lfs -text
|
| 142 |
+
quickstart/data/004535.jpg filter=lfs diff=lfs merge=lfs -text
|
| 143 |
+
quickstart/data/004546.jpg filter=lfs diff=lfs merge=lfs -text
|
| 144 |
+
quickstart/data/004548.jpg filter=lfs diff=lfs merge=lfs -text
|
| 145 |
+
quickstart/data/004557.jpg filter=lfs diff=lfs merge=lfs -text
|
| 146 |
+
quickstart/data/004585.jpg filter=lfs diff=lfs merge=lfs -text
|
| 147 |
+
quickstart/data/004590.jpg filter=lfs diff=lfs merge=lfs -text
|
| 148 |
+
quickstart/data/004610.jpg filter=lfs diff=lfs merge=lfs -text
|
| 149 |
+
quickstart/data/004627.jpg filter=lfs diff=lfs merge=lfs -text
|
| 150 |
+
quickstart/data/004651.jpg filter=lfs diff=lfs merge=lfs -text
|
| 151 |
+
quickstart/data/004656.jpg filter=lfs diff=lfs merge=lfs -text
|
| 152 |
+
quickstart/data/004702.jpg filter=lfs diff=lfs merge=lfs -text
|
| 153 |
+
quickstart/data/004743.jpg filter=lfs diff=lfs merge=lfs -text
|
| 154 |
+
quickstart/data/004755.jpg filter=lfs diff=lfs merge=lfs -text
|
| 155 |
+
quickstart/data/004775.jpg filter=lfs diff=lfs merge=lfs -text
|
| 156 |
+
quickstart/data/004781.jpg filter=lfs diff=lfs merge=lfs -text
|
| 157 |
+
quickstart/data/004831.jpg filter=lfs diff=lfs merge=lfs -text
|
| 158 |
+
quickstart/data/004852.jpg filter=lfs diff=lfs merge=lfs -text
|
| 159 |
+
quickstart/data/004939.jpg filter=lfs diff=lfs merge=lfs -text
|
| 160 |
+
quickstart/data/004978.jpg filter=lfs diff=lfs merge=lfs -text
|
LICENSE
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# FAIR Noncommercial Research License
|
| 2 |
+
|
| 3 |
+
*v1 Last Updated: August 18, 2025*
|
| 4 |
+
|
| 5 |
+
**"Acceptable Use Policy"** means the FAIR Acceptable Use Policy, applicable to Research Materials, that is incorporated into this Agreement.
|
| 6 |
+
|
| 7 |
+
**"Agreement"** means the terms and conditions for use, reproduction, distribution and modification of the Research Materials set forth herein.
|
| 8 |
+
|
| 9 |
+
**"Documentation"** means the specifications, manuals and documentation accompanying
|
| 10 |
+
Research Materials distributed by Meta.
|
| 11 |
+
|
| 12 |
+
**"Licensee"** or **"you"** means you, or your employer or any other person or entity (if you are entering into this Agreement on such person or entity's behalf), of the age required under applicable laws, rules or regulations to provide legal consent and that has legal authority to bind your employer or such other person or entity if you are entering in this Agreement on their behalf.
|
| 13 |
+
|
| 14 |
+
**"Meta"** or **"we"** means Meta Platforms Ireland Limited (if you are located in or, if you are an entity, your principal place of business is in the EEA or Switzerland) and Meta Platforms, Inc. (if you are located outside of the EEA or Switzerland).
|
| 15 |
+
|
| 16 |
+
**"Noncommercial Research Uses"** means noncommercial research use cases related to research, development, education, processing, or analysis and in each case, is not primarily intended for commercial advantage or monetary compensation to you or others.
|
| 17 |
+
|
| 18 |
+
**"Research Materials"** means, collectively, Documentation and the models, software and algorithms, including machine-learning model code, trained model weights, inference-enabling code, training-enabling code, fine-tuning enabling code, demonstration materials and other elements of the foregoing distributed by Meta and made available under this Agreement.
|
| 19 |
+
|
| 20 |
+
By clicking "I Accept" below or by using or distributing any portion or element of the Research Materials, you agree to be bound by this Agreement.
|
| 21 |
+
|
| 22 |
+
## 1. License Rights and Redistribution.
|
| 23 |
+
|
| 24 |
+
a. <ins>Grant of Rights</ins>. You are granted a non-exclusive, worldwide, non-transferable and royalty-free limited license under Meta's intellectual property or other rights owned by Meta embodied in the Research Materials to use, reproduce, distribute, copy, create derivative works of, and make modifications to the Research Materials.
|
| 25 |
+
|
| 26 |
+
b. <ins>Redistribution and Use</ins>.
|
| 27 |
+
|
| 28 |
+
i. You will not use the Research Materials or any outputs or results of the Research Materials in connection with any commercial uses or for any uses other than Noncommercial Research Uses;
|
| 29 |
+
|
| 30 |
+
ii. Distribution of Research Materials, and any derivative works thereof, are subject to the terms of this Agreement. If you distribute or make the Research Materials, or any derivative works thereof, available to a third party, you may only do so under the terms of this Agreement. You shall also provide a copy of this Agreement to such third party.
|
| 31 |
+
|
| 32 |
+
iii. If you submit for publication the results of research you perform on, using, or otherwise in connection with Research Materials, you must acknowledge the use of Research Materials in your publication.
|
| 33 |
+
|
| 34 |
+
iv. Your use of the Research Materials must comply with applicable laws and regulations (including Trade Control Laws) and adhere to the FAIR Acceptable Use Policy, which is hereby incorporated by reference into this Agreement.
|
| 35 |
+
|
| 36 |
+
## 2. User Support.
|
| 37 |
+
|
| 38 |
+
Your Noncommercial Research Use of the Research Materials is done at your own discretion; Meta does not process any information nor provide any service in relation to such use. Meta is under no obligation to provide any support services for the Research Materials. Any support provided is "as is", "with all faults", and without warranty of any kind.
|
| 39 |
+
|
| 40 |
+
## 3. Disclaimer of Warranty.
|
| 41 |
+
|
| 42 |
+
UNLESS REQUIRED BY APPLICABLE LAW, THE RESEARCH MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, AND META DISCLAIMS ALL WARRANTIES OF ANY KIND, BOTH EXPRESS AND IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING THE RESEARCH MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE RESEARCH MATERIALS AND ANY OUTPUT AND RESULTS.
|
| 43 |
+
|
| 44 |
+
## 4. Limitation of Liability.
|
| 45 |
+
|
| 46 |
+
IN NO EVENT WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS AGREEMENT, FOR ANY LOST PROFITS OR ANY DIRECT OR INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF META OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF ANY OF THE FOREGOING.
|
| 47 |
+
|
| 48 |
+
## 5. Intellectual Property.
|
| 49 |
+
|
| 50 |
+
a. Subject to Meta's ownership of Research Materials and derivatives made by or for Meta, with respect to any derivative works and modifications of the Research Materials that are made by you, as between you and Meta, you are and will be the owner of such derivative works and modifications.
|
| 51 |
+
|
| 52 |
+
b. If you institute litigation or other proceedings against Meta or any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Research Materials, outputs or results, or any portion of any of the foregoing, constitutes infringement of intellectual property or other rights owned or licensable by you, then any licenses granted to you under this Agreement shall terminate as of the date such litigation or claim is filed or instituted. You will indemnify and hold harmless Meta from and against any claim by any third party arising out of or related to your use or distribution of the Research Materials.
|
| 53 |
+
|
| 54 |
+
## 6. Term and Termination.
|
| 55 |
+
|
| 56 |
+
The term of this Agreement will commence upon your acceptance of this Agreement or access to the Research Materials and will continue in full force and effect until terminated in accordance with the terms and conditions herein. Meta may terminate this Agreement if you are in breach of any term or condition of this Agreement. Upon termination of this Agreement, you shall delete and cease use of the Research Materials. Sections 3, 4 and 7 shall survive the termination of this Agreement.
|
| 57 |
+
|
| 58 |
+
## 7. Governing Law and Jurisdiction.
|
| 59 |
+
|
| 60 |
+
This Agreement will be governed and construed under the laws of the State of California without regard to choice of law principles, and the UN Convention on Contracts for the International Sale of Goods does not apply to this Agreement. The courts of California shall have exclusive jurisdiction of any dispute arising out of this Agreement.
|
| 61 |
+
|
| 62 |
+
## 8. Modifications and Amendments.
|
| 63 |
+
|
| 64 |
+
Meta may modify this Agreement from time to time; provided that they are similar in spirit to the current version of the Agreement, but may differ in detail to address new problems or concerns. All such changes will be effective immediately. Your continued use of the Research Materials after any modification to this Agreement constitutes your agreement to such modification. Except as provided in this Agreement, no modification or addition to any provision of this Agreement will be binding unless it is in writing and signed by an authorized representative of both you and Meta.
|
| 65 |
+
|
| 66 |
+
## FAIR Acceptable Use Policy
|
| 67 |
+
|
| 68 |
+
The Fundamental AI Research (FAIR) team at Meta seeks to further understanding of new and existing research domains with the mission of advancing the state-of-the-art in artificial intelligence through open research for the benefit of all.
|
| 69 |
+
|
| 70 |
+
As part of this mission, Meta makes certain research materials available for noncommercial research use. Meta is committed to promoting the safe and responsible use of such research materials.
|
| 71 |
+
|
| 72 |
+
### Prohibited Uses
|
| 73 |
+
|
| 74 |
+
You agree you will not use, or allow others to use, Research Materials to:
|
| 75 |
+
|
| 76 |
+
1. Violate the law or others' rights, including to:
|
| 77 |
+
Engage in, promote, generate, contribute to, encourage, plan, incite, or further illegal or unlawful activity or content, such as:
|
| 78 |
+
Violence or terrorism
|
| 79 |
+
Exploitation or harm to children, including the solicitation, creation, acquisition, or dissemination of child exploitative content or failure to report Child Sexual Abuse Material
|
| 80 |
+
Human trafficking, exploitation, and sexual violence
|
| 81 |
+
The illegal distribution of information or materials to minors, including obscene materials, or failure to employ legally required age-gating in connection with such information or materials.
|
| 82 |
+
Sexual solicitation
|
| 83 |
+
Any other criminal activity
|
| 84 |
+
|
| 85 |
+
Engage in, promote, incite, or facilitate the harassment, abuse, threatening, or bullying of individuals or groups of individuals
|
| 86 |
+
|
| 87 |
+
Engage in, promote, incite, or facilitate discrimination or other unlawful or harmful conduct in the provision of employment, employment benefits, credit, housing, other economic benefits, or other essential goods and services
|
| 88 |
+
|
| 89 |
+
Engage in the unauthorized or unlicensed practice of any profession including, but not limited to, financial, legal, medical/health, or related professional practices
|
| 90 |
+
|
| 91 |
+
Collect, process, disclose, generate, or infer health, demographic, or other sensitive personal or private information about individuals without rights and consents required by applicable laws
|
| 92 |
+
|
| 93 |
+
Engage in or facilitate any action or generate any content that infringes, misappropriates, or otherwise violates any third-party rights, including the outputs or results of any technology using FAIR research materials
|
| 94 |
+
|
| 95 |
+
Create, generate, or facilitate the creation of malicious code, malware, computer viruses or do anything else that could disable, overburden, interfere with or impair the proper working, integrity, operation or appearance of a website or computer system
|
| 96 |
+
|
| 97 |
+
2. Engage in, promote, incite, facilitate, or assist in the planning or development of activities that present a risk of death or bodily harm to individuals, including use of research artifacts related to the following:
|
| 98 |
+
|
| 99 |
+
Military, warfare, nuclear industries or applications, espionage, use for materials or activities that are subject to the International Traffic in Arms Regulations (ITAR) maintained by the United States Department of State
|
| 100 |
+
|
| 101 |
+
Guns and illegal weapons (including weapon development)
|
| 102 |
+
|
| 103 |
+
Illegal drugs and regulated/controlled substances
|
| 104 |
+
|
| 105 |
+
Operation of critical infrastructure, transportation technologies, or heavy machinery
|
| 106 |
+
|
| 107 |
+
Self-harm or harm to others, including suicide, cutting, and eating disorders
|
| 108 |
+
|
| 109 |
+
Any content intended to incite or promote violence, abuse, or any infliction of bodily harm to an individual
|
| 110 |
+
|
| 111 |
+
3. Intentionally deceive or mislead others, including use of FAIR Research Materials related to the following:
|
| 112 |
+
|
| 113 |
+
Generating, promoting, or furthering fraud or the creation or promotion of disinformation
|
| 114 |
+
|
| 115 |
+
Generating, promoting, or furthering defamatory content, including the creation of defamatory statements, images, or other content
|
| 116 |
+
|
| 117 |
+
Generating, promoting, or further distributing spam
|
| 118 |
+
|
| 119 |
+
Impersonating another individual without consent, authorization, or legal right
|
| 120 |
+
|
| 121 |
+
Representing that outputs of FAIR research materials or outputs from technology using FAIR research materials are human-generated
|
| 122 |
+
|
| 123 |
+
Generating or facilitating false online engagement, including fake reviews and other means of fake online engagement
|
| 124 |
+
|
| 125 |
+
4. Fail to appropriately disclose to end users any known dangers of your Research Materials.
|
| 126 |
+
|
| 127 |
+
Please report any violation of this Policy or other problems that could lead to a violation of this Policy by submitting a report [here](https://docs.google.com/forms/d/e/1FAIpQLSeb11cryAopJ7LNrC4nxEUXrHY26hfkXQMf_uH-oFgA3WlYZQ/viewform).
|
README.md
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: other
|
| 3 |
+
license_name: fair-research-license
|
| 4 |
+
license_link: LICENSE
|
| 5 |
+
tags:
|
| 6 |
+
- multi-task-perception
|
| 7 |
+
- computer-vision
|
| 8 |
+
- image-classification
|
| 9 |
+
- semantic-segmentation
|
| 10 |
+
- depth-estimation
|
| 11 |
+
- keypoint-correspondence
|
| 12 |
+
- vision-transformer
|
| 13 |
+
library_name: pytorch
|
| 14 |
+
datasets:
|
| 15 |
+
- imagenet-1k
|
| 16 |
+
- scene_parse_150
|
| 17 |
+
- sayakpaul/nyu_depth_v2
|
| 18 |
+
metrics:
|
| 19 |
+
- accuracy
|
| 20 |
+
---
|
| 21 |
+
|
| 22 |
+
# Argus
|
| 23 |
+
|
| 24 |
+
Argus is a multi-task perception system built on a single compact vision backbone. From one forward pass through the encoder, the model produces classification labels, semantic segmentation masks, metric depth maps, and dense keypoint correspondences, thereby collapsing four domain-specific pipelines into a unified package of roughly 86 million parameters. The system is named after Argus Panoptes, the many-eyed giant of Greek mythology who was tasked by Hera with watching over everything at once.
|
| 25 |
+
|
| 26 |
+
The underlying backbone is EUPE-ViT-B, which was introduced in *Efficient Universal Perception Encoder* (Zhu et al., Meta FAIR, arXiv:2603.22387, March 2026). That paper demonstrates that a small vision encoder can be distilled from a collection of larger specialist teachers, yielding features that transfer well to image understanding, dense prediction, and vision–language tasks simultaneously. Argus takes the released EUPE-ViT-B backbone, leaves its weights frozen, and attaches four lightweight task heads that were trained or constructed independently for this project.
|
| 27 |
+
|
| 28 |
+
## Architecture
|
| 29 |
+
|
| 30 |
+
```
|
| 31 |
+
Image → EUPE-ViT-B (frozen, 86M parameters) → shared features
|
| 32 |
+
|
| 33 |
+
├── Classification — kNN over 1000 class prototypes
|
| 34 |
+
├── Segmentation — linear head, 150 ADE20K classes
|
| 35 |
+
├── Depth — linear head, 256 bins, trained on NYU
|
| 36 |
+
└── Correspondence — training-free dense feature matching
|
| 37 |
+
```
|
| 38 |
+
|
| 39 |
+
The segmentation and depth heads each consist of a BatchNorm layer followed by a single 1×1 convolution, and they were trained with the backbone held frozen throughout. Classification is performed by extracting the backbone's CLS token, normalizing it, and computing cosine similarity against a precomputed matrix of 1000 class prototypes that were built from the full ImageNet-1k training set. Keypoint correspondence requires no trained parameters at all: source and target features are extracted from two images, upsampled to pixel resolution, and matched by cosine similarity at each source keypoint.
|
| 40 |
+
|
| 41 |
+
Argus does not perform object detection. The four tasks above are what the model was built to do, and they are the scope in which its behavior has been validated. Detection would require a trained detection head on top of the backbone, which is out of scope for this release.
|
| 42 |
+
|
| 43 |
+
## Reproduction of the EUPE Paper
|
| 44 |
+
|
| 45 |
+
All four of the paper's reported benchmarks were reproduced as part of building Argus, and the results either matched the published numbers within rounding error or exceeded them modestly.
|
| 46 |
+
|
| 47 |
+
| Task | Dataset | Metric | Paper | Argus | Delta |
|
| 48 |
+
|----------------|--------------|------------------------|-------|--------|---------|
|
| 49 |
+
| Classification | ImageNet-1k | kNN k=10 top-1 | 84.1 | 84.07 | −0.03 |
|
| 50 |
+
| Segmentation | ADE20K | mean IoU | 52.4 | 52.72 | +0.32 |
|
| 51 |
+
| Depth | NYU Depth v2 | RMSE (lower is better) | 0.391 | 0.3914 | +0.0004 |
|
| 52 |
+
| Correspondence | SPair-71k | PCK@0.1 | 51.3 | 54.35 | +3.05 |
|
| 53 |
+
|
| 54 |
+
The classification evaluation used the full 1.28-million-image ImageNet-1k training set as the kNN reference and the 50,000-image validation set as the query. The segmentation and depth heads were trained using the same linear-probe configurations described in the EUPE repository. Correspondence was evaluated on the SPair-71k test split at 512-pixel resolution across all 12,234 test pairs, for a total of 88,328 keypoints, with no failures during the run.
|
| 55 |
+
|
| 56 |
+
## Comparison with Standard Baselines
|
| 57 |
+
|
| 58 |
+
As a sanity check, Argus was compared against several well-known models on the same 200-image COCO subset. The classification comparison uses a keyword cross-reference between each model's top-k ImageNet predictions and the COCO ground-truth detection labels on those images, which provides a consistent yardstick across differently-trained models despite the label-space mismatch. **These hit rates measure agreement with COCO detection labels via keyword matching on the 200-image subset; they are not raw ImageNet accuracy.** For reference, all three classifiers exceed 80% top-1 on the full ImageNet validation set.
|
| 59 |
+
|
| 60 |
+
**Classification** (hit rate against COCO detection labels, 200 images):
|
| 61 |
+
|
| 62 |
+
| Model | Parameters | Top-1 hit | Top-5 hit | Latency | Peak VRAM |
|
| 63 |
+
|--------------------|------------|-----------|-----------|---------|-----------|
|
| 64 |
+
| Argus (EUPE-ViT-B) | 86 M | 42.2% | 66.8% | 13.1 ms | 0.34 GB |
|
| 65 |
+
| ConvNeXt-Base | 89 M | 40.2% | 71.4% | 10.4 ms | 0.35 GB |
|
| 66 |
+
| ResNet50 | 26 M | 36.2% | 61.8% | 8.4 ms | 0.12 GB |
|
| 67 |
+
|
| 68 |
+
**Segmentation**:
|
| 69 |
+
|
| 70 |
+
| Model | Parameters | Classes | Latency | Peak VRAM |
|
| 71 |
+
|----------------------------|------------|---------|---------|-----------|
|
| 72 |
+
| Argus (EUPE + linear head) | 86 M | 150 | 11.8 ms | 0.41 GB |
|
| 73 |
+
| DeepLabV3-ResNet50 | 42 M | 21 | 15.9 ms | 0.33 GB |
|
| 74 |
+
|
| 75 |
+
**Depth**:
|
| 76 |
+
|
| 77 |
+
| Model | Parameters | Latency | Peak VRAM |
|
| 78 |
+
|----------------------------|------------|---------|-----------|
|
| 79 |
+
| Argus (EUPE + linear head) | 86 M | 13.3 ms | 0.35 GB |
|
| 80 |
+
| Depth-Anything-V2-Base | 98 M | 18.8 ms | 0.68 GB |
|
| 81 |
+
|
| 82 |
+
Argus achieves the best top-1 hit rate of the three image classifiers, with ConvNeXt-Base edging it slightly on top-5 (which is characteristic of trained softmax heads relative to kNN). It is faster than DeepLabV3 while predicting a much richer label space, and it is faster than Depth-Anything-V2 while using roughly half the VRAM. Although these baselines and Argus were trained for different objectives on different datasets, the comparison is useful for understanding what the model delivers in practice.
|
| 83 |
+
|
| 84 |
+
## Usage
|
| 85 |
+
|
| 86 |
+
```python
|
| 87 |
+
from PIL import Image
|
| 88 |
+
from transformers import AutoModel
|
| 89 |
+
|
| 90 |
+
model = AutoModel.from_pretrained("phanerozoic/argus", trust_remote_code=True)
|
| 91 |
+
|
| 92 |
+
image = Image.open("your_image.jpg").convert("RGB")
|
| 93 |
+
|
| 94 |
+
# Any single task can be called directly:
|
| 95 |
+
top5 = model.classify(image, top_k=5)
|
| 96 |
+
seg = model.segment(image) # returns [H, W] class indices
|
| 97 |
+
depth = model.depth(image) # returns [H, W] metric depth in meters
|
| 98 |
+
|
| 99 |
+
# Or all three can be run at once in a single call:
|
| 100 |
+
result = model.perceive(image)
|
| 101 |
+
# result["classification"] — list of top-5 {"class_id", "class_name", "score"}
|
| 102 |
+
# result["segmentation"] — numpy array of ADE20K class indices
|
| 103 |
+
# result["depth"] — numpy array of depth values in meters
|
| 104 |
+
# result["timings_ms"] — per-task latency breakdown
|
| 105 |
+
|
| 106 |
+
# Keypoint correspondence requires two images and a set of source points:
|
| 107 |
+
target = Image.open("other_image.jpg").convert("RGB")
|
| 108 |
+
src_points = [[100, 100], [200, 200]]
|
| 109 |
+
predicted_target_points = model.correspond(image, target, src_points)
|
| 110 |
+
```
|
| 111 |
+
|
| 112 |
+
The model uses HuggingFace's custom-code mechanism (`trust_remote_code=True`),
|
| 113 |
+
so the loader code is fetched from the model repo automatically. No additional
|
| 114 |
+
files need to be cloned.
|
| 115 |
+
|
| 116 |
+
## Training Data and Procedure
|
| 117 |
+
|
| 118 |
+
| Component | Source dataset | Trained by |
|
| 119 |
+
|---------------------|----------------------------------------------------------|------------------------------------------------------------|
|
| 120 |
+
| EUPE-ViT-B backbone | LVD-1689M (approximately 1.7 billion web images) | Meta FAIR (used here frozen, never retrained) |
|
| 121 |
+
| Segmentation head | ADE20K (20,210 training images, 2,000 validation images) | This repository — 40,000 iterations of linear-probe training |
|
| 122 |
+
| Depth head | NYU Depth v2 (24,231 training images) | This repository — 38,400 iterations of linear-probe training |
|
| 123 |
+
| Class prototypes | ImageNet-1k (1.28 million training images) | This repository — mean CLS feature per class |
|
| 124 |
+
| Correspondence | None (training-free) | — |
|
| 125 |
+
|
| 126 |
+
The segmentation and depth heads each contain fewer than 150,000 trainable parameters. Training used the EUPE repository's official linear-probe configurations with the backbone weights held strictly frozen, which matches the protocol used in the EUPE paper. The class prototypes were produced by running the frozen backbone over the full ImageNet-1k training set at 224×224 resolution, computing the mean L2-normalized CLS feature per class, and saving the resulting 1000×768 matrix alongside the backbone and task heads.
|
| 127 |
+
|
| 128 |
+
## Notes
|
| 129 |
+
|
| 130 |
+
The segmentation head was trained on ADE20K's 150-class indoor-and-urban label space, which does not align directly with COCO or other detection benchmarks. The depth head was trained on NYU Depth v2 and is indoor-biased; outdoor metric depth should be treated as approximate. Classification uses kNN over class prototypes rather than a trained softmax head, which produces more decisive top-1 predictions but flatter top-k distributions.
|
| 131 |
+
|
| 132 |
+
## License
|
| 133 |
+
|
| 134 |
+
The EUPE-ViT-B backbone weights inside this checkpoint were released by Meta FAIR under the [FAIR Research License](https://huggingface.co/facebook/EUPE-ViT-B/blob/main/LICENSE), which restricts use to non-commercial research and education. The task heads and class prototypes in this checkpoint were trained independently by the author of this repository and would on their own be releasable under a permissive license. However, because they are inseparably bundled with the backbone weights in a single file, the unified checkpoint inherits the more restrictive license of its most restricted component. In practical terms, the entire `argus.pt` file should be treated as released under the FAIR Research License. See `LICENSE` for the full text.
|
| 135 |
+
|
| 136 |
+
## Citation
|
| 137 |
+
|
| 138 |
+
If you use Argus or the underlying EUPE backbone in academic work, please cite the original paper:
|
| 139 |
+
|
| 140 |
+
```bibtex
|
| 141 |
+
@misc{zhu2026eupe,
|
| 142 |
+
title={Efficient Universal Perception Encoder},
|
| 143 |
+
author={Zhu, Chenchen and Suri, Saksham and Jose, Cijo and Oquab, Maxime and Szafraniec, Marc and Wen, Wei and Xiong, Yunyang and Labatut, Patrick and Bojanowski, Piotr and Krishnamoorthi, Raghuraman and Chandra, Vikas},
|
| 144 |
+
year={2026},
|
| 145 |
+
eprint={2603.22387},
|
| 146 |
+
archivePrefix={arXiv},
|
| 147 |
+
primaryClass={cs.CV}
|
| 148 |
+
}
|
| 149 |
+
```
|
| 150 |
+
|
| 151 |
+
## Acknowledgements
|
| 152 |
+
|
| 153 |
+
The EUPE backbone was trained and released by Meta FAIR. The dataset loading utilities are from the DINOv3 repository. The Argus task heads, benchmarks, and packaging were done by [phanerozoic](https://huggingface.co/phanerozoic).
|
argus.py
ADDED
|
@@ -0,0 +1,1017 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Argus: multi-task perception on a single EUPE-ViT-B backbone.
|
| 3 |
+
|
| 4 |
+
from transformers import AutoModel
|
| 5 |
+
model = AutoModel.from_pretrained("phanerozoic/argus", trust_remote_code=True)
|
| 6 |
+
result = model.perceive(image)
|
| 7 |
+
|
| 8 |
+
The EUPE-ViT-B backbone architecture, all supporting layers, and the Argus
|
| 9 |
+
task heads are inlined below. The backbone code is reproduced from
|
| 10 |
+
facebookresearch/EUPE (Meta FAIR) under the FAIR Research License.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import math
|
| 14 |
+
import time
|
| 15 |
+
from functools import partial
|
| 16 |
+
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union
|
| 17 |
+
|
| 18 |
+
import numpy as np
|
| 19 |
+
import torch
|
| 20 |
+
import torch.nn.functional as F
|
| 21 |
+
import torch.nn.init
|
| 22 |
+
from PIL import Image
|
| 23 |
+
from torch import Tensor, nn
|
| 24 |
+
from torchvision.transforms import v2
|
| 25 |
+
from transformers import PretrainedConfig, PreTrainedModel
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# ===========================================================================
|
| 29 |
+
# EUPE backbone — vendored verbatim from facebookresearch/EUPE
|
| 30 |
+
# ===========================================================================
|
| 31 |
+
|
| 32 |
+
# ---------- utility helpers (from eupe/utils/utils.py) ---------------------
|
| 33 |
+
|
| 34 |
+
def cat_keep_shapes(x_list: List[Tensor]) -> Tuple[Tensor, List[Tuple[int]], List[int]]:
    """Concatenate token tensors into one flat (total_tokens, dim) tensor.

    Returns the flattened concatenation together with the original shapes and
    per-tensor token counts, which is exactly the bookkeeping needed to undo
    the operation with ``uncat_with_shapes``.
    """
    shapes = [t.shape for t in x_list]
    # Tokens per tensor = numel of everything except the trailing feature dim.
    token_counts = [t.select(dim=-1, index=0).numel() for t in x_list]
    stacked = torch.cat([t.flatten(0, -2) for t in x_list])
    return stacked, shapes, token_counts
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def uncat_with_shapes(flattened: Tensor, shapes: List[Tuple[int]], num_tokens: List[int]) -> List[Tensor]:
    """Inverse of ``cat_keep_shapes``: split and reshape back to the originals.

    The feature width is taken from ``flattened`` rather than ``shapes``, so
    the round trip still works after an intermediate layer has changed the
    embedding dimension.
    """
    pieces = torch.split_with_sizes(flattened, num_tokens, dim=0)
    feature_dim = torch.Size([flattened.shape[-1]])
    target_shapes = [shape[:-1] + feature_dim for shape in shapes]
    return [piece.reshape(shape) for piece, shape in zip(pieces, target_shapes)]
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def named_apply(
    fn: Callable,
    module: nn.Module,
    name: str = "",
    depth_first: bool = True,
    include_root: bool = False,
) -> nn.Module:
    """Recursively apply ``fn(module=..., name=...)`` over a module tree.

    ``depth_first`` selects whether a module is visited after (True) or
    before (False) its children; ``include_root`` selects whether ``module``
    itself is visited. Child names are dot-joined onto ``name``. Returns the
    (possibly mutated) root module.
    """
    if include_root and not depth_first:
        fn(module=module, name=name)
    for child_name, child in module.named_children():
        qualified = f"{name}.{child_name}" if name else child_name
        # Children are always visited (include_root=True below); only the
        # top-level call can opt the root out.
        named_apply(
            fn=fn,
            module=child,
            name=qualified,
            depth_first=depth_first,
            include_root=True,
        )
    if include_root and depth_first:
        fn(module=module, name=name)
    return module
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# ---------- RMSNorm (from eupe/layers/rms_norm.py) -------------------------
|
| 72 |
+
|
| 73 |
+
class RMSNorm(nn.Module):
    """Root-mean-square normalization over the last dim with a learned gain.

    The normalization itself is computed in float32 for numerical stability,
    then cast back to the input dtype before applying ``weight``.
    """

    def __init__(self, dim: int, eps: float = 1e-5):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(dim))
        self.eps = eps

    def reset_parameters(self) -> None:
        """Re-initialize the gain to all ones."""
        nn.init.constant_(self.weight, 1)

    def _norm(self, x: Tensor) -> Tensor:
        # x / rms(x), with eps inside the sqrt for stability.
        inv_rms = torch.rsqrt(x.pow(2).mean(dim=-1, keepdim=True) + self.eps)
        return x * inv_rms

    def forward(self, x: Tensor) -> Tensor:
        normalized = self._norm(x.float()).type_as(x)
        return normalized * self.weight
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
# ---------- LayerScale (from eupe/layers/layer_scale.py) -------------------
|
| 91 |
+
|
| 92 |
+
class LayerScale(nn.Module):
    """Per-channel learned scaling of the residual branch (``x * gamma``).

    ``gamma`` is allocated uninitialized here; callers are expected to invoke
    ``reset_parameters`` (e.g. via the model-wide init pass) to fill it with
    ``init_values``.
    """

    def __init__(
        self,
        dim: int,
        init_values: Union[float, Tensor] = 1e-5,
        inplace: bool = False,
        device=None,
    ) -> None:
        super().__init__()
        self.inplace = inplace
        self.gamma = nn.Parameter(torch.empty(dim, device=device))
        self.init_values = init_values

    def reset_parameters(self):
        """Fill ``gamma`` with the stored ``init_values``."""
        nn.init.constant_(self.gamma, self.init_values)

    def forward(self, x: Tensor) -> Tensor:
        if self.inplace:
            # Mutates x in place; callers opting into this must not need the input again.
            return x.mul_(self.gamma)
        return x * self.gamma
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
# ---------- PatchEmbed (from eupe/layers/patch_embed.py) -------------------
|
| 113 |
+
|
| 114 |
+
def make_2tuple(x):
    """Normalize an int or 2-tuple into an ``(h, w)`` pair.

    Ints are duplicated; 2-tuples pass through unchanged. Anything else
    trips an assertion.
    """
    if isinstance(x, int):
        return (x, x)
    assert isinstance(x, tuple)
    assert len(x) == 2
    return x
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class PatchEmbed(nn.Module):
    """Image-to-patch embedding via a strided convolution.

    Splits an image into non-overlapping ``patch_size`` patches and projects
    each to ``embed_dim``. Output is ``(B, N, D)`` when ``flatten_embedding``
    is True, otherwise ``(B, H', W', D)`` with ``H' = H // patch_h``.
    """

    def __init__(
        self,
        img_size: Union[int, Tuple[int, int]] = 224,
        patch_size: Union[int, Tuple[int, int]] = 16,
        in_chans: int = 3,
        embed_dim: int = 768,
        norm_layer: Optional[Callable] = None,
        flatten_embedding: bool = True,
    ) -> None:
        super().__init__()
        image_hw = make_2tuple(img_size)
        patch_hw = make_2tuple(patch_size)
        grid = (image_hw[0] // patch_hw[0], image_hw[1] // patch_hw[1])

        self.img_size = image_hw
        self.patch_size = patch_hw
        self.patches_resolution = grid
        self.num_patches = grid[0] * grid[1]
        self.in_chans = in_chans
        self.embed_dim = embed_dim
        self.flatten_embedding = flatten_embedding

        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_hw, stride=patch_hw)
        self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity()

    def forward(self, x: Tensor) -> Tensor:
        x = self.proj(x)  # (B, D, H', W'); grid size follows the actual input, not img_size
        grid_h, grid_w = x.size(2), x.size(3)
        x = x.flatten(2).transpose(1, 2)  # (B, N, D)
        x = self.norm(x)
        if not self.flatten_embedding:
            x = x.reshape(-1, grid_h, grid_w, self.embed_dim)
        return x

    def reset_parameters(self):
        """Uniform(-sqrt(k), sqrt(k)) init with k = 1/fan_in, like nn.Linear."""
        # NOTE(review): fan_in uses patch_size[0]**2 even for non-square
        # patches — matches the vendored upstream behavior; confirm intent.
        k = 1 / (self.in_chans * (self.patch_size[0] ** 2))
        bound = math.sqrt(k)
        nn.init.uniform_(self.proj.weight, -bound, bound)
        if self.proj.bias is not None:
            nn.init.uniform_(self.proj.bias, -bound, bound)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
# ---------- RoPE (from eupe/layers/rope_position_encoding.py) --------------
|
| 166 |
+
|
| 167 |
+
class RopePositionEmbedding(nn.Module):
    """2-D rotary position embedding (RoPE) for a patch grid.

    Produces ``(sin, cos)`` tensors of shape ``(H*W, D_head)`` that attention
    layers apply to queries and keys via ``rope_apply``. Per head, a quarter
    of the channels encode the height coordinate and a quarter the width
    (hence the ``embed_dim % (4 * num_heads) == 0`` requirement); the angle
    table is tiled once so the rotate-half trick lines up.

    Periods are either a geometric series derived from ``base`` (classic
    RoPE) or a log-spaced range between ``min_period`` and ``max_period`` —
    exactly one of the two parameterizations must be given.

    ``shift_coords`` / ``jitter_coords`` / ``rescale_coords`` are train-time
    coordinate augmentations and are inactive in eval mode.
    """

    def __init__(
        self,
        embed_dim: int,
        *,
        num_heads: int,
        base: Optional[float] = 100.0,
        min_period: Optional[float] = None,
        max_period: Optional[float] = None,
        normalize_coords: Literal["min", "max", "separate"] = "separate",
        shift_coords: Optional[float] = None,
        jitter_coords: Optional[float] = None,
        rescale_coords: Optional[float] = None,
        dtype: Optional[torch.dtype] = None,
        device: Optional[torch.device] = None,
    ):
        super().__init__()
        # Each head splits its channels into 4 groups (sin/cos x height/width).
        assert embed_dim % (4 * num_heads) == 0
        both_periods = min_period is not None and max_period is not None
        # Exactly one parameterization: `base` XOR (`min_period` + `max_period`).
        if (base is None and not both_periods) or (base is not None and both_periods):
            raise ValueError("Either `base` or `min_period`+`max_period` must be provided.")

        D_head = embed_dim // num_heads
        self.base = base
        self.min_period = min_period
        self.max_period = max_period
        self.D_head = D_head
        self.normalize_coords = normalize_coords
        self.shift_coords = shift_coords
        self.jitter_coords = jitter_coords
        self.rescale_coords = rescale_coords

        self.dtype = dtype
        # One period per frequency channel; filled in by _init_weights below.
        self.register_buffer(
            "periods",
            torch.empty(D_head // 4, device=device, dtype=dtype),
            persistent=True,
        )
        self._init_weights()

    def forward(self, *, H: int, W: int) -> Tuple[Tensor, Tensor]:
        """Return ``(sin, cos)`` angle tables for an ``H x W`` patch grid."""
        device = self.periods.device
        dtype = self.dtype
        dd = {"device": device, "dtype": dtype}

        # Patch-center coordinates (offset 0.5), normalized to [0, 1] either
        # by the longer side, the shorter side, or each axis independently.
        if self.normalize_coords == "max":
            max_HW = max(H, W)
            coords_h = torch.arange(0.5, H, **dd) / max_HW
            coords_w = torch.arange(0.5, W, **dd) / max_HW
        elif self.normalize_coords == "min":
            min_HW = min(H, W)
            coords_h = torch.arange(0.5, H, **dd) / min_HW
            coords_w = torch.arange(0.5, W, **dd) / min_HW
        elif self.normalize_coords == "separate":
            coords_h = torch.arange(0.5, H, **dd) / H
            coords_w = torch.arange(0.5, W, **dd) / W
        else:
            raise ValueError(f"Unknown normalize_coords: {self.normalize_coords}")
        coords = torch.stack(torch.meshgrid(coords_h, coords_w, indexing="ij"), dim=-1)
        coords = coords.flatten(0, 1)  # (H*W, 2)
        coords = 2.0 * coords - 1.0  # re-center to [-1, 1]

        # Train-time augmentations: random translation...
        if self.training and self.shift_coords is not None:
            shift_hw = torch.empty(2, **dd).uniform_(-self.shift_coords, self.shift_coords)
            coords += shift_hw[None, :]

        # ...random per-axis (anisotropic) log-uniform scaling...
        if self.training and self.jitter_coords is not None:
            jitter_max = np.log(self.jitter_coords)
            jitter_min = -jitter_max
            jitter_hw = torch.empty(2, **dd).uniform_(jitter_min, jitter_max).exp()
            coords *= jitter_hw[None, :]

        # ...and random isotropic log-uniform rescaling.
        if self.training and self.rescale_coords is not None:
            rescale_max = np.log(self.rescale_coords)
            rescale_min = -rescale_max
            rescale_hw = torch.empty(1, **dd).uniform_(rescale_min, rescale_max).exp()
            coords *= rescale_hw

        # (H*W, 2, D_head//4): one angle per (axis, frequency) pair.
        angles = 2 * math.pi * coords[:, :, None] / self.periods[None, None, :]
        angles = angles.flatten(1, 2)
        # Duplicate so the table matches the rotate-half channel layout.
        angles = angles.tile(2)
        cos = torch.cos(angles)
        sin = torch.sin(angles)
        return (sin, cos)

    def _init_weights(self):
        """Fill the ``periods`` buffer from the chosen parameterization."""
        device = self.periods.device
        dtype = self.dtype
        if self.base is not None:
            # Classic RoPE geometric schedule: base^(2i / (D_head/2)).
            periods = self.base ** (
                2 * torch.arange(self.D_head // 4, device=device, dtype=dtype) / (self.D_head // 2)
            )
        else:
            # Log-spaced periods from min_period to max_period (inclusive).
            base = self.max_period / self.min_period
            exponents = torch.linspace(0, 1, self.D_head // 4, device=device, dtype=dtype)
            periods = base ** exponents
            periods = periods / base
            periods = periods * self.max_period
        self.periods.data = periods
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
# ---------- FFN layers (from eupe/layers/ffn_layers.py) --------------------
|
| 269 |
+
|
| 270 |
+
class ListForwardMixin(object):
    """Adds ``forward_list``: process several token tensors in one batch.

    The tensors are concatenated with ``cat_keep_shapes``, pushed through a
    single ``forward`` call, and split back into their original shapes —
    avoiding one kernel launch per tensor for pointwise modules.
    """

    def forward(self, x: Tensor):
        # Concrete modules must provide the actual computation.
        raise NotImplementedError

    def forward_list(self, x_list: List[Tensor]) -> List[Tensor]:
        flat, shapes, counts = cat_keep_shapes(x_list)
        flat = self.forward(flat)
        return uncat_with_shapes(flat, shapes, counts)
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
class Mlp(nn.Module, ListForwardMixin):
    """Standard transformer MLP: fc1 -> activation -> dropout -> fc2 -> dropout.

    ``hidden_features`` and ``out_features`` default to ``in_features`` when
    omitted.
    """

    def __init__(
        self,
        in_features: int,
        hidden_features: Optional[int] = None,
        out_features: Optional[int] = None,
        act_layer: Callable[..., nn.Module] = nn.GELU,
        drop: float = 0.0,
        bias: bool = True,
        device=None,
    ) -> None:
        super().__init__()
        hidden = hidden_features or in_features
        out = out_features or in_features
        self.fc1 = nn.Linear(in_features, hidden, bias=bias, device=device)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden, out, bias=bias, device=device)
        # A single Dropout module is reused after both linear layers.
        self.drop = nn.Dropout(drop)

    def forward(self, x: Tensor) -> Tensor:
        hidden = self.drop(self.act(self.fc1(x)))
        return self.drop(self.fc2(hidden))
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
class SwiGLUFFN(nn.Module, ListForwardMixin):
    """SwiGLU feed-forward: ``w3(silu(w1(x)) * w2(x))``.

    The actual hidden width is 2/3 of ``hidden_features`` (keeping parameter
    count comparable to a GELU MLP of the same nominal width), rounded up to
    a multiple of ``align_to``. ``act_layer`` and ``drop`` are accepted only
    for signature compatibility with ``Mlp`` and are unused.
    """

    def __init__(
        self,
        in_features: int,
        hidden_features: Optional[int] = None,
        out_features: Optional[int] = None,
        act_layer: Optional[Callable[..., nn.Module]] = None,
        drop: float = 0.0,
        bias: bool = True,
        align_to: int = 8,
        device=None,
    ) -> None:
        super().__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        raw = int(hidden_features * 2 / 3)
        # Round up to the next multiple of align_to for hardware efficiency.
        hidden = raw + (-raw % align_to)
        self.w1 = nn.Linear(in_features, hidden, bias=bias, device=device)
        self.w2 = nn.Linear(in_features, hidden, bias=bias, device=device)
        self.w3 = nn.Linear(hidden, out_features, bias=bias, device=device)

    def forward(self, x: Tensor) -> Tensor:
        gate = F.silu(self.w1(x))
        return self.w3(gate * self.w2(x))
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
# ---------- Attention (from eupe/layers/attention.py) ----------------------
|
| 337 |
+
|
| 338 |
+
def rope_rotate_half(x: Tensor) -> Tensor:
    """Rotate the feature halves by 90 degrees: (x1, x2) -> (-x2, x1)."""
    first_half, second_half = x.chunk(2, dim=-1)
    return torch.cat((-second_half, first_half), dim=-1)
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def rope_apply(x: Tensor, sin: Tensor, cos: Tensor) -> Tensor:
    """Apply rotary position embedding: ``x * cos + rotate_half(x) * sin``.

    The rotate-half is inlined: the two feature halves (x1, x2) become
    (-x2, x1) before the sin term.
    """
    x1, x2 = x.chunk(2, dim=-1)
    rotated = torch.cat((-x2, x1), dim=-1)
    return x * cos + rotated * sin
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
class LinearKMaskedBias(nn.Linear):
    """Linear layer for a fused QKV projection whose bias is elementwise
    multiplied by a ``bias_mask`` buffer before use.

    The mask is initialized to all-NaN on purpose: if a checkpoint does not
    overwrite it, every output becomes NaN, making a missing mask loudly
    visible instead of silently wrong.
    # NOTE(review): presumably the loaded mask zeroes the K third of the
    # fused QKV bias (hence the name / the out_features % 3 check) — confirm
    # against the checkpoint that supplies `bias_mask`.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        o = self.out_features
        # Fused Q, K, V projections: output must split into three equal parts.
        assert o % 3 == 0
        if self.bias is not None:
            self.register_buffer("bias_mask", torch.full_like(self.bias, fill_value=math.nan))

    def forward(self, input: Tensor) -> Tensor:
        # Mask is applied multiplicatively each forward; bias-free layers skip it.
        masked_bias = self.bias * self.bias_mask.to(self.bias.dtype) if self.bias is not None else None
        return F.linear(input, self.weight, masked_bias)
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
class SelfAttention(nn.Module):
    """Multi-head self-attention with optional rotary position embedding.

    Uses a fused QKV projection (optionally ``LinearKMaskedBias`` when
    ``mask_k_bias`` is set) and PyTorch's fused scaled-dot-product attention
    kernel. ``forward_list`` batches the QKV and output projections across a
    list of differently-shaped token tensors, running only the attention
    itself per-tensor.
    """

    def __init__(
        self,
        dim: int,
        num_heads: int = 8,
        qkv_bias: bool = False,
        proj_bias: bool = True,
        attn_drop: float = 0.0,
        proj_drop: float = 0.0,
        mask_k_bias: bool = False,
        device=None,
    ) -> None:
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Kept for interface parity; scaling is handled internally by
        # scaled_dot_product_attention, which uses the same 1/sqrt(head_dim).
        self.scale = head_dim ** -0.5

        linear_class = LinearKMaskedBias if mask_k_bias else nn.Linear
        self.qkv = linear_class(dim, dim * 3, bias=qkv_bias, device=device)
        # NOTE(review): attn_drop is stored but not applied in
        # compute_attention (SDPA is called without a dropout argument).
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim, bias=proj_bias, device=device)
        self.proj_drop = nn.Dropout(proj_drop)

    def apply_rope(self, q: Tensor, k: Tensor, rope) -> Tuple[Tensor, Tensor]:
        """Rotate q/k by the (sin, cos) tables, leaving any prefix tokens
        (e.g. CLS/register tokens, which precede the patch tokens) unrotated.
        Computation happens in the rope dtype, then casts back."""
        q_dtype = q.dtype
        k_dtype = k.dtype
        sin, cos = rope
        rope_dtype = sin.dtype
        q = q.to(dtype=rope_dtype)
        k = k.to(dtype=rope_dtype)
        N = q.shape[-2]
        # Tokens beyond the angle table are treated as a position-free prefix.
        prefix = N - sin.shape[-2]
        assert prefix >= 0
        q_prefix = q[:, :, :prefix, :]
        q = rope_apply(q[:, :, prefix:, :], sin, cos)
        q = torch.cat((q_prefix, q), dim=-2)
        k_prefix = k[:, :, :prefix, :]
        k = rope_apply(k[:, :, prefix:, :], sin, cos)
        k = torch.cat((k_prefix, k), dim=-2)
        q = q.to(dtype=q_dtype)
        k = k.to(dtype=k_dtype)
        return q, k

    def forward(self, x: Tensor, attn_bias=None, rope=None) -> Tensor:
        """Standard single-tensor attention: QKV -> attention -> output proj."""
        qkv = self.qkv(x)
        attn_v = self.compute_attention(qkv=qkv, attn_bias=attn_bias, rope=rope)
        x = self.proj(attn_v)
        x = self.proj_drop(x)
        return x

    def forward_list(self, x_list, attn_bias=None, rope_list=None) -> List[Tensor]:
        """Attention over a list of token tensors with per-tensor rope tables.

        QKV and output projections run once on the concatenation; only the
        attention kernel runs per tensor (shapes/seq lengths may differ)."""
        assert len(x_list) == len(rope_list)
        x_flat, shapes, num_tokens = cat_keep_shapes(x_list)
        qkv_flat = self.qkv(x_flat)
        qkv_list = uncat_with_shapes(qkv_flat, shapes, num_tokens)
        att_out = []
        for _, (qkv, _, rope) in enumerate(zip(qkv_list, shapes, rope_list)):
            att_out.append(self.compute_attention(qkv, attn_bias=attn_bias, rope=rope))
        x_flat, shapes, num_tokens = cat_keep_shapes(att_out)
        x_flat = self.proj(x_flat)
        return uncat_with_shapes(x_flat, shapes, num_tokens)

    def compute_attention(self, qkv: Tensor, attn_bias=None, rope=None) -> Tensor:
        """Split fused QKV into heads, optionally apply rope, run SDPA,
        and merge heads back to (B, N, C). ``attn_bias`` is unsupported."""
        assert attn_bias is None
        B, N, _ = qkv.shape
        C = self.qkv.in_features
        # (B, N, 3*C) -> (B, N, 3, heads, head_dim)
        qkv = qkv.reshape(B, N, 3, self.num_heads, C // self.num_heads)
        q, k, v = torch.unbind(qkv, 2)
        # (B, N, heads, head_dim) -> (B, heads, N, head_dim)
        q, k, v = [t.transpose(1, 2) for t in [q, k, v]]
        if rope is not None:
            q, k = self.apply_rope(q, k, rope)
        x = torch.nn.functional.scaled_dot_product_attention(q, k, v)
        x = x.transpose(1, 2)
        return x.reshape([B, N, C])
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
# ---------- Block (from eupe/layers/block.py) ------------------------------
|
| 437 |
+
|
| 438 |
+
class SelfAttentionBlock(nn.Module):
|
| 439 |
+
def __init__(
|
| 440 |
+
self,
|
| 441 |
+
dim: int,
|
| 442 |
+
num_heads: int,
|
| 443 |
+
ffn_ratio: float = 4.0,
|
| 444 |
+
qkv_bias: bool = False,
|
| 445 |
+
proj_bias: bool = True,
|
| 446 |
+
ffn_bias: bool = True,
|
| 447 |
+
drop: float = 0.0,
|
| 448 |
+
attn_drop: float = 0.0,
|
| 449 |
+
init_values=None,
|
| 450 |
+
drop_path: float = 0.0,
|
| 451 |
+
act_layer: Callable[..., nn.Module] = nn.GELU,
|
| 452 |
+
norm_layer: Callable[..., nn.Module] = nn.LayerNorm,
|
| 453 |
+
attn_class: Callable[..., nn.Module] = SelfAttention,
|
| 454 |
+
ffn_layer: Callable[..., nn.Module] = Mlp,
|
| 455 |
+
mask_k_bias: bool = False,
|
| 456 |
+
device=None,
|
| 457 |
+
) -> None:
|
| 458 |
+
super().__init__()
|
| 459 |
+
self.norm1 = norm_layer(dim)
|
| 460 |
+
self.attn = attn_class(
|
| 461 |
+
dim,
|
| 462 |
+
num_heads=num_heads,
|
| 463 |
+
qkv_bias=qkv_bias,
|
| 464 |
+
proj_bias=proj_bias,
|
| 465 |
+
attn_drop=attn_drop,
|
| 466 |
+
proj_drop=drop,
|
| 467 |
+
mask_k_bias=mask_k_bias,
|
| 468 |
+
device=device,
|
| 469 |
+
)
|
| 470 |
+
self.ls1 = LayerScale(dim, init_values=init_values, device=device) if init_values else nn.Identity()
|
| 471 |
+
self.norm2 = norm_layer(dim)
|
| 472 |
+
mlp_hidden_dim = int(dim * ffn_ratio)
|
| 473 |
+
self.mlp = ffn_layer(
|
| 474 |
+
in_features=dim,
|
| 475 |
+
hidden_features=mlp_hidden_dim,
|
| 476 |
+
act_layer=act_layer,
|
| 477 |
+
drop=drop,
|
| 478 |
+
bias=ffn_bias,
|
| 479 |
+
device=device,
|
| 480 |
+
)
|
| 481 |
+
self.ls2 = LayerScale(dim, init_values=init_values, device=device) if init_values else nn.Identity()
|
| 482 |
+
self.sample_drop_ratio = drop_path
|
| 483 |
+
|
| 484 |
+
@staticmethod
|
| 485 |
+
def _maybe_index_rope(rope, indices: Tensor):
|
| 486 |
+
if rope is None:
|
| 487 |
+
return None
|
| 488 |
+
sin, cos = rope
|
| 489 |
+
assert sin.ndim == cos.ndim
|
| 490 |
+
if sin.ndim == 4:
|
| 491 |
+
return sin[indices], cos[indices]
|
| 492 |
+
return sin, cos
|
| 493 |
+
|
| 494 |
+
    def _forward_list(self, x_list: List[Tensor], rope_list=None) -> List[Tensor]:
        """Apply the block to several token batches with per-sample stochastic depth.

        In training with ``sample_drop_ratio > 0`` each residual branch is
        evaluated only on a random subset of every batch; surviving residuals
        are scaled by batch/subset and scattered back via ``index_add`` so the
        expected update matches a full-batch pass. At eval (or ratio 0) every
        sample goes through both branches.
        """
        b_list = [x.shape[0] for x in x_list]
        # Number of samples that keep the residual branches (at least 1 each).
        sample_subset_sizes = [max(int(b * (1 - self.sample_drop_ratio)), 1) for b in b_list]

        if self.training and self.sample_drop_ratio > 0.0:
            # Scale surviving residuals up so the expected update is unchanged.
            residual_scale_factors = [b / s for b, s in zip(b_list, sample_subset_sizes)]
            # Random subset for the attention branch.
            indices_1_list = [
                torch.randperm(b, device=x.device)[:s]
                for x, b, s in zip(x_list, b_list, sample_subset_sizes)
            ]
            x_subset_1_list = [x[i] for x, i in zip(x_list, indices_1_list)]
            if rope_list is not None:
                # Batched rope tables must be subset the same way as x.
                rope_subset_list = [
                    self._maybe_index_rope(r, i) for r, i in zip(rope_list, indices_1_list)
                ]
            else:
                rope_subset_list = rope_list

            # Concatenate across batches so norm1 runs as one kernel, then split back.
            flattened, shapes, num_tokens = cat_keep_shapes(x_subset_1_list)
            norm1 = uncat_with_shapes(self.norm1(flattened), shapes, num_tokens)
            residual_1_list = self.attn.forward_list(norm1, rope_list=rope_subset_list)

            # Scatter the scaled attention residual back into the full batch.
            x_attn_list = [
                torch.index_add(x, dim=0, source=self.ls1(r1), index=i1, alpha=rsf)
                for x, r1, i1, rsf in zip(x_list, residual_1_list, indices_1_list, residual_scale_factors)
            ]

            # Fresh, independently sampled subset for the FFN branch.
            indices_2_list = [
                torch.randperm(b, device=x.device)[:s]
                for x, b, s in zip(x_list, b_list, sample_subset_sizes)
            ]
            x_subset_2_list = [x[i] for x, i in zip(x_attn_list, indices_2_list)]
            flattened, shapes, num_tokens = cat_keep_shapes(x_subset_2_list)
            norm2_list = uncat_with_shapes(self.norm2(flattened), shapes, num_tokens)
            residual_2_list = self.mlp.forward_list(norm2_list)

            x_ffn = [
                torch.index_add(xa, dim=0, source=self.ls2(r2), index=i2, alpha=rsf)
                for xa, r2, i2, rsf in zip(x_attn_list, residual_2_list, indices_2_list, residual_scale_factors)
            ]
        else:
            # Plain pre-norm transformer block, one batch at a time.
            x_out = []
            for x, rope in zip(x_list, rope_list):
                x_attn = x + self.ls1(self.attn(self.norm1(x), rope=rope))
                x_ffn = x_attn + self.ls2(self.mlp(self.norm2(x_attn)))
                x_out.append(x_ffn)
            x_ffn = x_out
        return x_ffn
|
| 542 |
+
|
| 543 |
+
def forward(self, x_or_x_list, rope_or_rope_list=None) -> List[Tensor]:
|
| 544 |
+
if isinstance(x_or_x_list, Tensor):
|
| 545 |
+
return self._forward_list([x_or_x_list], rope_list=[rope_or_rope_list])[0]
|
| 546 |
+
elif isinstance(x_or_x_list, list):
|
| 547 |
+
if rope_or_rope_list is None:
|
| 548 |
+
rope_or_rope_list = [None for _ in x_or_x_list]
|
| 549 |
+
return self._forward_list(x_or_x_list, rope_list=rope_or_rope_list)
|
| 550 |
+
raise AssertionError
|
| 551 |
+
|
| 552 |
+
|
| 553 |
+
# ---------- DinoVisionTransformer (from eupe/models/vision_transformer.py)
|
| 554 |
+
|
| 555 |
+
# Registry mapping config strings to feed-forward layer factories.
ffn_layer_dict = {
    "mlp": Mlp,
    "swiglu": SwiGLUFFN,
    # SwiGLU variants whose hidden width is rounded to a fixed alignment.
    **{f"swiglu{align}": partial(SwiGLUFFN, align_to=align) for align in (32, 64, 128)},
}
|
| 562 |
+
|
| 563 |
+
# Registry mapping config strings to normalization layer factories.
# NOTE(review): "layernormbf16" only differs by a looser eps (1e-5 vs 1e-6),
# presumably chosen for bf16 numerics — confirm.
norm_layer_dict = dict(
    layernorm=partial(nn.LayerNorm, eps=1e-6),
    layernormbf16=partial(nn.LayerNorm, eps=1e-5),
    rmsnorm=RMSNorm,
)
|
| 568 |
+
|
| 569 |
+
# Short dtype names accepted in configs, mapped to torch dtypes.
dtype_dict = dict(
    fp32=torch.float32,
    fp16=torch.float16,
    bf16=torch.bfloat16,
)
|
| 574 |
+
|
| 575 |
+
|
| 576 |
+
def init_weights_vit(module: nn.Module, name: str = ""):
    """Per-module ViT weight init, applied through ``named_apply``.

    Linear layers get truncated-normal weights (std 0.02) and zero biases;
    norm/scale/embedding modules are reset through their own
    ``reset_parameters``. ``name`` is unused but kept for the
    ``named_apply`` callback signature.
    """
    if isinstance(module, nn.Linear):
        nn.init.trunc_normal_(module.weight, std=0.02)
        if module.bias is not None:
            nn.init.zeros_(module.bias)
        bias_mask = getattr(module, "bias_mask", None)
        if bias_mask is not None:
            # Enable the whole mask, then zero the middle third — presumably
            # the key rows of a fused QKV projection (mask_k_bias) — confirm.
            out = module.out_features
            bias_mask.fill_(1)
            bias_mask[out // 3 : 2 * out // 3].fill_(0)
    if isinstance(module, (nn.LayerNorm, LayerScale, PatchEmbed, RMSNorm)):
        module.reset_parameters()
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
class DinoVisionTransformer(nn.Module):
    """DINO-style ViT with rotary position embeddings and storage tokens.

    (from eupe/models/vision_transformer.py)

    Tokens are laid out as ``[cls, storage..., patches...]``. ``forward``
    returns feature dicts when ``is_training`` else the (identity) head
    applied to the cls token.
    """

    def __init__(
        self,
        *,
        img_size: int = 224,
        patch_size: int = 16,
        in_chans: int = 3,
        pos_embed_rope_base: float = 100.0,
        pos_embed_rope_min_period: Optional[float] = None,
        pos_embed_rope_max_period: Optional[float] = None,
        pos_embed_rope_normalize_coords: Literal["min", "max", "separate"] = "separate",
        pos_embed_rope_shift_coords: Optional[float] = None,
        pos_embed_rope_jitter_coords: Optional[float] = None,
        pos_embed_rope_rescale_coords: Optional[float] = None,
        pos_embed_rope_dtype: str = "bf16",
        embed_dim: int = 768,
        depth: int = 12,
        num_heads: int = 12,
        ffn_ratio: float = 4.0,
        qkv_bias: bool = True,
        drop_path_rate: float = 0.0,
        layerscale_init: Optional[float] = None,
        norm_layer: str = "layernorm",
        ffn_layer: str = "mlp",
        ffn_bias: bool = True,
        proj_bias: bool = True,
        n_storage_tokens: int = 0,
        mask_k_bias: bool = False,
        untie_cls_and_patch_norms: bool = False,
        untie_global_and_local_cls_norm: bool = False,
        device: Any = None,
        **ignored_kwargs,
    ):
        super().__init__()
        # Unknown config keys are accepted and dropped for config compatibility.
        del ignored_kwargs

        norm_layer_cls = norm_layer_dict[norm_layer]

        self.num_features = self.embed_dim = embed_dim
        self.n_blocks = depth
        self.num_heads = num_heads
        self.patch_size = patch_size

        # Patch embedding keeps the (B, H, W, C) layout; flattened later.
        self.patch_embed = PatchEmbed(
            img_size=img_size,
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
            flatten_embedding=False,
        )

        self.cls_token = nn.Parameter(torch.empty(1, 1, embed_dim, device=device))
        self.n_storage_tokens = n_storage_tokens
        if self.n_storage_tokens > 0:
            self.storage_tokens = nn.Parameter(torch.empty(1, n_storage_tokens, embed_dim, device=device))

        # Rotary position embedding shared by all blocks.
        self.rope_embed = RopePositionEmbedding(
            embed_dim=embed_dim,
            num_heads=num_heads,
            base=pos_embed_rope_base,
            min_period=pos_embed_rope_min_period,
            max_period=pos_embed_rope_max_period,
            normalize_coords=pos_embed_rope_normalize_coords,
            shift_coords=pos_embed_rope_shift_coords,
            jitter_coords=pos_embed_rope_jitter_coords,
            rescale_coords=pos_embed_rope_rescale_coords,
            dtype=dtype_dict[pos_embed_rope_dtype],
            device=device,
        )

        ffn_layer_cls = ffn_layer_dict[ffn_layer]
        # Same FFN ratio for every block; kept as a sequence for flexibility.
        ffn_ratio_sequence = [ffn_ratio] * depth
        blocks_list = [
            SelfAttentionBlock(
                dim=embed_dim,
                num_heads=num_heads,
                ffn_ratio=ffn_ratio_sequence[i],
                qkv_bias=qkv_bias,
                proj_bias=proj_bias,
                ffn_bias=ffn_bias,
                drop_path=drop_path_rate,
                norm_layer=norm_layer_cls,
                act_layer=nn.GELU,
                ffn_layer=ffn_layer_cls,
                init_values=layerscale_init,
                mask_k_bias=mask_k_bias,
                device=device,
            )
            for i in range(depth)
        ]

        self.chunked_blocks = False
        self.blocks = nn.ModuleList(blocks_list)
        self.norm = norm_layer_cls(embed_dim)

        # Optionally normalize cls/storage tokens with a norm separate from patches.
        self.untie_cls_and_patch_norms = untie_cls_and_patch_norms
        self.cls_norm = norm_layer_cls(embed_dim) if untie_cls_and_patch_norms else None

        # Optionally use a dedicated cls norm for local crops during training.
        self.untie_global_and_local_cls_norm = untie_global_and_local_cls_norm
        self.local_cls_norm = norm_layer_cls(embed_dim) if untie_global_and_local_cls_norm else None

        self.head = nn.Identity()
        # Learned token substituted at masked patch positions.
        self.mask_token = nn.Parameter(torch.empty(1, embed_dim, device=device))

    def init_weights(self):
        """Initialize rope tables, learned tokens, and all submodules."""
        self.rope_embed._init_weights()
        nn.init.normal_(self.cls_token, std=0.02)
        if self.n_storage_tokens > 0:
            nn.init.normal_(self.storage_tokens, std=0.02)
        nn.init.zeros_(self.mask_token)
        named_apply(init_weights_vit, self)

    def prepare_tokens_with_masks(self, x: Tensor, masks=None) -> Tuple[Tensor, Tuple[int, int]]:
        """Patchify ``x`` and prepend cls + storage tokens.

        Returns the token sequence of shape (B, 1 + n_storage + H*W, D) and
        the patch grid size (H, W) used to build rope tables.
        """
        x = self.patch_embed(x)
        B, H, W, _ = x.shape
        x = x.flatten(1, 2)

        if masks is not None:
            # Replace masked patch embeddings with the learned mask token.
            x = torch.where(masks.unsqueeze(-1), self.mask_token.to(x.dtype).unsqueeze(0), x)
            cls_token = self.cls_token
        else:
            # Touch mask_token so it always participates in the graph —
            # presumably to keep distributed training happy when no masks are
            # provided; confirm.
            cls_token = self.cls_token + 0 * self.mask_token

        if self.n_storage_tokens > 0:
            storage_tokens = self.storage_tokens
        else:
            # Zero-width placeholder so the concatenation below is uniform.
            storage_tokens = torch.empty(
                1, 0, cls_token.shape[-1],
                dtype=cls_token.dtype, device=cls_token.device,
            )

        x = torch.cat(
            [cls_token.expand(B, -1, -1), storage_tokens.expand(B, -1, -1), x],
            dim=1,
        )
        return x, (H, W)

    def forward_features_list(self, x_list: List[Tensor], masks_list: List[Tensor]) -> List[Dict[str, Tensor]]:
        """Encode several image batches jointly; return a feature dict per batch."""
        x = []
        rope = []
        for t_x, t_masks in zip(x_list, masks_list):
            t2_x, hw_tuple = self.prepare_tokens_with_masks(t_x, t_masks)
            x.append(t2_x)
            rope.append(hw_tuple)
        for blk in self.blocks:
            if self.rope_embed is not None:
                # Rope tables are rebuilt for each block; when shift/jitter
                # augmentations are enabled they are presumably resampled per
                # block — confirm this is intended.
                rope_sincos = [self.rope_embed(H=H, W=W) for H, W in rope]
            else:
                rope_sincos = [None for _ in rope]
            x = blk(x, rope_sincos)
        all_x = x
        output = []
        for idx, (x, masks) in enumerate(zip(all_x, masks_list)):
            if self.untie_cls_and_patch_norms or self.untie_global_and_local_cls_norm:
                # NOTE(review): idx == 1 assumes local crops arrive as the
                # second list entry during training — verify against callers.
                if self.untie_global_and_local_cls_norm and self.training and idx == 1:
                    x_norm_cls_reg = self.local_cls_norm(x[:, : self.n_storage_tokens + 1])
                elif self.untie_cls_and_patch_norms:
                    x_norm_cls_reg = self.cls_norm(x[:, : self.n_storage_tokens + 1])
                else:
                    x_norm_cls_reg = self.norm(x[:, : self.n_storage_tokens + 1])
                x_norm_patch = self.norm(x[:, self.n_storage_tokens + 1 :])
            else:
                x_norm = self.norm(x)
                x_norm_cls_reg = x_norm[:, : self.n_storage_tokens + 1]
                x_norm_patch = x_norm[:, self.n_storage_tokens + 1 :]
            output.append({
                "x_norm_clstoken": x_norm_cls_reg[:, 0],
                "x_storage_tokens": x_norm_cls_reg[:, 1:],
                "x_norm_patchtokens": x_norm_patch,
                "x_prenorm": x,
                "masks": masks,
            })
        return output

    def forward_features(self, x, masks: Optional[Tensor] = None):
        """Single-batch convenience wrapper around ``forward_features_list``.

        When ``x`` is a list, ``masks`` is expected to be a matching list.
        """
        if isinstance(x, torch.Tensor):
            return self.forward_features_list([x], [masks])[0]
        return self.forward_features_list(x, masks)

    def forward(self, *args, is_training: bool = False, **kwargs):
        """Return feature dict(s) when training, else the head on the cls token."""
        ret = self.forward_features(*args, **kwargs)
        if is_training:
            return ret
        return self.head(ret["x_norm_clstoken"])
|
| 779 |
+
|
| 780 |
+
|
| 781 |
+
def build_eupe_vitb16() -> DinoVisionTransformer:
    """Construct the ViT-B/16 EUPE backbone with the Argus defaults."""
    backbone_kwargs = dict(
        img_size=224,
        patch_size=16,
        in_chans=3,
        pos_embed_rope_base=100,
        pos_embed_rope_normalize_coords="separate",
        pos_embed_rope_rescale_coords=2,
        pos_embed_rope_dtype="fp32",
        embed_dim=768,
        depth=12,
        num_heads=12,
        ffn_ratio=4,
        qkv_bias=True,
        drop_path_rate=0.0,
        layerscale_init=1.0e-05,
        norm_layer="layernormbf16",
        ffn_layer="mlp",
        ffn_bias=True,
        proj_bias=True,
        n_storage_tokens=4,
        mask_k_bias=True,
    )
    return DinoVisionTransformer(**backbone_kwargs)
|
| 804 |
+
|
| 805 |
+
|
| 806 |
+
# ===========================================================================
|
| 807 |
+
# Argus task heads
|
| 808 |
+
# ===========================================================================
|
| 809 |
+
|
| 810 |
+
def make_eupe_transform(resize_size: int):
    """Square-resize + ImageNet-normalize preprocessing for EUPE inputs."""
    steps = [
        v2.ToImage(),
        v2.Resize((resize_size, resize_size), antialias=True),
        v2.ToDtype(torch.float32, scale=True),
        v2.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
    ]
    return v2.Compose(steps)
|
| 817 |
+
|
| 818 |
+
|
| 819 |
+
class SegmentationHead(nn.Module):
    """Linear segmentation probe: BatchNorm then a 1x1 conv to class logits."""

    def __init__(self, in_dim: int = 768, num_classes: int = 150):
        super().__init__()
        self.batchnorm_layer = nn.BatchNorm2d(in_dim)
        self.conv = nn.Conv2d(in_dim, num_classes, kernel_size=1)

    def forward(self, x: Tensor) -> Tensor:
        """Map (B, in_dim, H, W) features to (B, num_classes, H, W) logits."""
        normed = self.batchnorm_layer(x)
        return self.conv(normed)
|
| 827 |
+
|
| 828 |
+
|
| 829 |
+
class DepthHead(nn.Module):
    """Depth probe: per-pixel distribution over depth bins reduced to its mean."""

    def __init__(self, in_dim: int = 768, n_bins: int = 256,
                 min_depth: float = 0.001, max_depth: float = 10.0):
        super().__init__()
        self.batchnorm_layer = nn.BatchNorm2d(in_dim)
        self.conv_depth = nn.Conv2d(in_dim, n_bins, kernel_size=1)
        self.min_depth = min_depth
        self.max_depth = max_depth
        self.n_bins = n_bins

    def forward(self, x: Tensor) -> Tensor:
        """Return a (B, 1, H, W) depth map for (B, in_dim, H, W) features."""
        bin_logits = self.conv_depth(self.batchnorm_layer(x))
        # ReLU + 0.1 keeps every bin weight strictly positive before normalizing.
        weights = torch.relu(bin_logits) + 0.1
        weights = weights / weights.sum(dim=1, keepdim=True)
        bin_centers = torch.linspace(self.min_depth, self.max_depth, self.n_bins, device=x.device)
        # Expected depth under the per-pixel bin distribution.
        return torch.einsum("bkhw,k->bhw", weights, bin_centers).unsqueeze(1)
|
| 845 |
+
|
| 846 |
+
|
| 847 |
+
# ===========================================================================
|
| 848 |
+
# Argus model (transformers-compatible)
|
| 849 |
+
# ===========================================================================
|
| 850 |
+
|
| 851 |
+
|
| 852 |
+
class ArgusConfig(PretrainedConfig):
    """Configuration for the Argus multi-task perception model."""

    model_type = "argus"

    def __init__(
        self,
        embed_dim: int = 768,
        patch_size: int = 16,
        num_seg_classes: int = 150,
        depth_n_bins: int = 256,
        depth_min_depth: float = 0.001,
        depth_max_depth: float = 10.0,
        num_imagenet_classes: int = 1000,
        class_ids: Optional[list] = None,
        class_names: Optional[list] = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        scalar_fields = {
            "embed_dim": embed_dim,
            "patch_size": patch_size,
            "num_seg_classes": num_seg_classes,
            "depth_n_bins": depth_n_bins,
            "depth_min_depth": depth_min_depth,
            "depth_max_depth": depth_max_depth,
            "num_imagenet_classes": num_imagenet_classes,
        }
        for field_name, value in scalar_fields.items():
            setattr(self, field_name, value)
        # Empty lists (rather than None) keep downstream indexing safe.
        self.class_ids = class_ids or []
        self.class_names = class_names or []
|
| 878 |
+
|
| 879 |
+
|
| 880 |
+
class Argus(PreTrainedModel):
    """Frozen EUPE ViT-B/16 backbone with lightweight probes for several tasks.

    Exposes classification (nearest class prototype), semantic segmentation,
    monocular depth, dense correspondence, and a combined ``perceive`` call.
    All inference paths run under ``torch.inference_mode`` with bf16 autocast
    on CUDA.
    """

    config_class = ArgusConfig
    base_model_prefix = "argus"
    supports_gradient_checkpointing = False
    _tied_weights_keys: list = []
    all_tied_weights_keys: dict = {}

    def __init__(self, config: ArgusConfig):
        super().__init__(config)
        self.backbone = build_eupe_vitb16()
        self.seg_head = SegmentationHead(config.embed_dim, config.num_seg_classes)
        self.depth_head = DepthHead(
            in_dim=config.embed_dim,
            n_bins=config.depth_n_bins,
            min_depth=config.depth_min_depth,
            max_depth=config.depth_max_depth,
        )
        # Per-class embedding prototypes used for classification; persistent
        # so they are saved/loaded with the checkpoint.
        self.register_buffer(
            "class_prototypes",
            torch.zeros(config.num_imagenet_classes, config.embed_dim),
            persistent=True,
        )

        # Inference-only model: freeze the backbone and keep the parts in eval mode.
        for p in self.backbone.parameters():
            p.requires_grad = False
        self.backbone.eval()
        self.seg_head.eval()
        self.depth_head.eval()

    def _init_weights(self, module):
        # All weights come from the checkpoint; skip HF random initialization.
        pass

    @property
    def class_ids(self):
        # Class-id list mirrored from the config.
        return self.config.class_ids

    @property
    def class_names(self):
        # Human-readable class names mirrored from the config.
        return self.config.class_names

    @torch.inference_mode()
    def _extract(self, image_tensor: Tensor) -> Tuple[Tensor, Tensor]:
        """Run the backbone and return (cls embedding, spatial feature map).

        ``spatial`` has shape (B, D, h, w); assumes a square patch grid
        (h = w = sqrt(N)) — TODO confirm for non-square inputs.
        """
        with torch.autocast(self.device.type, dtype=torch.bfloat16, enabled=self.device.type == "cuda"):
            out = self.backbone.forward_features(image_tensor)
        # Cast back to fp32 for downstream similarity/interpolation math.
        cls = out["x_norm_clstoken"].float()
        patches = out["x_norm_patchtokens"].float()
        B, N, D = patches.shape
        h = w = int(N ** 0.5)
        spatial = patches.permute(0, 2, 1).reshape(B, D, h, w)
        return cls, spatial

    @torch.inference_mode()
    def classify(self, image: Image.Image, top_k: int = 5):
        """Return the ``top_k`` classes by cosine similarity to the prototypes."""
        x = make_eupe_transform(224)(image).unsqueeze(0).to(self.device)
        cls, _ = self._extract(x)
        # Normalize the query; prototype rows are presumably already
        # unit-norm so the dot product is a cosine similarity — confirm.
        cls = F.normalize(cls, dim=-1)
        sims = cls @ self.class_prototypes.T
        topk = sims[0].topk(top_k)
        return [
            {
                "class_id": self.class_ids[idx],
                "class_name": self.class_names[idx],
                "score": float(score),
            }
            for score, idx in zip(topk.values.tolist(), topk.indices.tolist())
        ]

    @torch.inference_mode()
    def segment(self, image: Image.Image, resolution: int = 512) -> Tensor:
        """Return an (resolution, resolution) int map of per-pixel class ids."""
        x = make_eupe_transform(resolution)(image).unsqueeze(0).to(self.device)
        _, spatial = self._extract(x)
        with torch.autocast(self.device.type, dtype=torch.bfloat16, enabled=self.device.type == "cuda"):
            logits = self.seg_head(spatial)
        # Upsample the low-res logit grid back to the input resolution.
        logits = F.interpolate(logits, size=(resolution, resolution), mode="bilinear", align_corners=False)
        return logits.argmax(dim=1)[0]

    @torch.inference_mode()
    def depth(self, image: Image.Image, resolution: int = 416) -> Tensor:
        """Return an (resolution, resolution) float32 depth map."""
        x = make_eupe_transform(resolution)(image).unsqueeze(0).to(self.device)
        _, spatial = self._extract(x)
        with torch.autocast(self.device.type, dtype=torch.bfloat16, enabled=self.device.type == "cuda"):
            depth = self.depth_head(spatial)
        depth = F.interpolate(depth, size=(resolution, resolution), mode="bilinear", align_corners=False)
        return depth[0, 0].float()

    @torch.inference_mode()
    def correspond(
        self,
        src_image: Image.Image,
        tgt_image: Image.Image,
        src_keypoints: list,
        resolution: int = 512,
    ):
        """Match ``src_keypoints`` (pixel [x, y] in src) to points in tgt.

        Features of both images are upsampled to ``resolution`` and matched by
        cosine similarity; returns one predicted [x, y] (in target pixel
        coordinates) per source keypoint.
        """
        sw, sh = src_image.size
        tw, th = tgt_image.size
        transform = make_eupe_transform(resolution)
        src_t = transform(src_image).unsqueeze(0).to(self.device)
        tgt_t = transform(tgt_image).unsqueeze(0).to(self.device)

        _, src_feats = self._extract(src_t)
        _, tgt_feats = self._extract(tgt_t)

        # Upsample feature maps to pixel resolution for per-pixel matching.
        src_feats = F.interpolate(src_feats, size=(resolution, resolution), mode="bilinear", align_corners=False)
        tgt_feats = F.interpolate(tgt_feats, size=(resolution, resolution), mode="bilinear", align_corners=False)

        # (H, W, D) unit vectors so dot products are cosine similarities.
        src_feats = F.normalize(src_feats[0].permute(1, 2, 0), dim=-1)
        tgt_feats = F.normalize(tgt_feats[0].permute(1, 2, 0), dim=-1)

        preds = []
        for kp in src_keypoints:
            # Map the keypoint into the resized grid, clamped to bounds.
            sx = min(max(int(kp[0] / sw * resolution), 0), resolution - 1)
            sy = min(max(int(kp[1] / sh * resolution), 0), resolution - 1)
            src_vec = src_feats[sy, sx]
            sim_map = torch.einsum("d,hwd->hw", src_vec, tgt_feats)
            # Argmax over the flattened similarity map -> (row, col).
            flat = sim_map.argmax().item()
            py, px = flat // resolution, flat % resolution
            # Convert back to target-image pixel coordinates.
            preds.append([px / resolution * tw, py / resolution * th])
        return preds

    def perceive(self, image: Image.Image):
        """Run classify/segment/depth on one image and report per-task timings."""
        t0 = time.time()
        classif = self.classify(image, top_k=5)
        t1 = time.time()
        seg = self.segment(image, resolution=512)
        t2 = time.time()
        dep = self.depth(image, resolution=416)
        t3 = time.time()
        return {
            "classification": classif,
            "segmentation": seg.cpu().numpy(),
            "depth": dep.cpu().numpy(),
            "timings_ms": {
                "classify": (t1 - t0) * 1000,
                "segment": (t2 - t1) * 1000,
                "depth": (t3 - t2) * 1000,
                "total": (t3 - t0) * 1000,
            },
        }
|
config.json
ADDED
|
@@ -0,0 +1,2022 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"Argus"
|
| 4 |
+
],
|
| 5 |
+
"auto_map": {
|
| 6 |
+
"AutoConfig": "argus.ArgusConfig",
|
| 7 |
+
"AutoModel": "argus.Argus"
|
| 8 |
+
},
|
| 9 |
+
"model_type": "argus",
|
| 10 |
+
"embed_dim": 768,
|
| 11 |
+
"patch_size": 16,
|
| 12 |
+
"num_seg_classes": 150,
|
| 13 |
+
"depth_n_bins": 256,
|
| 14 |
+
"depth_min_depth": 0.001,
|
| 15 |
+
"depth_max_depth": 10.0,
|
| 16 |
+
"num_imagenet_classes": 1000,
|
| 17 |
+
"class_ids": [
|
| 18 |
+
"n01440764",
|
| 19 |
+
"n01443537",
|
| 20 |
+
"n01484850",
|
| 21 |
+
"n01491361",
|
| 22 |
+
"n01494475",
|
| 23 |
+
"n01496331",
|
| 24 |
+
"n01498041",
|
| 25 |
+
"n01514668",
|
| 26 |
+
"n01514859",
|
| 27 |
+
"n01518878",
|
| 28 |
+
"n01530575",
|
| 29 |
+
"n01531178",
|
| 30 |
+
"n01532829",
|
| 31 |
+
"n01534433",
|
| 32 |
+
"n01537544",
|
| 33 |
+
"n01558993",
|
| 34 |
+
"n01560419",
|
| 35 |
+
"n01580077",
|
| 36 |
+
"n01582220",
|
| 37 |
+
"n01592084",
|
| 38 |
+
"n01601694",
|
| 39 |
+
"n01608432",
|
| 40 |
+
"n01614925",
|
| 41 |
+
"n01616318",
|
| 42 |
+
"n01622779",
|
| 43 |
+
"n01629819",
|
| 44 |
+
"n01630670",
|
| 45 |
+
"n01631663",
|
| 46 |
+
"n01632458",
|
| 47 |
+
"n01632777",
|
| 48 |
+
"n01641577",
|
| 49 |
+
"n01644373",
|
| 50 |
+
"n01644900",
|
| 51 |
+
"n01664065",
|
| 52 |
+
"n01665541",
|
| 53 |
+
"n01667114",
|
| 54 |
+
"n01667778",
|
| 55 |
+
"n01669191",
|
| 56 |
+
"n01675722",
|
| 57 |
+
"n01677366",
|
| 58 |
+
"n01682714",
|
| 59 |
+
"n01685808",
|
| 60 |
+
"n01687978",
|
| 61 |
+
"n01688243",
|
| 62 |
+
"n01689811",
|
| 63 |
+
"n01692333",
|
| 64 |
+
"n01693334",
|
| 65 |
+
"n01694178",
|
| 66 |
+
"n01695060",
|
| 67 |
+
"n01697457",
|
| 68 |
+
"n01698640",
|
| 69 |
+
"n01704323",
|
| 70 |
+
"n01728572",
|
| 71 |
+
"n01728920",
|
| 72 |
+
"n01729322",
|
| 73 |
+
"n01729977",
|
| 74 |
+
"n01734418",
|
| 75 |
+
"n01735189",
|
| 76 |
+
"n01737021",
|
| 77 |
+
"n01739381",
|
| 78 |
+
"n01740131",
|
| 79 |
+
"n01742172",
|
| 80 |
+
"n01744401",
|
| 81 |
+
"n01748264",
|
| 82 |
+
"n01749939",
|
| 83 |
+
"n01751748",
|
| 84 |
+
"n01753488",
|
| 85 |
+
"n01755581",
|
| 86 |
+
"n01756291",
|
| 87 |
+
"n01768244",
|
| 88 |
+
"n01770081",
|
| 89 |
+
"n01770393",
|
| 90 |
+
"n01773157",
|
| 91 |
+
"n01773549",
|
| 92 |
+
"n01773797",
|
| 93 |
+
"n01774384",
|
| 94 |
+
"n01774750",
|
| 95 |
+
"n01775062",
|
| 96 |
+
"n01776313",
|
| 97 |
+
"n01784675",
|
| 98 |
+
"n01795545",
|
| 99 |
+
"n01796340",
|
| 100 |
+
"n01797886",
|
| 101 |
+
"n01798484",
|
| 102 |
+
"n01806143",
|
| 103 |
+
"n01806567",
|
| 104 |
+
"n01807496",
|
| 105 |
+
"n01817953",
|
| 106 |
+
"n01818515",
|
| 107 |
+
"n01819313",
|
| 108 |
+
"n01820546",
|
| 109 |
+
"n01824575",
|
| 110 |
+
"n01828970",
|
| 111 |
+
"n01829413",
|
| 112 |
+
"n01833805",
|
| 113 |
+
"n01843065",
|
| 114 |
+
"n01843383",
|
| 115 |
+
"n01847000",
|
| 116 |
+
"n01855032",
|
| 117 |
+
"n01855672",
|
| 118 |
+
"n01860187",
|
| 119 |
+
"n01871265",
|
| 120 |
+
"n01872401",
|
| 121 |
+
"n01873310",
|
| 122 |
+
"n01877812",
|
| 123 |
+
"n01882714",
|
| 124 |
+
"n01883070",
|
| 125 |
+
"n01910747",
|
| 126 |
+
"n01914609",
|
| 127 |
+
"n01917289",
|
| 128 |
+
"n01924916",
|
| 129 |
+
"n01930112",
|
| 130 |
+
"n01943899",
|
| 131 |
+
"n01944390",
|
| 132 |
+
"n01945685",
|
| 133 |
+
"n01950731",
|
| 134 |
+
"n01955084",
|
| 135 |
+
"n01968897",
|
| 136 |
+
"n01978287",
|
| 137 |
+
"n01978455",
|
| 138 |
+
"n01980166",
|
| 139 |
+
"n01981276",
|
| 140 |
+
"n01983481",
|
| 141 |
+
"n01984695",
|
| 142 |
+
"n01985128",
|
| 143 |
+
"n01986214",
|
| 144 |
+
"n01990800",
|
| 145 |
+
"n02002556",
|
| 146 |
+
"n02002724",
|
| 147 |
+
"n02006656",
|
| 148 |
+
"n02007558",
|
| 149 |
+
"n02009229",
|
| 150 |
+
"n02009912",
|
| 151 |
+
"n02011460",
|
| 152 |
+
"n02012849",
|
| 153 |
+
"n02013706",
|
| 154 |
+
"n02017213",
|
| 155 |
+
"n02018207",
|
| 156 |
+
"n02018795",
|
| 157 |
+
"n02025239",
|
| 158 |
+
"n02027492",
|
| 159 |
+
"n02028035",
|
| 160 |
+
"n02033041",
|
| 161 |
+
"n02037110",
|
| 162 |
+
"n02051845",
|
| 163 |
+
"n02056570",
|
| 164 |
+
"n02058221",
|
| 165 |
+
"n02066245",
|
| 166 |
+
"n02071294",
|
| 167 |
+
"n02074367",
|
| 168 |
+
"n02077923",
|
| 169 |
+
"n02085620",
|
| 170 |
+
"n02085782",
|
| 171 |
+
"n02085936",
|
| 172 |
+
"n02086079",
|
| 173 |
+
"n02086240",
|
| 174 |
+
"n02086646",
|
| 175 |
+
"n02086910",
|
| 176 |
+
"n02087046",
|
| 177 |
+
"n02087394",
|
| 178 |
+
"n02088094",
|
| 179 |
+
"n02088238",
|
| 180 |
+
"n02088364",
|
| 181 |
+
"n02088466",
|
| 182 |
+
"n02088632",
|
| 183 |
+
"n02089078",
|
| 184 |
+
"n02089867",
|
| 185 |
+
"n02089973",
|
| 186 |
+
"n02090379",
|
| 187 |
+
"n02090622",
|
| 188 |
+
"n02090721",
|
| 189 |
+
"n02091032",
|
| 190 |
+
"n02091134",
|
| 191 |
+
"n02091244",
|
| 192 |
+
"n02091467",
|
| 193 |
+
"n02091635",
|
| 194 |
+
"n02091831",
|
| 195 |
+
"n02092002",
|
| 196 |
+
"n02092339",
|
| 197 |
+
"n02093256",
|
| 198 |
+
"n02093428",
|
| 199 |
+
"n02093647",
|
| 200 |
+
"n02093754",
|
| 201 |
+
"n02093859",
|
| 202 |
+
"n02093991",
|
| 203 |
+
"n02094114",
|
| 204 |
+
"n02094258",
|
| 205 |
+
"n02094433",
|
| 206 |
+
"n02095314",
|
| 207 |
+
"n02095570",
|
| 208 |
+
"n02095889",
|
| 209 |
+
"n02096051",
|
| 210 |
+
"n02096177",
|
| 211 |
+
"n02096294",
|
| 212 |
+
"n02096437",
|
| 213 |
+
"n02096585",
|
| 214 |
+
"n02097047",
|
| 215 |
+
"n02097130",
|
| 216 |
+
"n02097209",
|
| 217 |
+
"n02097298",
|
| 218 |
+
"n02097474",
|
| 219 |
+
"n02097658",
|
| 220 |
+
"n02098105",
|
| 221 |
+
"n02098286",
|
| 222 |
+
"n02098413",
|
| 223 |
+
"n02099267",
|
| 224 |
+
"n02099429",
|
| 225 |
+
"n02099601",
|
| 226 |
+
"n02099712",
|
| 227 |
+
"n02099849",
|
| 228 |
+
"n02100236",
|
| 229 |
+
"n02100583",
|
| 230 |
+
"n02100735",
|
| 231 |
+
"n02100877",
|
| 232 |
+
"n02101006",
|
| 233 |
+
"n02101388",
|
| 234 |
+
"n02101556",
|
| 235 |
+
"n02102040",
|
| 236 |
+
"n02102177",
|
| 237 |
+
"n02102318",
|
| 238 |
+
"n02102480",
|
| 239 |
+
"n02102973",
|
| 240 |
+
"n02104029",
|
| 241 |
+
"n02104365",
|
| 242 |
+
"n02105056",
|
| 243 |
+
"n02105162",
|
| 244 |
+
"n02105251",
|
| 245 |
+
"n02105412",
|
| 246 |
+
"n02105505",
|
| 247 |
+
"n02105641",
|
| 248 |
+
"n02105855",
|
| 249 |
+
"n02106030",
|
| 250 |
+
"n02106166",
|
| 251 |
+
"n02106382",
|
| 252 |
+
"n02106550",
|
| 253 |
+
"n02106662",
|
| 254 |
+
"n02107142",
|
| 255 |
+
"n02107312",
|
| 256 |
+
"n02107574",
|
| 257 |
+
"n02107683",
|
| 258 |
+
"n02107908",
|
| 259 |
+
"n02108000",
|
| 260 |
+
"n02108089",
|
| 261 |
+
"n02108422",
|
| 262 |
+
"n02108551",
|
| 263 |
+
"n02108915",
|
| 264 |
+
"n02109047",
|
| 265 |
+
"n02109525",
|
| 266 |
+
"n02109961",
|
| 267 |
+
"n02110063",
|
| 268 |
+
"n02110185",
|
| 269 |
+
"n02110341",
|
| 270 |
+
"n02110627",
|
| 271 |
+
"n02110806",
|
| 272 |
+
"n02110958",
|
| 273 |
+
"n02111129",
|
| 274 |
+
"n02111277",
|
| 275 |
+
"n02111500",
|
| 276 |
+
"n02111889",
|
| 277 |
+
"n02112018",
|
| 278 |
+
"n02112137",
|
| 279 |
+
"n02112350",
|
| 280 |
+
"n02112706",
|
| 281 |
+
"n02113023",
|
| 282 |
+
"n02113186",
|
| 283 |
+
"n02113624",
|
| 284 |
+
"n02113712",
|
| 285 |
+
"n02113799",
|
| 286 |
+
"n02113978",
|
| 287 |
+
"n02114367",
|
| 288 |
+
"n02114548",
|
| 289 |
+
"n02114712",
|
| 290 |
+
"n02114855",
|
| 291 |
+
"n02115641",
|
| 292 |
+
"n02115913",
|
| 293 |
+
"n02116738",
|
| 294 |
+
"n02117135",
|
| 295 |
+
"n02119022",
|
| 296 |
+
"n02119789",
|
| 297 |
+
"n02120079",
|
| 298 |
+
"n02120505",
|
| 299 |
+
"n02123045",
|
| 300 |
+
"n02123159",
|
| 301 |
+
"n02123394",
|
| 302 |
+
"n02123597",
|
| 303 |
+
"n02124075",
|
| 304 |
+
"n02125311",
|
| 305 |
+
"n02127052",
|
| 306 |
+
"n02128385",
|
| 307 |
+
"n02128757",
|
| 308 |
+
"n02128925",
|
| 309 |
+
"n02129165",
|
| 310 |
+
"n02129604",
|
| 311 |
+
"n02130308",
|
| 312 |
+
"n02132136",
|
| 313 |
+
"n02133161",
|
| 314 |
+
"n02134084",
|
| 315 |
+
"n02134418",
|
| 316 |
+
"n02137549",
|
| 317 |
+
"n02138441",
|
| 318 |
+
"n02165105",
|
| 319 |
+
"n02165456",
|
| 320 |
+
"n02167151",
|
| 321 |
+
"n02168699",
|
| 322 |
+
"n02169497",
|
| 323 |
+
"n02172182",
|
| 324 |
+
"n02174001",
|
| 325 |
+
"n02177972",
|
| 326 |
+
"n02190166",
|
| 327 |
+
"n02206856",
|
| 328 |
+
"n02219486",
|
| 329 |
+
"n02226429",
|
| 330 |
+
"n02229544",
|
| 331 |
+
"n02231487",
|
| 332 |
+
"n02233338",
|
| 333 |
+
"n02236044",
|
| 334 |
+
"n02256656",
|
| 335 |
+
"n02259212",
|
| 336 |
+
"n02264363",
|
| 337 |
+
"n02268443",
|
| 338 |
+
"n02268853",
|
| 339 |
+
"n02276258",
|
| 340 |
+
"n02277742",
|
| 341 |
+
"n02279972",
|
| 342 |
+
"n02280649",
|
| 343 |
+
"n02281406",
|
| 344 |
+
"n02281787",
|
| 345 |
+
"n02317335",
|
| 346 |
+
"n02319095",
|
| 347 |
+
"n02321529",
|
| 348 |
+
"n02325366",
|
| 349 |
+
"n02326432",
|
| 350 |
+
"n02328150",
|
| 351 |
+
"n02342885",
|
| 352 |
+
"n02346627",
|
| 353 |
+
"n02356798",
|
| 354 |
+
"n02361337",
|
| 355 |
+
"n02363005",
|
| 356 |
+
"n02364673",
|
| 357 |
+
"n02389026",
|
| 358 |
+
"n02391049",
|
| 359 |
+
"n02395406",
|
| 360 |
+
"n02396427",
|
| 361 |
+
"n02397096",
|
| 362 |
+
"n02398521",
|
| 363 |
+
"n02403003",
|
| 364 |
+
"n02408429",
|
| 365 |
+
"n02410509",
|
| 366 |
+
"n02412080",
|
| 367 |
+
"n02415577",
|
| 368 |
+
"n02417914",
|
| 369 |
+
"n02422106",
|
| 370 |
+
"n02422699",
|
| 371 |
+
"n02423022",
|
| 372 |
+
"n02437312",
|
| 373 |
+
"n02437616",
|
| 374 |
+
"n02441942",
|
| 375 |
+
"n02442845",
|
| 376 |
+
"n02443114",
|
| 377 |
+
"n02443484",
|
| 378 |
+
"n02444819",
|
| 379 |
+
"n02445715",
|
| 380 |
+
"n02447366",
|
| 381 |
+
"n02454379",
|
| 382 |
+
"n02457408",
|
| 383 |
+
"n02480495",
|
| 384 |
+
"n02480855",
|
| 385 |
+
"n02481823",
|
| 386 |
+
"n02483362",
|
| 387 |
+
"n02483708",
|
| 388 |
+
"n02484975",
|
| 389 |
+
"n02486261",
|
| 390 |
+
"n02486410",
|
| 391 |
+
"n02487347",
|
| 392 |
+
"n02488291",
|
| 393 |
+
"n02488702",
|
| 394 |
+
"n02489166",
|
| 395 |
+
"n02490219",
|
| 396 |
+
"n02492035",
|
| 397 |
+
"n02492660",
|
| 398 |
+
"n02493509",
|
| 399 |
+
"n02493793",
|
| 400 |
+
"n02494079",
|
| 401 |
+
"n02497673",
|
| 402 |
+
"n02500267",
|
| 403 |
+
"n02504013",
|
| 404 |
+
"n02504458",
|
| 405 |
+
"n02509815",
|
| 406 |
+
"n02510455",
|
| 407 |
+
"n02514041",
|
| 408 |
+
"n02526121",
|
| 409 |
+
"n02536864",
|
| 410 |
+
"n02606052",
|
| 411 |
+
"n02607072",
|
| 412 |
+
"n02640242",
|
| 413 |
+
"n02641379",
|
| 414 |
+
"n02643566",
|
| 415 |
+
"n02655020",
|
| 416 |
+
"n02666196",
|
| 417 |
+
"n02667093",
|
| 418 |
+
"n02669723",
|
| 419 |
+
"n02672831",
|
| 420 |
+
"n02676566",
|
| 421 |
+
"n02687172",
|
| 422 |
+
"n02690373",
|
| 423 |
+
"n02692877",
|
| 424 |
+
"n02699494",
|
| 425 |
+
"n02701002",
|
| 426 |
+
"n02704792",
|
| 427 |
+
"n02708093",
|
| 428 |
+
"n02727426",
|
| 429 |
+
"n02730930",
|
| 430 |
+
"n02747177",
|
| 431 |
+
"n02749479",
|
| 432 |
+
"n02769748",
|
| 433 |
+
"n02776631",
|
| 434 |
+
"n02777292",
|
| 435 |
+
"n02782093",
|
| 436 |
+
"n02783161",
|
| 437 |
+
"n02786058",
|
| 438 |
+
"n02787622",
|
| 439 |
+
"n02788148",
|
| 440 |
+
"n02790996",
|
| 441 |
+
"n02791124",
|
| 442 |
+
"n02791270",
|
| 443 |
+
"n02793495",
|
| 444 |
+
"n02794156",
|
| 445 |
+
"n02795169",
|
| 446 |
+
"n02797295",
|
| 447 |
+
"n02799071",
|
| 448 |
+
"n02802426",
|
| 449 |
+
"n02804414",
|
| 450 |
+
"n02804610",
|
| 451 |
+
"n02807133",
|
| 452 |
+
"n02808304",
|
| 453 |
+
"n02808440",
|
| 454 |
+
"n02814533",
|
| 455 |
+
"n02814860",
|
| 456 |
+
"n02815834",
|
| 457 |
+
"n02817516",
|
| 458 |
+
"n02823428",
|
| 459 |
+
"n02823750",
|
| 460 |
+
"n02825657",
|
| 461 |
+
"n02834397",
|
| 462 |
+
"n02835271",
|
| 463 |
+
"n02837789",
|
| 464 |
+
"n02840245",
|
| 465 |
+
"n02841315",
|
| 466 |
+
"n02843684",
|
| 467 |
+
"n02859443",
|
| 468 |
+
"n02860847",
|
| 469 |
+
"n02865351",
|
| 470 |
+
"n02869837",
|
| 471 |
+
"n02870880",
|
| 472 |
+
"n02871525",
|
| 473 |
+
"n02877765",
|
| 474 |
+
"n02879718",
|
| 475 |
+
"n02883205",
|
| 476 |
+
"n02892201",
|
| 477 |
+
"n02892767",
|
| 478 |
+
"n02894605",
|
| 479 |
+
"n02895154",
|
| 480 |
+
"n02906734",
|
| 481 |
+
"n02909870",
|
| 482 |
+
"n02910353",
|
| 483 |
+
"n02916936",
|
| 484 |
+
"n02917067",
|
| 485 |
+
"n02927161",
|
| 486 |
+
"n02930766",
|
| 487 |
+
"n02939185",
|
| 488 |
+
"n02948072",
|
| 489 |
+
"n02950826",
|
| 490 |
+
"n02951358",
|
| 491 |
+
"n02951585",
|
| 492 |
+
"n02963159",
|
| 493 |
+
"n02965783",
|
| 494 |
+
"n02966193",
|
| 495 |
+
"n02966687",
|
| 496 |
+
"n02971356",
|
| 497 |
+
"n02974003",
|
| 498 |
+
"n02977058",
|
| 499 |
+
"n02978881",
|
| 500 |
+
"n02979186",
|
| 501 |
+
"n02980441",
|
| 502 |
+
"n02981792",
|
| 503 |
+
"n02988304",
|
| 504 |
+
"n02992211",
|
| 505 |
+
"n02992529",
|
| 506 |
+
"n02999410",
|
| 507 |
+
"n03000134",
|
| 508 |
+
"n03000247",
|
| 509 |
+
"n03000684",
|
| 510 |
+
"n03014705",
|
| 511 |
+
"n03016953",
|
| 512 |
+
"n03017168",
|
| 513 |
+
"n03018349",
|
| 514 |
+
"n03026506",
|
| 515 |
+
"n03028079",
|
| 516 |
+
"n03032252",
|
| 517 |
+
"n03041632",
|
| 518 |
+
"n03042490",
|
| 519 |
+
"n03045698",
|
| 520 |
+
"n03047690",
|
| 521 |
+
"n03062245",
|
| 522 |
+
"n03063599",
|
| 523 |
+
"n03063689",
|
| 524 |
+
"n03065424",
|
| 525 |
+
"n03075370",
|
| 526 |
+
"n03085013",
|
| 527 |
+
"n03089624",
|
| 528 |
+
"n03095699",
|
| 529 |
+
"n03100240",
|
| 530 |
+
"n03109150",
|
| 531 |
+
"n03110669",
|
| 532 |
+
"n03124043",
|
| 533 |
+
"n03124170",
|
| 534 |
+
"n03125729",
|
| 535 |
+
"n03126707",
|
| 536 |
+
"n03127747",
|
| 537 |
+
"n03127925",
|
| 538 |
+
"n03131574",
|
| 539 |
+
"n03133878",
|
| 540 |
+
"n03134739",
|
| 541 |
+
"n03141823",
|
| 542 |
+
"n03146219",
|
| 543 |
+
"n03160309",
|
| 544 |
+
"n03179701",
|
| 545 |
+
"n03180011",
|
| 546 |
+
"n03187595",
|
| 547 |
+
"n03188531",
|
| 548 |
+
"n03196217",
|
| 549 |
+
"n03197337",
|
| 550 |
+
"n03201208",
|
| 551 |
+
"n03207743",
|
| 552 |
+
"n03207941",
|
| 553 |
+
"n03208938",
|
| 554 |
+
"n03216828",
|
| 555 |
+
"n03218198",
|
| 556 |
+
"n03220513",
|
| 557 |
+
"n03223299",
|
| 558 |
+
"n03240683",
|
| 559 |
+
"n03249569",
|
| 560 |
+
"n03250847",
|
| 561 |
+
"n03255030",
|
| 562 |
+
"n03259280",
|
| 563 |
+
"n03271574",
|
| 564 |
+
"n03272010",
|
| 565 |
+
"n03272562",
|
| 566 |
+
"n03290653",
|
| 567 |
+
"n03291819",
|
| 568 |
+
"n03297495",
|
| 569 |
+
"n03314780",
|
| 570 |
+
"n03325584",
|
| 571 |
+
"n03337140",
|
| 572 |
+
"n03344393",
|
| 573 |
+
"n03345487",
|
| 574 |
+
"n03347037",
|
| 575 |
+
"n03355925",
|
| 576 |
+
"n03372029",
|
| 577 |
+
"n03376595",
|
| 578 |
+
"n03379051",
|
| 579 |
+
"n03384352",
|
| 580 |
+
"n03388043",
|
| 581 |
+
"n03388183",
|
| 582 |
+
"n03388549",
|
| 583 |
+
"n03393912",
|
| 584 |
+
"n03394916",
|
| 585 |
+
"n03400231",
|
| 586 |
+
"n03404251",
|
| 587 |
+
"n03417042",
|
| 588 |
+
"n03424325",
|
| 589 |
+
"n03425413",
|
| 590 |
+
"n03443371",
|
| 591 |
+
"n03444034",
|
| 592 |
+
"n03445777",
|
| 593 |
+
"n03445924",
|
| 594 |
+
"n03447447",
|
| 595 |
+
"n03447721",
|
| 596 |
+
"n03450230",
|
| 597 |
+
"n03452741",
|
| 598 |
+
"n03457902",
|
| 599 |
+
"n03459775",
|
| 600 |
+
"n03461385",
|
| 601 |
+
"n03467068",
|
| 602 |
+
"n03476684",
|
| 603 |
+
"n03476991",
|
| 604 |
+
"n03478589",
|
| 605 |
+
"n03481172",
|
| 606 |
+
"n03482405",
|
| 607 |
+
"n03483316",
|
| 608 |
+
"n03485407",
|
| 609 |
+
"n03485794",
|
| 610 |
+
"n03492542",
|
| 611 |
+
"n03494278",
|
| 612 |
+
"n03495258",
|
| 613 |
+
"n03496892",
|
| 614 |
+
"n03498962",
|
| 615 |
+
"n03527444",
|
| 616 |
+
"n03529860",
|
| 617 |
+
"n03530642",
|
| 618 |
+
"n03532672",
|
| 619 |
+
"n03534580",
|
| 620 |
+
"n03535780",
|
| 621 |
+
"n03538406",
|
| 622 |
+
"n03544143",
|
| 623 |
+
"n03584254",
|
| 624 |
+
"n03584829",
|
| 625 |
+
"n03590841",
|
| 626 |
+
"n03594734",
|
| 627 |
+
"n03594945",
|
| 628 |
+
"n03595614",
|
| 629 |
+
"n03598930",
|
| 630 |
+
"n03599486",
|
| 631 |
+
"n03602883",
|
| 632 |
+
"n03617480",
|
| 633 |
+
"n03623198",
|
| 634 |
+
"n03627232",
|
| 635 |
+
"n03630383",
|
| 636 |
+
"n03633091",
|
| 637 |
+
"n03637318",
|
| 638 |
+
"n03642806",
|
| 639 |
+
"n03649909",
|
| 640 |
+
"n03657121",
|
| 641 |
+
"n03658185",
|
| 642 |
+
"n03661043",
|
| 643 |
+
"n03662601",
|
| 644 |
+
"n03666591",
|
| 645 |
+
"n03670208",
|
| 646 |
+
"n03673027",
|
| 647 |
+
"n03676483",
|
| 648 |
+
"n03680355",
|
| 649 |
+
"n03690938",
|
| 650 |
+
"n03691459",
|
| 651 |
+
"n03692522",
|
| 652 |
+
"n03697007",
|
| 653 |
+
"n03706229",
|
| 654 |
+
"n03709823",
|
| 655 |
+
"n03710193",
|
| 656 |
+
"n03710637",
|
| 657 |
+
"n03710721",
|
| 658 |
+
"n03717622",
|
| 659 |
+
"n03720891",
|
| 660 |
+
"n03721384",
|
| 661 |
+
"n03724870",
|
| 662 |
+
"n03729826",
|
| 663 |
+
"n03733131",
|
| 664 |
+
"n03733281",
|
| 665 |
+
"n03733805",
|
| 666 |
+
"n03742115",
|
| 667 |
+
"n03743016",
|
| 668 |
+
"n03759954",
|
| 669 |
+
"n03761084",
|
| 670 |
+
"n03763968",
|
| 671 |
+
"n03764736",
|
| 672 |
+
"n03769881",
|
| 673 |
+
"n03770439",
|
| 674 |
+
"n03770679",
|
| 675 |
+
"n03773504",
|
| 676 |
+
"n03775071",
|
| 677 |
+
"n03775546",
|
| 678 |
+
"n03776460",
|
| 679 |
+
"n03777568",
|
| 680 |
+
"n03777754",
|
| 681 |
+
"n03781244",
|
| 682 |
+
"n03782006",
|
| 683 |
+
"n03785016",
|
| 684 |
+
"n03786901",
|
| 685 |
+
"n03787032",
|
| 686 |
+
"n03788195",
|
| 687 |
+
"n03788365",
|
| 688 |
+
"n03791053",
|
| 689 |
+
"n03792782",
|
| 690 |
+
"n03792972",
|
| 691 |
+
"n03793489",
|
| 692 |
+
"n03794056",
|
| 693 |
+
"n03796401",
|
| 694 |
+
"n03803284",
|
| 695 |
+
"n03804744",
|
| 696 |
+
"n03814639",
|
| 697 |
+
"n03814906",
|
| 698 |
+
"n03825788",
|
| 699 |
+
"n03832673",
|
| 700 |
+
"n03837869",
|
| 701 |
+
"n03838899",
|
| 702 |
+
"n03840681",
|
| 703 |
+
"n03841143",
|
| 704 |
+
"n03843555",
|
| 705 |
+
"n03854065",
|
| 706 |
+
"n03857828",
|
| 707 |
+
"n03866082",
|
| 708 |
+
"n03868242",
|
| 709 |
+
"n03868863",
|
| 710 |
+
"n03871628",
|
| 711 |
+
"n03873416",
|
| 712 |
+
"n03874293",
|
| 713 |
+
"n03874599",
|
| 714 |
+
"n03876231",
|
| 715 |
+
"n03877472",
|
| 716 |
+
"n03877845",
|
| 717 |
+
"n03884397",
|
| 718 |
+
"n03887697",
|
| 719 |
+
"n03888257",
|
| 720 |
+
"n03888605",
|
| 721 |
+
"n03891251",
|
| 722 |
+
"n03891332",
|
| 723 |
+
"n03895866",
|
| 724 |
+
"n03899768",
|
| 725 |
+
"n03902125",
|
| 726 |
+
"n03903868",
|
| 727 |
+
"n03908618",
|
| 728 |
+
"n03908714",
|
| 729 |
+
"n03916031",
|
| 730 |
+
"n03920288",
|
| 731 |
+
"n03924679",
|
| 732 |
+
"n03929660",
|
| 733 |
+
"n03929855",
|
| 734 |
+
"n03930313",
|
| 735 |
+
"n03930630",
|
| 736 |
+
"n03933933",
|
| 737 |
+
"n03935335",
|
| 738 |
+
"n03937543",
|
| 739 |
+
"n03938244",
|
| 740 |
+
"n03942813",
|
| 741 |
+
"n03944341",
|
| 742 |
+
"n03947888",
|
| 743 |
+
"n03950228",
|
| 744 |
+
"n03954731",
|
| 745 |
+
"n03956157",
|
| 746 |
+
"n03958227",
|
| 747 |
+
"n03961711",
|
| 748 |
+
"n03967562",
|
| 749 |
+
"n03970156",
|
| 750 |
+
"n03976467",
|
| 751 |
+
"n03976657",
|
| 752 |
+
"n03977966",
|
| 753 |
+
"n03980874",
|
| 754 |
+
"n03982430",
|
| 755 |
+
"n03983396",
|
| 756 |
+
"n03991062",
|
| 757 |
+
"n03992509",
|
| 758 |
+
"n03995372",
|
| 759 |
+
"n03998194",
|
| 760 |
+
"n04004767",
|
| 761 |
+
"n04005630",
|
| 762 |
+
"n04008634",
|
| 763 |
+
"n04009552",
|
| 764 |
+
"n04019541",
|
| 765 |
+
"n04023962",
|
| 766 |
+
"n04026417",
|
| 767 |
+
"n04033901",
|
| 768 |
+
"n04033995",
|
| 769 |
+
"n04037443",
|
| 770 |
+
"n04039381",
|
| 771 |
+
"n04040759",
|
| 772 |
+
"n04041544",
|
| 773 |
+
"n04044716",
|
| 774 |
+
"n04049303",
|
| 775 |
+
"n04065272",
|
| 776 |
+
"n04067472",
|
| 777 |
+
"n04069434",
|
| 778 |
+
"n04070727",
|
| 779 |
+
"n04074963",
|
| 780 |
+
"n04081281",
|
| 781 |
+
"n04086273",
|
| 782 |
+
"n04090263",
|
| 783 |
+
"n04099969",
|
| 784 |
+
"n04111531",
|
| 785 |
+
"n04116512",
|
| 786 |
+
"n04118538",
|
| 787 |
+
"n04118776",
|
| 788 |
+
"n04120489",
|
| 789 |
+
"n04125021",
|
| 790 |
+
"n04127249",
|
| 791 |
+
"n04131690",
|
| 792 |
+
"n04133789",
|
| 793 |
+
"n04136333",
|
| 794 |
+
"n04141076",
|
| 795 |
+
"n04141327",
|
| 796 |
+
"n04141975",
|
| 797 |
+
"n04146614",
|
| 798 |
+
"n04147183",
|
| 799 |
+
"n04149813",
|
| 800 |
+
"n04152593",
|
| 801 |
+
"n04153751",
|
| 802 |
+
"n04154565",
|
| 803 |
+
"n04162706",
|
| 804 |
+
"n04179913",
|
| 805 |
+
"n04192698",
|
| 806 |
+
"n04200800",
|
| 807 |
+
"n04201297",
|
| 808 |
+
"n04204238",
|
| 809 |
+
"n04204347",
|
| 810 |
+
"n04208210",
|
| 811 |
+
"n04209133",
|
| 812 |
+
"n04209239",
|
| 813 |
+
"n04228054",
|
| 814 |
+
"n04229816",
|
| 815 |
+
"n04235860",
|
| 816 |
+
"n04238763",
|
| 817 |
+
"n04239074",
|
| 818 |
+
"n04243546",
|
| 819 |
+
"n04251144",
|
| 820 |
+
"n04252077",
|
| 821 |
+
"n04252225",
|
| 822 |
+
"n04254120",
|
| 823 |
+
"n04254680",
|
| 824 |
+
"n04254777",
|
| 825 |
+
"n04258138",
|
| 826 |
+
"n04259630",
|
| 827 |
+
"n04263257",
|
| 828 |
+
"n04264628",
|
| 829 |
+
"n04265275",
|
| 830 |
+
"n04266014",
|
| 831 |
+
"n04270147",
|
| 832 |
+
"n04273569",
|
| 833 |
+
"n04275548",
|
| 834 |
+
"n04277352",
|
| 835 |
+
"n04285008",
|
| 836 |
+
"n04286575",
|
| 837 |
+
"n04296562",
|
| 838 |
+
"n04310018",
|
| 839 |
+
"n04311004",
|
| 840 |
+
"n04311174",
|
| 841 |
+
"n04317175",
|
| 842 |
+
"n04325704",
|
| 843 |
+
"n04326547",
|
| 844 |
+
"n04328186",
|
| 845 |
+
"n04330267",
|
| 846 |
+
"n04332243",
|
| 847 |
+
"n04335435",
|
| 848 |
+
"n04336792",
|
| 849 |
+
"n04344873",
|
| 850 |
+
"n04346328",
|
| 851 |
+
"n04347754",
|
| 852 |
+
"n04350905",
|
| 853 |
+
"n04355338",
|
| 854 |
+
"n04355933",
|
| 855 |
+
"n04356056",
|
| 856 |
+
"n04357314",
|
| 857 |
+
"n04366367",
|
| 858 |
+
"n04367480",
|
| 859 |
+
"n04370456",
|
| 860 |
+
"n04371430",
|
| 861 |
+
"n04371774",
|
| 862 |
+
"n04372370",
|
| 863 |
+
"n04376876",
|
| 864 |
+
"n04380533",
|
| 865 |
+
"n04389033",
|
| 866 |
+
"n04392985",
|
| 867 |
+
"n04398044",
|
| 868 |
+
"n04399382",
|
| 869 |
+
"n04404412",
|
| 870 |
+
"n04409515",
|
| 871 |
+
"n04417672",
|
| 872 |
+
"n04418357",
|
| 873 |
+
"n04423845",
|
| 874 |
+
"n04428191",
|
| 875 |
+
"n04429376",
|
| 876 |
+
"n04435653",
|
| 877 |
+
"n04442312",
|
| 878 |
+
"n04443257",
|
| 879 |
+
"n04447861",
|
| 880 |
+
"n04456115",
|
| 881 |
+
"n04458633",
|
| 882 |
+
"n04461696",
|
| 883 |
+
"n04462240",
|
| 884 |
+
"n04465501",
|
| 885 |
+
"n04467665",
|
| 886 |
+
"n04476259",
|
| 887 |
+
"n04479046",
|
| 888 |
+
"n04482393",
|
| 889 |
+
"n04483307",
|
| 890 |
+
"n04485082",
|
| 891 |
+
"n04486054",
|
| 892 |
+
"n04487081",
|
| 893 |
+
"n04487394",
|
| 894 |
+
"n04493381",
|
| 895 |
+
"n04501370",
|
| 896 |
+
"n04505470",
|
| 897 |
+
"n04507155",
|
| 898 |
+
"n04509417",
|
| 899 |
+
"n04515003",
|
| 900 |
+
"n04517823",
|
| 901 |
+
"n04522168",
|
| 902 |
+
"n04523525",
|
| 903 |
+
"n04525038",
|
| 904 |
+
"n04525305",
|
| 905 |
+
"n04532106",
|
| 906 |
+
"n04532670",
|
| 907 |
+
"n04536866",
|
| 908 |
+
"n04540053",
|
| 909 |
+
"n04542943",
|
| 910 |
+
"n04548280",
|
| 911 |
+
"n04548362",
|
| 912 |
+
"n04550184",
|
| 913 |
+
"n04552348",
|
| 914 |
+
"n04553703",
|
| 915 |
+
"n04554684",
|
| 916 |
+
"n04557648",
|
| 917 |
+
"n04560804",
|
| 918 |
+
"n04562935",
|
| 919 |
+
"n04579145",
|
| 920 |
+
"n04579432",
|
| 921 |
+
"n04584207",
|
| 922 |
+
"n04589890",
|
| 923 |
+
"n04590129",
|
| 924 |
+
"n04591157",
|
| 925 |
+
"n04591713",
|
| 926 |
+
"n04592741",
|
| 927 |
+
"n04596742",
|
| 928 |
+
"n04597913",
|
| 929 |
+
"n04599235",
|
| 930 |
+
"n04604644",
|
| 931 |
+
"n04606251",
|
| 932 |
+
"n04612504",
|
| 933 |
+
"n04613696",
|
| 934 |
+
"n06359193",
|
| 935 |
+
"n06596364",
|
| 936 |
+
"n06785654",
|
| 937 |
+
"n06794110",
|
| 938 |
+
"n06874185",
|
| 939 |
+
"n07248320",
|
| 940 |
+
"n07565083",
|
| 941 |
+
"n07579787",
|
| 942 |
+
"n07583066",
|
| 943 |
+
"n07584110",
|
| 944 |
+
"n07590611",
|
| 945 |
+
"n07613480",
|
| 946 |
+
"n07614500",
|
| 947 |
+
"n07615774",
|
| 948 |
+
"n07684084",
|
| 949 |
+
"n07693725",
|
| 950 |
+
"n07695742",
|
| 951 |
+
"n07697313",
|
| 952 |
+
"n07697537",
|
| 953 |
+
"n07711569",
|
| 954 |
+
"n07714571",
|
| 955 |
+
"n07714990",
|
| 956 |
+
"n07715103",
|
| 957 |
+
"n07716358",
|
| 958 |
+
"n07716906",
|
| 959 |
+
"n07717410",
|
| 960 |
+
"n07717556",
|
| 961 |
+
"n07718472",
|
| 962 |
+
"n07718747",
|
| 963 |
+
"n07720875",
|
| 964 |
+
"n07730033",
|
| 965 |
+
"n07734744",
|
| 966 |
+
"n07742313",
|
| 967 |
+
"n07745940",
|
| 968 |
+
"n07747607",
|
| 969 |
+
"n07749582",
|
| 970 |
+
"n07753113",
|
| 971 |
+
"n07753275",
|
| 972 |
+
"n07753592",
|
| 973 |
+
"n07754684",
|
| 974 |
+
"n07760859",
|
| 975 |
+
"n07768694",
|
| 976 |
+
"n07802026",
|
| 977 |
+
"n07831146",
|
| 978 |
+
"n07836838",
|
| 979 |
+
"n07860988",
|
| 980 |
+
"n07871810",
|
| 981 |
+
"n07873807",
|
| 982 |
+
"n07875152",
|
| 983 |
+
"n07880968",
|
| 984 |
+
"n07892512",
|
| 985 |
+
"n07920052",
|
| 986 |
+
"n07930864",
|
| 987 |
+
"n07932039",
|
| 988 |
+
"n09193705",
|
| 989 |
+
"n09229709",
|
| 990 |
+
"n09246464",
|
| 991 |
+
"n09256479",
|
| 992 |
+
"n09288635",
|
| 993 |
+
"n09332890",
|
| 994 |
+
"n09399592",
|
| 995 |
+
"n09421951",
|
| 996 |
+
"n09428293",
|
| 997 |
+
"n09468604",
|
| 998 |
+
"n09472597",
|
| 999 |
+
"n09835506",
|
| 1000 |
+
"n10148035",
|
| 1001 |
+
"n10565667",
|
| 1002 |
+
"n11879895",
|
| 1003 |
+
"n11939491",
|
| 1004 |
+
"n12057211",
|
| 1005 |
+
"n12144580",
|
| 1006 |
+
"n12267677",
|
| 1007 |
+
"n12620546",
|
| 1008 |
+
"n12768682",
|
| 1009 |
+
"n12985857",
|
| 1010 |
+
"n12998815",
|
| 1011 |
+
"n13037406",
|
| 1012 |
+
"n13040303",
|
| 1013 |
+
"n13044778",
|
| 1014 |
+
"n13052670",
|
| 1015 |
+
"n13054560",
|
| 1016 |
+
"n13133613",
|
| 1017 |
+
"n15075141"
|
| 1018 |
+
],
|
| 1019 |
+
"class_names": [
|
| 1020 |
+
"tench, Tinca tinca",
|
| 1021 |
+
"goldfish, Carassius auratus",
|
| 1022 |
+
"great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias",
|
| 1023 |
+
"tiger shark, Galeocerdo cuvieri",
|
| 1024 |
+
"hammerhead, hammerhead shark",
|
| 1025 |
+
"electric ray, crampfish, numbfish, torpedo",
|
| 1026 |
+
"stingray",
|
| 1027 |
+
"cock",
|
| 1028 |
+
"hen",
|
| 1029 |
+
"ostrich, Struthio camelus",
|
| 1030 |
+
"brambling, Fringilla montifringilla",
|
| 1031 |
+
"goldfinch, Carduelis carduelis",
|
| 1032 |
+
"house finch, linnet, Carpodacus mexicanus",
|
| 1033 |
+
"junco, snowbird",
|
| 1034 |
+
"indigo bunting, indigo finch, indigo bird, Passerina cyanea",
|
| 1035 |
+
"robin, American robin, Turdus migratorius",
|
| 1036 |
+
"bulbul",
|
| 1037 |
+
"jay",
|
| 1038 |
+
"magpie",
|
| 1039 |
+
"chickadee",
|
| 1040 |
+
"water ouzel, dipper",
|
| 1041 |
+
"kite",
|
| 1042 |
+
"bald eagle, American eagle, Haliaeetus leucocephalus",
|
| 1043 |
+
"vulture",
|
| 1044 |
+
"great grey owl, great gray owl, Strix nebulosa",
|
| 1045 |
+
"European fire salamander, Salamandra salamandra",
|
| 1046 |
+
"common newt, Triturus vulgaris",
|
| 1047 |
+
"eft",
|
| 1048 |
+
"spotted salamander, Ambystoma maculatum",
|
| 1049 |
+
"axolotl, mud puppy, Ambystoma mexicanum",
|
| 1050 |
+
"bullfrog, Rana catesbeiana",
|
| 1051 |
+
"tree frog, tree-frog",
|
| 1052 |
+
"tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui",
|
| 1053 |
+
"loggerhead, loggerhead turtle, Caretta caretta",
|
| 1054 |
+
"leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea",
|
| 1055 |
+
"mud turtle",
|
| 1056 |
+
"terrapin",
|
| 1057 |
+
"box turtle, box tortoise",
|
| 1058 |
+
"banded gecko",
|
| 1059 |
+
"common iguana, iguana, Iguana iguana",
|
| 1060 |
+
"American chameleon, anole, Anolis carolinensis",
|
| 1061 |
+
"whiptail, whiptail lizard",
|
| 1062 |
+
"agama",
|
| 1063 |
+
"frilled lizard, Chlamydosaurus kingi",
|
| 1064 |
+
"alligator lizard",
|
| 1065 |
+
"Gila monster, Heloderma suspectum",
|
| 1066 |
+
"green lizard, Lacerta viridis",
|
| 1067 |
+
"African chameleon, Chamaeleo chamaeleon",
|
| 1068 |
+
"Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis",
|
| 1069 |
+
"African crocodile, Nile crocodile, Crocodylus niloticus",
|
| 1070 |
+
"American alligator, Alligator mississipiensis",
|
| 1071 |
+
"triceratops",
|
| 1072 |
+
"thunder snake, worm snake, Carphophis amoenus",
|
| 1073 |
+
"ringneck snake, ring-necked snake, ring snake",
|
| 1074 |
+
"hognose snake, puff adder, sand viper",
|
| 1075 |
+
"green snake, grass snake",
|
| 1076 |
+
"king snake, kingsnake",
|
| 1077 |
+
"garter snake, grass snake",
|
| 1078 |
+
"water snake",
|
| 1079 |
+
"vine snake",
|
| 1080 |
+
"night snake, Hypsiglena torquata",
|
| 1081 |
+
"boa constrictor, Constrictor constrictor",
|
| 1082 |
+
"rock python, rock snake, Python sebae",
|
| 1083 |
+
"Indian cobra, Naja naja",
|
| 1084 |
+
"green mamba",
|
| 1085 |
+
"sea snake",
|
| 1086 |
+
"horned viper, cerastes, sand viper, horned asp, Cerastes cornutus",
|
| 1087 |
+
"diamondback, diamondback rattlesnake, Crotalus adamanteus",
|
| 1088 |
+
"sidewinder, horned rattlesnake, Crotalus cerastes",
|
| 1089 |
+
"trilobite",
|
| 1090 |
+
"harvestman, daddy longlegs, Phalangium opilio",
|
| 1091 |
+
"scorpion",
|
| 1092 |
+
"black and gold garden spider, Argiope aurantia",
|
| 1093 |
+
"barn spider, Araneus cavaticus",
|
| 1094 |
+
"garden spider, Aranea diademata",
|
| 1095 |
+
"black widow, Latrodectus mactans",
|
| 1096 |
+
"tarantula",
|
| 1097 |
+
"wolf spider, hunting spider",
|
| 1098 |
+
"tick",
|
| 1099 |
+
"centipede",
|
| 1100 |
+
"black grouse",
|
| 1101 |
+
"ptarmigan",
|
| 1102 |
+
"ruffed grouse, partridge, Bonasa umbellus",
|
| 1103 |
+
"prairie chicken, prairie grouse, prairie fowl",
|
| 1104 |
+
"peacock",
|
| 1105 |
+
"quail",
|
| 1106 |
+
"partridge",
|
| 1107 |
+
"African grey, African gray, Psittacus erithacus",
|
| 1108 |
+
"macaw",
|
| 1109 |
+
"sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita",
|
| 1110 |
+
"lorikeet",
|
| 1111 |
+
"coucal",
|
| 1112 |
+
"bee eater",
|
| 1113 |
+
"hornbill",
|
| 1114 |
+
"hummingbird",
|
| 1115 |
+
"jacamar",
|
| 1116 |
+
"toucan",
|
| 1117 |
+
"drake",
|
| 1118 |
+
"red-breasted merganser, Mergus serrator",
|
| 1119 |
+
"goose",
|
| 1120 |
+
"black swan, Cygnus atratus",
|
| 1121 |
+
"tusker",
|
| 1122 |
+
"echidna, spiny anteater, anteater",
|
| 1123 |
+
"platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus",
|
| 1124 |
+
"wallaby, brush kangaroo",
|
| 1125 |
+
"koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus",
|
| 1126 |
+
"wombat",
|
| 1127 |
+
"jellyfish",
|
| 1128 |
+
"sea anemone, anemone",
|
| 1129 |
+
"brain coral",
|
| 1130 |
+
"flatworm, platyhelminth",
|
| 1131 |
+
"nematode, nematode worm, roundworm",
|
| 1132 |
+
"conch",
|
| 1133 |
+
"snail",
|
| 1134 |
+
"slug",
|
| 1135 |
+
"sea slug, nudibranch",
|
| 1136 |
+
"chiton, coat-of-mail shell, sea cradle, polyplacophore",
|
| 1137 |
+
"chambered nautilus, pearly nautilus, nautilus",
|
| 1138 |
+
"Dungeness crab, Cancer magister",
|
| 1139 |
+
"rock crab, Cancer irroratus",
|
| 1140 |
+
"fiddler crab",
|
| 1141 |
+
"king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica",
|
| 1142 |
+
"American lobster, Northern lobster, Maine lobster, Homarus americanus",
|
| 1143 |
+
"spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish",
|
| 1144 |
+
"crayfish, crawfish, crawdad, crawdaddy",
|
| 1145 |
+
"hermit crab",
|
| 1146 |
+
"isopod",
|
| 1147 |
+
"white stork, Ciconia ciconia",
|
| 1148 |
+
"black stork, Ciconia nigra",
|
| 1149 |
+
"spoonbill",
|
| 1150 |
+
"flamingo",
|
| 1151 |
+
"little blue heron, Egretta caerulea",
|
| 1152 |
+
"American egret, great white heron, Egretta albus",
|
| 1153 |
+
"bittern",
|
| 1154 |
+
"crane",
|
| 1155 |
+
"limpkin, Aramus pictus",
|
| 1156 |
+
"European gallinule, Porphyrio porphyrio",
|
| 1157 |
+
"American coot, marsh hen, mud hen, water hen, Fulica americana",
|
| 1158 |
+
"bustard",
|
| 1159 |
+
"ruddy turnstone, Arenaria interpres",
|
| 1160 |
+
"red-backed sandpiper, dunlin, Erolia alpina",
|
| 1161 |
+
"redshank, Tringa totanus",
|
| 1162 |
+
"dowitcher",
|
| 1163 |
+
"oystercatcher, oyster catcher",
|
| 1164 |
+
"pelican",
|
| 1165 |
+
"king penguin, Aptenodytes patagonica",
|
| 1166 |
+
"albatross, mollymawk",
|
| 1167 |
+
"grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus",
|
| 1168 |
+
"killer whale, killer, orca, grampus, sea wolf, Orcinus orca",
|
| 1169 |
+
"dugong, Dugong dugon",
|
| 1170 |
+
"sea lion",
|
| 1171 |
+
"Chihuahua",
|
| 1172 |
+
"Japanese spaniel",
|
| 1173 |
+
"Maltese dog, Maltese terrier, Maltese",
|
| 1174 |
+
"Pekinese, Pekingese, Peke",
|
| 1175 |
+
"Shih-Tzu",
|
| 1176 |
+
"Blenheim spaniel",
|
| 1177 |
+
"papillon",
|
| 1178 |
+
"toy terrier",
|
| 1179 |
+
"Rhodesian ridgeback",
|
| 1180 |
+
"Afghan hound, Afghan",
|
| 1181 |
+
"basset, basset hound",
|
| 1182 |
+
"beagle",
|
| 1183 |
+
"bloodhound, sleuthhound",
|
| 1184 |
+
"bluetick",
|
| 1185 |
+
"black-and-tan coonhound",
|
| 1186 |
+
"Walker hound, Walker foxhound",
|
| 1187 |
+
"English foxhound",
|
| 1188 |
+
"redbone",
|
| 1189 |
+
"borzoi, Russian wolfhound",
|
| 1190 |
+
"Irish wolfhound",
|
| 1191 |
+
"Italian greyhound",
|
| 1192 |
+
"whippet",
|
| 1193 |
+
"Ibizan hound, Ibizan Podenco",
|
| 1194 |
+
"Norwegian elkhound, elkhound",
|
| 1195 |
+
"otterhound, otter hound",
|
| 1196 |
+
"Saluki, gazelle hound",
|
| 1197 |
+
"Scottish deerhound, deerhound",
|
| 1198 |
+
"Weimaraner",
|
| 1199 |
+
"Staffordshire bullterrier, Staffordshire bull terrier",
|
| 1200 |
+
"American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier",
|
| 1201 |
+
"Bedlington terrier",
|
| 1202 |
+
"Border terrier",
|
| 1203 |
+
"Kerry blue terrier",
|
| 1204 |
+
"Irish terrier",
|
| 1205 |
+
"Norfolk terrier",
|
| 1206 |
+
"Norwich terrier",
|
| 1207 |
+
"Yorkshire terrier",
|
| 1208 |
+
"wire-haired fox terrier",
|
| 1209 |
+
"Lakeland terrier",
|
| 1210 |
+
"Sealyham terrier, Sealyham",
|
| 1211 |
+
"Airedale, Airedale terrier",
|
| 1212 |
+
"cairn, cairn terrier",
|
| 1213 |
+
"Australian terrier",
|
| 1214 |
+
"Dandie Dinmont, Dandie Dinmont terrier",
|
| 1215 |
+
"Boston bull, Boston terrier",
|
| 1216 |
+
"miniature schnauzer",
|
| 1217 |
+
"giant schnauzer",
|
| 1218 |
+
"standard schnauzer",
|
| 1219 |
+
"Scotch terrier, Scottish terrier, Scottie",
|
| 1220 |
+
"Tibetan terrier, chrysanthemum dog",
|
| 1221 |
+
"silky terrier, Sydney silky",
|
| 1222 |
+
"soft-coated wheaten terrier",
|
| 1223 |
+
"West Highland white terrier",
|
| 1224 |
+
"Lhasa, Lhasa apso",
|
| 1225 |
+
"flat-coated retriever",
|
| 1226 |
+
"curly-coated retriever",
|
| 1227 |
+
"golden retriever",
|
| 1228 |
+
"Labrador retriever",
|
| 1229 |
+
"Chesapeake Bay retriever",
|
| 1230 |
+
"German short-haired pointer",
|
| 1231 |
+
"vizsla, Hungarian pointer",
|
| 1232 |
+
"English setter",
|
| 1233 |
+
"Irish setter, red setter",
|
| 1234 |
+
"Gordon setter",
|
| 1235 |
+
"Brittany spaniel",
|
| 1236 |
+
"clumber, clumber spaniel",
|
| 1237 |
+
"English springer, English springer spaniel",
|
| 1238 |
+
"Welsh springer spaniel",
|
| 1239 |
+
"cocker spaniel, English cocker spaniel, cocker",
|
| 1240 |
+
"Sussex spaniel",
|
| 1241 |
+
"Irish water spaniel",
|
| 1242 |
+
"kuvasz",
|
| 1243 |
+
"schipperke",
|
| 1244 |
+
"groenendael",
|
| 1245 |
+
"malinois",
|
| 1246 |
+
"briard",
|
| 1247 |
+
"kelpie",
|
| 1248 |
+
"komondor",
|
| 1249 |
+
"Old English sheepdog, bobtail",
|
| 1250 |
+
"Shetland sheepdog, Shetland sheep dog, Shetland",
|
| 1251 |
+
"collie",
|
| 1252 |
+
"Border collie",
|
| 1253 |
+
"Bouvier des Flandres, Bouviers des Flandres",
|
| 1254 |
+
"Rottweiler",
|
| 1255 |
+
"German shepherd, German shepherd dog, German police dog, alsatian",
|
| 1256 |
+
"Doberman, Doberman pinscher",
|
| 1257 |
+
"miniature pinscher",
|
| 1258 |
+
"Greater Swiss Mountain dog",
|
| 1259 |
+
"Bernese mountain dog",
|
| 1260 |
+
"Appenzeller",
|
| 1261 |
+
"EntleBucher",
|
| 1262 |
+
"boxer",
|
| 1263 |
+
"bull mastiff",
|
| 1264 |
+
"Tibetan mastiff",
|
| 1265 |
+
"French bulldog",
|
| 1266 |
+
"Great Dane",
|
| 1267 |
+
"Saint Bernard, St Bernard",
|
| 1268 |
+
"Eskimo dog, husky",
|
| 1269 |
+
"malamute, malemute, Alaskan malamute",
|
| 1270 |
+
"Siberian husky",
|
| 1271 |
+
"dalmatian, coach dog, carriage dog",
|
| 1272 |
+
"affenpinscher, monkey pinscher, monkey dog",
|
| 1273 |
+
"basenji",
|
| 1274 |
+
"pug, pug-dog",
|
| 1275 |
+
"Leonberg",
|
| 1276 |
+
"Newfoundland, Newfoundland dog",
|
| 1277 |
+
"Great Pyrenees",
|
| 1278 |
+
"Samoyed, Samoyede",
|
| 1279 |
+
"Pomeranian",
|
| 1280 |
+
"chow, chow chow",
|
| 1281 |
+
"keeshond",
|
| 1282 |
+
"Brabancon griffon",
|
| 1283 |
+
"Pembroke, Pembroke Welsh corgi",
|
| 1284 |
+
"Cardigan, Cardigan Welsh corgi",
|
| 1285 |
+
"toy poodle",
|
| 1286 |
+
"miniature poodle",
|
| 1287 |
+
"standard poodle",
|
| 1288 |
+
"Mexican hairless",
|
| 1289 |
+
"timber wolf, grey wolf, gray wolf, Canis lupus",
|
| 1290 |
+
"white wolf, Arctic wolf, Canis lupus tundrarum",
|
| 1291 |
+
"red wolf, maned wolf, Canis rufus, Canis niger",
|
| 1292 |
+
"coyote, prairie wolf, brush wolf, Canis latrans",
|
| 1293 |
+
"dingo, warrigal, warragal, Canis dingo",
|
| 1294 |
+
"dhole, Cuon alpinus",
|
| 1295 |
+
"African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus",
|
| 1296 |
+
"hyena, hyaena",
|
| 1297 |
+
"red fox, Vulpes vulpes",
|
| 1298 |
+
"kit fox, Vulpes macrotis",
|
| 1299 |
+
"Arctic fox, white fox, Alopex lagopus",
|
| 1300 |
+
"grey fox, gray fox, Urocyon cinereoargenteus",
|
| 1301 |
+
"tabby, tabby cat",
|
| 1302 |
+
"tiger cat",
|
| 1303 |
+
"Persian cat",
|
| 1304 |
+
"Siamese cat, Siamese",
|
| 1305 |
+
"Egyptian cat",
|
| 1306 |
+
"cougar, puma, catamount, mountain lion, painter, panther, Felis concolor",
|
| 1307 |
+
"lynx, catamount",
|
| 1308 |
+
"leopard, Panthera pardus",
|
| 1309 |
+
"snow leopard, ounce, Panthera uncia",
|
| 1310 |
+
"jaguar, panther, Panthera onca, Felis onca",
|
| 1311 |
+
"lion, king of beasts, Panthera leo",
|
| 1312 |
+
"tiger, Panthera tigris",
|
| 1313 |
+
"cheetah, chetah, Acinonyx jubatus",
|
| 1314 |
+
"brown bear, bruin, Ursus arctos",
|
| 1315 |
+
"American black bear, black bear, Ursus americanus, Euarctos americanus",
|
| 1316 |
+
"ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus",
|
| 1317 |
+
"sloth bear, Melursus ursinus, Ursus ursinus",
|
| 1318 |
+
"mongoose",
|
| 1319 |
+
"meerkat, mierkat",
|
| 1320 |
+
"tiger beetle",
|
| 1321 |
+
"ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle",
|
| 1322 |
+
"ground beetle, carabid beetle",
|
| 1323 |
+
"long-horned beetle, longicorn, longicorn beetle",
|
| 1324 |
+
"leaf beetle, chrysomelid",
|
| 1325 |
+
"dung beetle",
|
| 1326 |
+
"rhinoceros beetle",
|
| 1327 |
+
"weevil",
|
| 1328 |
+
"fly",
|
| 1329 |
+
"bee",
|
| 1330 |
+
"ant, emmet, pismire",
|
| 1331 |
+
"grasshopper, hopper",
|
| 1332 |
+
"cricket",
|
| 1333 |
+
"walking stick, walkingstick, stick insect",
|
| 1334 |
+
"cockroach, roach",
|
| 1335 |
+
"mantis, mantid",
|
| 1336 |
+
"cicada, cicala",
|
| 1337 |
+
"leafhopper",
|
| 1338 |
+
"lacewing, lacewing fly",
|
| 1339 |
+
"dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk",
|
| 1340 |
+
"damselfly",
|
| 1341 |
+
"admiral",
|
| 1342 |
+
"ringlet, ringlet butterfly",
|
| 1343 |
+
"monarch, monarch butterfly, milkweed butterfly, Danaus plexippus",
|
| 1344 |
+
"cabbage butterfly",
|
| 1345 |
+
"sulphur butterfly, sulfur butterfly",
|
| 1346 |
+
"lycaenid, lycaenid butterfly",
|
| 1347 |
+
"starfish, sea star",
|
| 1348 |
+
"sea urchin",
|
| 1349 |
+
"sea cucumber, holothurian",
|
| 1350 |
+
"wood rabbit, cottontail, cottontail rabbit",
|
| 1351 |
+
"hare",
|
| 1352 |
+
"Angora, Angora rabbit",
|
| 1353 |
+
"hamster",
|
| 1354 |
+
"porcupine, hedgehog",
|
| 1355 |
+
"fox squirrel, eastern fox squirrel, Sciurus niger",
|
| 1356 |
+
"marmot",
|
| 1357 |
+
"beaver",
|
| 1358 |
+
"guinea pig, Cavia cobaya",
|
| 1359 |
+
"sorrel",
|
| 1360 |
+
"zebra",
|
| 1361 |
+
"hog, pig, grunter, squealer, Sus scrofa",
|
| 1362 |
+
"wild boar, boar, Sus scrofa",
|
| 1363 |
+
"warthog",
|
| 1364 |
+
"hippopotamus, hippo, river horse, Hippopotamus amphibius",
|
| 1365 |
+
"ox",
|
| 1366 |
+
"water buffalo, water ox, Asiatic buffalo, Bubalus bubalis",
|
| 1367 |
+
"bison",
|
| 1368 |
+
"ram, tup",
|
| 1369 |
+
"bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis",
|
| 1370 |
+
"ibex, Capra ibex",
|
| 1371 |
+
"hartebeest",
|
| 1372 |
+
"impala, Aepyceros melampus",
|
| 1373 |
+
"gazelle",
|
| 1374 |
+
"Arabian camel, dromedary, Camelus dromedarius",
|
| 1375 |
+
"llama",
|
| 1376 |
+
"weasel",
|
| 1377 |
+
"mink",
|
| 1378 |
+
"polecat, fitch, foulmart, foumart, Mustela putorius",
|
| 1379 |
+
"black-footed ferret, ferret, Mustela nigripes",
|
| 1380 |
+
"otter",
|
| 1381 |
+
"skunk, polecat, wood pussy",
|
| 1382 |
+
"badger",
|
| 1383 |
+
"armadillo",
|
| 1384 |
+
"three-toed sloth, ai, Bradypus tridactylus",
|
| 1385 |
+
"orangutan, orang, orangutang, Pongo pygmaeus",
|
| 1386 |
+
"gorilla, Gorilla gorilla",
|
| 1387 |
+
"chimpanzee, chimp, Pan troglodytes",
|
| 1388 |
+
"gibbon, Hylobates lar",
|
| 1389 |
+
"siamang, Hylobates syndactylus, Symphalangus syndactylus",
|
| 1390 |
+
"guenon, guenon monkey",
|
| 1391 |
+
"patas, hussar monkey, Erythrocebus patas",
|
| 1392 |
+
"baboon",
|
| 1393 |
+
"macaque",
|
| 1394 |
+
"langur",
|
| 1395 |
+
"colobus, colobus monkey",
|
| 1396 |
+
"proboscis monkey, Nasalis larvatus",
|
| 1397 |
+
"marmoset",
|
| 1398 |
+
"capuchin, ringtail, Cebus capucinus",
|
| 1399 |
+
"howler monkey, howler",
|
| 1400 |
+
"titi, titi monkey",
|
| 1401 |
+
"spider monkey, Ateles geoffroyi",
|
| 1402 |
+
"squirrel monkey, Saimiri sciureus",
|
| 1403 |
+
"Madagascar cat, ring-tailed lemur, Lemur catta",
|
| 1404 |
+
"indri, indris, Indri indri, Indri brevicaudatus",
|
| 1405 |
+
"Indian elephant, Elephas maximus",
|
| 1406 |
+
"African elephant, Loxodonta africana",
|
| 1407 |
+
"lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens",
|
| 1408 |
+
"giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca",
|
| 1409 |
+
"barracouta, snoek",
|
| 1410 |
+
"eel",
|
| 1411 |
+
"coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch",
|
| 1412 |
+
"rock beauty, Holocanthus tricolor",
|
| 1413 |
+
"anemone fish",
|
| 1414 |
+
"sturgeon",
|
| 1415 |
+
"gar, garfish, garpike, billfish, Lepisosteus osseus",
|
| 1416 |
+
"lionfish",
|
| 1417 |
+
"puffer, pufferfish, blowfish, globefish",
|
| 1418 |
+
"abacus",
|
| 1419 |
+
"abaya",
|
| 1420 |
+
"academic gown, academic robe, judge's robe",
|
| 1421 |
+
"accordion, piano accordion, squeeze box",
|
| 1422 |
+
"acoustic guitar",
|
| 1423 |
+
"aircraft carrier, carrier, flattop, attack aircraft carrier",
|
| 1424 |
+
"airliner",
|
| 1425 |
+
"airship, dirigible",
|
| 1426 |
+
"altar",
|
| 1427 |
+
"ambulance",
|
| 1428 |
+
"amphibian, amphibious vehicle",
|
| 1429 |
+
"analog clock",
|
| 1430 |
+
"apiary, bee house",
|
| 1431 |
+
"apron",
|
| 1432 |
+
"ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin",
|
| 1433 |
+
"assault rifle, assault gun",
|
| 1434 |
+
"backpack, back pack, knapsack, packsack, rucksack, haversack",
|
| 1435 |
+
"bakery, bakeshop, bakehouse",
|
| 1436 |
+
"balance beam, beam",
|
| 1437 |
+
"balloon",
|
| 1438 |
+
"ballpoint, ballpoint pen, ballpen, Biro",
|
| 1439 |
+
"Band Aid",
|
| 1440 |
+
"banjo",
|
| 1441 |
+
"bannister, banister, balustrade, balusters, handrail",
|
| 1442 |
+
"barbell",
|
| 1443 |
+
"barber chair",
|
| 1444 |
+
"barbershop",
|
| 1445 |
+
"barn",
|
| 1446 |
+
"barometer",
|
| 1447 |
+
"barrel, cask",
|
| 1448 |
+
"barrow, garden cart, lawn cart, wheelbarrow",
|
| 1449 |
+
"baseball",
|
| 1450 |
+
"basketball",
|
| 1451 |
+
"bassinet",
|
| 1452 |
+
"bassoon",
|
| 1453 |
+
"bathing cap, swimming cap",
|
| 1454 |
+
"bath towel",
|
| 1455 |
+
"bathtub, bathing tub, bath, tub",
|
| 1456 |
+
"beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon",
|
| 1457 |
+
"beacon, lighthouse, beacon light, pharos",
|
| 1458 |
+
"beaker",
|
| 1459 |
+
"bearskin, busby, shako",
|
| 1460 |
+
"beer bottle",
|
| 1461 |
+
"beer glass",
|
| 1462 |
+
"bell cote, bell cot",
|
| 1463 |
+
"bib",
|
| 1464 |
+
"bicycle-built-for-two, tandem bicycle, tandem",
|
| 1465 |
+
"bikini, two-piece",
|
| 1466 |
+
"binder, ring-binder",
|
| 1467 |
+
"binoculars, field glasses, opera glasses",
|
| 1468 |
+
"birdhouse",
|
| 1469 |
+
"boathouse",
|
| 1470 |
+
"bobsled, bobsleigh, bob",
|
| 1471 |
+
"bolo tie, bolo, bola tie, bola",
|
| 1472 |
+
"bonnet, poke bonnet",
|
| 1473 |
+
"bookcase",
|
| 1474 |
+
"bookshop, bookstore, bookstall",
|
| 1475 |
+
"bottlecap",
|
| 1476 |
+
"bow",
|
| 1477 |
+
"bow tie, bow-tie, bowtie",
|
| 1478 |
+
"brass, memorial tablet, plaque",
|
| 1479 |
+
"brassiere, bra, bandeau",
|
| 1480 |
+
"breakwater, groin, groyne, mole, bulwark, seawall, jetty",
|
| 1481 |
+
"breastplate, aegis, egis",
|
| 1482 |
+
"broom",
|
| 1483 |
+
"bucket, pail",
|
| 1484 |
+
"buckle",
|
| 1485 |
+
"bulletproof vest",
|
| 1486 |
+
"bullet train, bullet",
|
| 1487 |
+
"butcher shop, meat market",
|
| 1488 |
+
"cab, hack, taxi, taxicab",
|
| 1489 |
+
"caldron, cauldron",
|
| 1490 |
+
"candle, taper, wax light",
|
| 1491 |
+
"cannon",
|
| 1492 |
+
"canoe",
|
| 1493 |
+
"can opener, tin opener",
|
| 1494 |
+
"cardigan",
|
| 1495 |
+
"car mirror",
|
| 1496 |
+
"carousel, carrousel, merry-go-round, roundabout, whirligig",
|
| 1497 |
+
"carpenter's kit, tool kit",
|
| 1498 |
+
"carton",
|
| 1499 |
+
"car wheel",
|
| 1500 |
+
"cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM",
|
| 1501 |
+
"cassette",
|
| 1502 |
+
"cassette player",
|
| 1503 |
+
"castle",
|
| 1504 |
+
"catamaran",
|
| 1505 |
+
"CD player",
|
| 1506 |
+
"cello, violoncello",
|
| 1507 |
+
"cellular telephone, cellular phone, cellphone, cell, mobile phone",
|
| 1508 |
+
"chain",
|
| 1509 |
+
"chainlink fence",
|
| 1510 |
+
"chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour",
|
| 1511 |
+
"chain saw, chainsaw",
|
| 1512 |
+
"chest",
|
| 1513 |
+
"chiffonier, commode",
|
| 1514 |
+
"chime, bell, gong",
|
| 1515 |
+
"china cabinet, china closet",
|
| 1516 |
+
"Christmas stocking",
|
| 1517 |
+
"church, church building",
|
| 1518 |
+
"cinema, movie theater, movie theatre, movie house, picture palace",
|
| 1519 |
+
"cleaver, meat cleaver, chopper",
|
| 1520 |
+
"cliff dwelling",
|
| 1521 |
+
"cloak",
|
| 1522 |
+
"clog, geta, patten, sabot",
|
| 1523 |
+
"cocktail shaker",
|
| 1524 |
+
"coffee mug",
|
| 1525 |
+
"coffeepot",
|
| 1526 |
+
"coil, spiral, volute, whorl, helix",
|
| 1527 |
+
"combination lock",
|
| 1528 |
+
"computer keyboard, keypad",
|
| 1529 |
+
"confectionery, confectionary, candy store",
|
| 1530 |
+
"container ship, containership, container vessel",
|
| 1531 |
+
"convertible",
|
| 1532 |
+
"corkscrew, bottle screw",
|
| 1533 |
+
"cornet, horn, trumpet, trump",
|
| 1534 |
+
"cowboy boot",
|
| 1535 |
+
"cowboy hat, ten-gallon hat",
|
| 1536 |
+
"cradle",
|
| 1537 |
+
"crane",
|
| 1538 |
+
"crash helmet",
|
| 1539 |
+
"crate",
|
| 1540 |
+
"crib, cot",
|
| 1541 |
+
"Crock Pot",
|
| 1542 |
+
"croquet ball",
|
| 1543 |
+
"crutch",
|
| 1544 |
+
"cuirass",
|
| 1545 |
+
"dam, dike, dyke",
|
| 1546 |
+
"desk",
|
| 1547 |
+
"desktop computer",
|
| 1548 |
+
"dial telephone, dial phone",
|
| 1549 |
+
"diaper, nappy, napkin",
|
| 1550 |
+
"digital clock",
|
| 1551 |
+
"digital watch",
|
| 1552 |
+
"dining table, board",
|
| 1553 |
+
"dishrag, dishcloth",
|
| 1554 |
+
"dishwasher, dish washer, dishwashing machine",
|
| 1555 |
+
"disk brake, disc brake",
|
| 1556 |
+
"dock, dockage, docking facility",
|
| 1557 |
+
"dogsled, dog sled, dog sleigh",
|
| 1558 |
+
"dome",
|
| 1559 |
+
"doormat, welcome mat",
|
| 1560 |
+
"drilling platform, offshore rig",
|
| 1561 |
+
"drum, membranophone, tympan",
|
| 1562 |
+
"drumstick",
|
| 1563 |
+
"dumbbell",
|
| 1564 |
+
"Dutch oven",
|
| 1565 |
+
"electric fan, blower",
|
| 1566 |
+
"electric guitar",
|
| 1567 |
+
"electric locomotive",
|
| 1568 |
+
"entertainment center",
|
| 1569 |
+
"envelope",
|
| 1570 |
+
"espresso maker",
|
| 1571 |
+
"face powder",
|
| 1572 |
+
"feather boa, boa",
|
| 1573 |
+
"file, file cabinet, filing cabinet",
|
| 1574 |
+
"fireboat",
|
| 1575 |
+
"fire engine, fire truck",
|
| 1576 |
+
"fire screen, fireguard",
|
| 1577 |
+
"flagpole, flagstaff",
|
| 1578 |
+
"flute, transverse flute",
|
| 1579 |
+
"folding chair",
|
| 1580 |
+
"football helmet",
|
| 1581 |
+
"forklift",
|
| 1582 |
+
"fountain",
|
| 1583 |
+
"fountain pen",
|
| 1584 |
+
"four-poster",
|
| 1585 |
+
"freight car",
|
| 1586 |
+
"French horn, horn",
|
| 1587 |
+
"frying pan, frypan, skillet",
|
| 1588 |
+
"fur coat",
|
| 1589 |
+
"garbage truck, dustcart",
|
| 1590 |
+
"gasmask, respirator, gas helmet",
|
| 1591 |
+
"gas pump, gasoline pump, petrol pump, island dispenser",
|
| 1592 |
+
"goblet",
|
| 1593 |
+
"go-kart",
|
| 1594 |
+
"golf ball",
|
| 1595 |
+
"golfcart, golf cart",
|
| 1596 |
+
"gondola",
|
| 1597 |
+
"gong, tam-tam",
|
| 1598 |
+
"gown",
|
| 1599 |
+
"grand piano, grand",
|
| 1600 |
+
"greenhouse, nursery, glasshouse",
|
| 1601 |
+
"grille, radiator grille",
|
| 1602 |
+
"grocery store, grocery, food market, market",
|
| 1603 |
+
"guillotine",
|
| 1604 |
+
"hair slide",
|
| 1605 |
+
"hair spray",
|
| 1606 |
+
"half track",
|
| 1607 |
+
"hammer",
|
| 1608 |
+
"hamper",
|
| 1609 |
+
"hand blower, blow dryer, blow drier, hair dryer, hair drier",
|
| 1610 |
+
"hand-held computer, hand-held microcomputer",
|
| 1611 |
+
"handkerchief, hankie, hanky, hankey",
|
| 1612 |
+
"hard disc, hard disk, fixed disk",
|
| 1613 |
+
"harmonica, mouth organ, harp, mouth harp",
|
| 1614 |
+
"harp",
|
| 1615 |
+
"harvester, reaper",
|
| 1616 |
+
"hatchet",
|
| 1617 |
+
"holster",
|
| 1618 |
+
"home theater, home theatre",
|
| 1619 |
+
"honeycomb",
|
| 1620 |
+
"hook, claw",
|
| 1621 |
+
"hoopskirt, crinoline",
|
| 1622 |
+
"horizontal bar, high bar",
|
| 1623 |
+
"horse cart, horse-cart",
|
| 1624 |
+
"hourglass",
|
| 1625 |
+
"iPod",
|
| 1626 |
+
"iron, smoothing iron",
|
| 1627 |
+
"jack-o'-lantern",
|
| 1628 |
+
"jean, blue jean, denim",
|
| 1629 |
+
"jeep, landrover",
|
| 1630 |
+
"jersey, T-shirt, tee shirt",
|
| 1631 |
+
"jigsaw puzzle",
|
| 1632 |
+
"jinrikisha, ricksha, rickshaw",
|
| 1633 |
+
"joystick",
|
| 1634 |
+
"kimono",
|
| 1635 |
+
"knee pad",
|
| 1636 |
+
"knot",
|
| 1637 |
+
"lab coat, laboratory coat",
|
| 1638 |
+
"ladle",
|
| 1639 |
+
"lampshade, lamp shade",
|
| 1640 |
+
"laptop, laptop computer",
|
| 1641 |
+
"lawn mower, mower",
|
| 1642 |
+
"lens cap, lens cover",
|
| 1643 |
+
"letter opener, paper knife, paperknife",
|
| 1644 |
+
"library",
|
| 1645 |
+
"lifeboat",
|
| 1646 |
+
"lighter, light, igniter, ignitor",
|
| 1647 |
+
"limousine, limo",
|
| 1648 |
+
"liner, ocean liner",
|
| 1649 |
+
"lipstick, lip rouge",
|
| 1650 |
+
"Loafer",
|
| 1651 |
+
"lotion",
|
| 1652 |
+
"loudspeaker, speaker, speaker unit, loudspeaker system, speaker system",
|
| 1653 |
+
"loupe, jeweler's loupe",
|
| 1654 |
+
"lumbermill, sawmill",
|
| 1655 |
+
"magnetic compass",
|
| 1656 |
+
"mailbag, postbag",
|
| 1657 |
+
"mailbox, letter box",
|
| 1658 |
+
"maillot",
|
| 1659 |
+
"maillot, tank suit",
|
| 1660 |
+
"manhole cover",
|
| 1661 |
+
"maraca",
|
| 1662 |
+
"marimba, xylophone",
|
| 1663 |
+
"mask",
|
| 1664 |
+
"matchstick",
|
| 1665 |
+
"maypole",
|
| 1666 |
+
"maze, labyrinth",
|
| 1667 |
+
"measuring cup",
|
| 1668 |
+
"medicine chest, medicine cabinet",
|
| 1669 |
+
"megalith, megalithic structure",
|
| 1670 |
+
"microphone, mike",
|
| 1671 |
+
"microwave, microwave oven",
|
| 1672 |
+
"military uniform",
|
| 1673 |
+
"milk can",
|
| 1674 |
+
"minibus",
|
| 1675 |
+
"miniskirt, mini",
|
| 1676 |
+
"minivan",
|
| 1677 |
+
"missile",
|
| 1678 |
+
"mitten",
|
| 1679 |
+
"mixing bowl",
|
| 1680 |
+
"mobile home, manufactured home",
|
| 1681 |
+
"Model T",
|
| 1682 |
+
"modem",
|
| 1683 |
+
"monastery",
|
| 1684 |
+
"monitor",
|
| 1685 |
+
"moped",
|
| 1686 |
+
"mortar",
|
| 1687 |
+
"mortarboard",
|
| 1688 |
+
"mosque",
|
| 1689 |
+
"mosquito net",
|
| 1690 |
+
"motor scooter, scooter",
|
| 1691 |
+
"mountain bike, all-terrain bike, off-roader",
|
| 1692 |
+
"mountain tent",
|
| 1693 |
+
"mouse, computer mouse",
|
| 1694 |
+
"mousetrap",
|
| 1695 |
+
"moving van",
|
| 1696 |
+
"muzzle",
|
| 1697 |
+
"nail",
|
| 1698 |
+
"neck brace",
|
| 1699 |
+
"necklace",
|
| 1700 |
+
"nipple",
|
| 1701 |
+
"notebook, notebook computer",
|
| 1702 |
+
"obelisk",
|
| 1703 |
+
"oboe, hautboy, hautbois",
|
| 1704 |
+
"ocarina, sweet potato",
|
| 1705 |
+
"odometer, hodometer, mileometer, milometer",
|
| 1706 |
+
"oil filter",
|
| 1707 |
+
"organ, pipe organ",
|
| 1708 |
+
"oscilloscope, scope, cathode-ray oscilloscope, CRO",
|
| 1709 |
+
"overskirt",
|
| 1710 |
+
"oxcart",
|
| 1711 |
+
"oxygen mask",
|
| 1712 |
+
"packet",
|
| 1713 |
+
"paddle, boat paddle",
|
| 1714 |
+
"paddlewheel, paddle wheel",
|
| 1715 |
+
"padlock",
|
| 1716 |
+
"paintbrush",
|
| 1717 |
+
"pajama, pyjama, pj's, jammies",
|
| 1718 |
+
"palace",
|
| 1719 |
+
"panpipe, pandean pipe, syrinx",
|
| 1720 |
+
"paper towel",
|
| 1721 |
+
"parachute, chute",
|
| 1722 |
+
"parallel bars, bars",
|
| 1723 |
+
"park bench",
|
| 1724 |
+
"parking meter",
|
| 1725 |
+
"passenger car, coach, carriage",
|
| 1726 |
+
"patio, terrace",
|
| 1727 |
+
"pay-phone, pay-station",
|
| 1728 |
+
"pedestal, plinth, footstall",
|
| 1729 |
+
"pencil box, pencil case",
|
| 1730 |
+
"pencil sharpener",
|
| 1731 |
+
"perfume, essence",
|
| 1732 |
+
"Petri dish",
|
| 1733 |
+
"photocopier",
|
| 1734 |
+
"pick, plectrum, plectron",
|
| 1735 |
+
"pickelhaube",
|
| 1736 |
+
"picket fence, paling",
|
| 1737 |
+
"pickup, pickup truck",
|
| 1738 |
+
"pier",
|
| 1739 |
+
"piggy bank, penny bank",
|
| 1740 |
+
"pill bottle",
|
| 1741 |
+
"pillow",
|
| 1742 |
+
"ping-pong ball",
|
| 1743 |
+
"pinwheel",
|
| 1744 |
+
"pirate, pirate ship",
|
| 1745 |
+
"pitcher, ewer",
|
| 1746 |
+
"plane, carpenter's plane, woodworking plane",
|
| 1747 |
+
"planetarium",
|
| 1748 |
+
"plastic bag",
|
| 1749 |
+
"plate rack",
|
| 1750 |
+
"plow, plough",
|
| 1751 |
+
"plunger, plumber's helper",
|
| 1752 |
+
"Polaroid camera, Polaroid Land camera",
|
| 1753 |
+
"pole",
|
| 1754 |
+
"police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria",
|
| 1755 |
+
"poncho",
|
| 1756 |
+
"pool table, billiard table, snooker table",
|
| 1757 |
+
"pop bottle, soda bottle",
|
| 1758 |
+
"pot, flowerpot",
|
| 1759 |
+
"potter's wheel",
|
| 1760 |
+
"power drill",
|
| 1761 |
+
"prayer rug, prayer mat",
|
| 1762 |
+
"printer",
|
| 1763 |
+
"prison, prison house",
|
| 1764 |
+
"projectile, missile",
|
| 1765 |
+
"projector",
|
| 1766 |
+
"puck, hockey puck",
|
| 1767 |
+
"punching bag, punch bag, punching ball, punchball",
|
| 1768 |
+
"purse",
|
| 1769 |
+
"quill, quill pen",
|
| 1770 |
+
"quilt, comforter, comfort, puff",
|
| 1771 |
+
"racer, race car, racing car",
|
| 1772 |
+
"racket, racquet",
|
| 1773 |
+
"radiator",
|
| 1774 |
+
"radio, wireless",
|
| 1775 |
+
"radio telescope, radio reflector",
|
| 1776 |
+
"rain barrel",
|
| 1777 |
+
"recreational vehicle, RV, R.V.",
|
| 1778 |
+
"reel",
|
| 1779 |
+
"reflex camera",
|
| 1780 |
+
"refrigerator, icebox",
|
| 1781 |
+
"remote control, remote",
|
| 1782 |
+
"restaurant, eating house, eating place, eatery",
|
| 1783 |
+
"revolver, six-gun, six-shooter",
|
| 1784 |
+
"rifle",
|
| 1785 |
+
"rocking chair, rocker",
|
| 1786 |
+
"rotisserie",
|
| 1787 |
+
"rubber eraser, rubber, pencil eraser",
|
| 1788 |
+
"rugby ball",
|
| 1789 |
+
"rule, ruler",
|
| 1790 |
+
"running shoe",
|
| 1791 |
+
"safe",
|
| 1792 |
+
"safety pin",
|
| 1793 |
+
"saltshaker, salt shaker",
|
| 1794 |
+
"sandal",
|
| 1795 |
+
"sarong",
|
| 1796 |
+
"sax, saxophone",
|
| 1797 |
+
"scabbard",
|
| 1798 |
+
"scale, weighing machine",
|
| 1799 |
+
"school bus",
|
| 1800 |
+
"schooner",
|
| 1801 |
+
"scoreboard",
|
| 1802 |
+
"screen, CRT screen",
|
| 1803 |
+
"screw",
|
| 1804 |
+
"screwdriver",
|
| 1805 |
+
"seat belt, seatbelt",
|
| 1806 |
+
"sewing machine",
|
| 1807 |
+
"shield, buckler",
|
| 1808 |
+
"shoe shop, shoe-shop, shoe store",
|
| 1809 |
+
"shoji",
|
| 1810 |
+
"shopping basket",
|
| 1811 |
+
"shopping cart",
|
| 1812 |
+
"shovel",
|
| 1813 |
+
"shower cap",
|
| 1814 |
+
"shower curtain",
|
| 1815 |
+
"ski",
|
| 1816 |
+
"ski mask",
|
| 1817 |
+
"sleeping bag",
|
| 1818 |
+
"slide rule, slipstick",
|
| 1819 |
+
"sliding door",
|
| 1820 |
+
"slot, one-armed bandit",
|
| 1821 |
+
"snorkel",
|
| 1822 |
+
"snowmobile",
|
| 1823 |
+
"snowplow, snowplough",
|
| 1824 |
+
"soap dispenser",
|
| 1825 |
+
"soccer ball",
|
| 1826 |
+
"sock",
|
| 1827 |
+
"solar dish, solar collector, solar furnace",
|
| 1828 |
+
"sombrero",
|
| 1829 |
+
"soup bowl",
|
| 1830 |
+
"space bar",
|
| 1831 |
+
"space heater",
|
| 1832 |
+
"space shuttle",
|
| 1833 |
+
"spatula",
|
| 1834 |
+
"speedboat",
|
| 1835 |
+
"spider web, spider's web",
|
| 1836 |
+
"spindle",
|
| 1837 |
+
"sports car, sport car",
|
| 1838 |
+
"spotlight, spot",
|
| 1839 |
+
"stage",
|
| 1840 |
+
"steam locomotive",
|
| 1841 |
+
"steel arch bridge",
|
| 1842 |
+
"steel drum",
|
| 1843 |
+
"stethoscope",
|
| 1844 |
+
"stole",
|
| 1845 |
+
"stone wall",
|
| 1846 |
+
"stopwatch, stop watch",
|
| 1847 |
+
"stove",
|
| 1848 |
+
"strainer",
|
| 1849 |
+
"streetcar, tram, tramcar, trolley, trolley car",
|
| 1850 |
+
"stretcher",
|
| 1851 |
+
"studio couch, day bed",
|
| 1852 |
+
"stupa, tope",
|
| 1853 |
+
"submarine, pigboat, sub, U-boat",
|
| 1854 |
+
"suit, suit of clothes",
|
| 1855 |
+
"sundial",
|
| 1856 |
+
"sunglass",
|
| 1857 |
+
"sunglasses, dark glasses, shades",
|
| 1858 |
+
"sunscreen, sunblock, sun blocker",
|
| 1859 |
+
"suspension bridge",
|
| 1860 |
+
"swab, swob, mop",
|
| 1861 |
+
"sweatshirt",
|
| 1862 |
+
"swimming trunks, bathing trunks",
|
| 1863 |
+
"swing",
|
| 1864 |
+
"switch, electric switch, electrical switch",
|
| 1865 |
+
"syringe",
|
| 1866 |
+
"table lamp",
|
| 1867 |
+
"tank, army tank, armored combat vehicle, armoured combat vehicle",
|
| 1868 |
+
"tape player",
|
| 1869 |
+
"teapot",
|
| 1870 |
+
"teddy, teddy bear",
|
| 1871 |
+
"television, television system",
|
| 1872 |
+
"tennis ball",
|
| 1873 |
+
"thatch, thatched roof",
|
| 1874 |
+
"theater curtain, theatre curtain",
|
| 1875 |
+
"thimble",
|
| 1876 |
+
"thresher, thrasher, threshing machine",
|
| 1877 |
+
"throne",
|
| 1878 |
+
"tile roof",
|
| 1879 |
+
"toaster",
|
| 1880 |
+
"tobacco shop, tobacconist shop, tobacconist",
|
| 1881 |
+
"toilet seat",
|
| 1882 |
+
"torch",
|
| 1883 |
+
"totem pole",
|
| 1884 |
+
"tow truck, tow car, wrecker",
|
| 1885 |
+
"toyshop",
|
| 1886 |
+
"tractor",
|
| 1887 |
+
"trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi",
|
| 1888 |
+
"tray",
|
| 1889 |
+
"trench coat",
|
| 1890 |
+
"tricycle, trike, velocipede",
|
| 1891 |
+
"trimaran",
|
| 1892 |
+
"tripod",
|
| 1893 |
+
"triumphal arch",
|
| 1894 |
+
"trolleybus, trolley coach, trackless trolley",
|
| 1895 |
+
"trombone",
|
| 1896 |
+
"tub, vat",
|
| 1897 |
+
"turnstile",
|
| 1898 |
+
"typewriter keyboard",
|
| 1899 |
+
"umbrella",
|
| 1900 |
+
"unicycle, monocycle",
|
| 1901 |
+
"upright, upright piano",
|
| 1902 |
+
"vacuum, vacuum cleaner",
|
| 1903 |
+
"vase",
|
| 1904 |
+
"vault",
|
| 1905 |
+
"velvet",
|
| 1906 |
+
"vending machine",
|
| 1907 |
+
"vestment",
|
| 1908 |
+
"viaduct",
|
| 1909 |
+
"violin, fiddle",
|
| 1910 |
+
"volleyball",
|
| 1911 |
+
"waffle iron",
|
| 1912 |
+
"wall clock",
|
| 1913 |
+
"wallet, billfold, notecase, pocketbook",
|
| 1914 |
+
"wardrobe, closet, press",
|
| 1915 |
+
"warplane, military plane",
|
| 1916 |
+
"washbasin, handbasin, washbowl, lavabo, wash-hand basin",
|
| 1917 |
+
"washer, automatic washer, washing machine",
|
| 1918 |
+
"water bottle",
|
| 1919 |
+
"water jug",
|
| 1920 |
+
"water tower",
|
| 1921 |
+
"whiskey jug",
|
| 1922 |
+
"whistle",
|
| 1923 |
+
"wig",
|
| 1924 |
+
"window screen",
|
| 1925 |
+
"window shade",
|
| 1926 |
+
"Windsor tie",
|
| 1927 |
+
"wine bottle",
|
| 1928 |
+
"wing",
|
| 1929 |
+
"wok",
|
| 1930 |
+
"wooden spoon",
|
| 1931 |
+
"wool, woolen, woollen",
|
| 1932 |
+
"worm fence, snake fence, snake-rail fence, Virginia fence",
|
| 1933 |
+
"wreck",
|
| 1934 |
+
"yawl",
|
| 1935 |
+
"yurt",
|
| 1936 |
+
"web site, website, internet site, site",
|
| 1937 |
+
"comic book",
|
| 1938 |
+
"crossword puzzle, crossword",
|
| 1939 |
+
"street sign",
|
| 1940 |
+
"traffic light, traffic signal, stoplight",
|
| 1941 |
+
"book jacket, dust cover, dust jacket, dust wrapper",
|
| 1942 |
+
"menu",
|
| 1943 |
+
"plate",
|
| 1944 |
+
"guacamole",
|
| 1945 |
+
"consomme",
|
| 1946 |
+
"hot pot, hotpot",
|
| 1947 |
+
"trifle",
|
| 1948 |
+
"ice cream, icecream",
|
| 1949 |
+
"ice lolly, lolly, lollipop, popsicle",
|
| 1950 |
+
"French loaf",
|
| 1951 |
+
"bagel, beigel",
|
| 1952 |
+
"pretzel",
|
| 1953 |
+
"cheeseburger",
|
| 1954 |
+
"hotdog, hot dog, red hot",
|
| 1955 |
+
"mashed potato",
|
| 1956 |
+
"head cabbage",
|
| 1957 |
+
"broccoli",
|
| 1958 |
+
"cauliflower",
|
| 1959 |
+
"zucchini, courgette",
|
| 1960 |
+
"spaghetti squash",
|
| 1961 |
+
"acorn squash",
|
| 1962 |
+
"butternut squash",
|
| 1963 |
+
"cucumber, cuke",
|
| 1964 |
+
"artichoke, globe artichoke",
|
| 1965 |
+
"bell pepper",
|
| 1966 |
+
"cardoon",
|
| 1967 |
+
"mushroom",
|
| 1968 |
+
"Granny Smith",
|
| 1969 |
+
"strawberry",
|
| 1970 |
+
"orange",
|
| 1971 |
+
"lemon",
|
| 1972 |
+
"fig",
|
| 1973 |
+
"pineapple, ananas",
|
| 1974 |
+
"banana",
|
| 1975 |
+
"jackfruit, jak, jack",
|
| 1976 |
+
"custard apple",
|
| 1977 |
+
"pomegranate",
|
| 1978 |
+
"hay",
|
| 1979 |
+
"carbonara",
|
| 1980 |
+
"chocolate sauce, chocolate syrup",
|
| 1981 |
+
"dough",
|
| 1982 |
+
"meat loaf, meatloaf",
|
| 1983 |
+
"pizza, pizza pie",
|
| 1984 |
+
"potpie",
|
| 1985 |
+
"burrito",
|
| 1986 |
+
"red wine",
|
| 1987 |
+
"espresso",
|
| 1988 |
+
"cup",
|
| 1989 |
+
"eggnog",
|
| 1990 |
+
"alp",
|
| 1991 |
+
"bubble",
|
| 1992 |
+
"cliff, drop, drop-off",
|
| 1993 |
+
"coral reef",
|
| 1994 |
+
"geyser",
|
| 1995 |
+
"lakeside, lakeshore",
|
| 1996 |
+
"promontory, headland, head, foreland",
|
| 1997 |
+
"sandbar, sand bar",
|
| 1998 |
+
"seashore, coast, seacoast, sea-coast",
|
| 1999 |
+
"valley, vale",
|
| 2000 |
+
"volcano",
|
| 2001 |
+
"ballplayer, baseball player",
|
| 2002 |
+
"groom, bridegroom",
|
| 2003 |
+
"scuba diver",
|
| 2004 |
+
"rapeseed",
|
| 2005 |
+
"daisy",
|
| 2006 |
+
"yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum",
|
| 2007 |
+
"corn",
|
| 2008 |
+
"acorn",
|
| 2009 |
+
"hip, rose hip, rosehip",
|
| 2010 |
+
"buckeye, horse chestnut, conker",
|
| 2011 |
+
"coral fungus",
|
| 2012 |
+
"agaric",
|
| 2013 |
+
"gyromitra",
|
| 2014 |
+
"stinkhorn, carrion fungus",
|
| 2015 |
+
"earthstar",
|
| 2016 |
+
"hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa",
|
| 2017 |
+
"bolete",
|
| 2018 |
+
"ear, spike, capitulum",
|
| 2019 |
+
"toilet tissue, toilet paper, bathroom tissue"
|
| 2020 |
+
],
|
| 2021 |
+
"torch_dtype": "float32"
|
| 2022 |
+
}
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:dfcd993fe23a6a24a1267feea720c76611fc17bedb4c90ec0b5caac9f40851ef
|
| 3 |
+
size 347155192
|