diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/Error/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/Error/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..5dac858f382c3247ff0ce2b204af1ddcd10dace5
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/Error/__init__.pyi
@@ -0,0 +1,118 @@
+__all__: list[str] = []
+
+# Enumerations
+StsOk: int
+STS_OK: int
+StsBackTrace: int
+STS_BACK_TRACE: int
+StsError: int
+STS_ERROR: int
+StsInternal: int
+STS_INTERNAL: int
+StsNoMem: int
+STS_NO_MEM: int
+StsBadArg: int
+STS_BAD_ARG: int
+StsBadFunc: int
+STS_BAD_FUNC: int
+StsNoConv: int
+STS_NO_CONV: int
+StsAutoTrace: int
+STS_AUTO_TRACE: int
+HeaderIsNull: int
+HEADER_IS_NULL: int
+BadImageSize: int
+BAD_IMAGE_SIZE: int
+BadOffset: int
+BAD_OFFSET: int
+BadDataPtr: int
+BAD_DATA_PTR: int
+BadStep: int
+BAD_STEP: int
+BadModelOrChSeq: int
+BAD_MODEL_OR_CH_SEQ: int
+BadNumChannels: int
+BAD_NUM_CHANNELS: int
+BadNumChannel1U: int
+BAD_NUM_CHANNEL1U: int
+BadDepth: int
+BAD_DEPTH: int
+BadAlphaChannel: int
+BAD_ALPHA_CHANNEL: int
+BadOrder: int
+BAD_ORDER: int
+BadOrigin: int
+BAD_ORIGIN: int
+BadAlign: int
+BAD_ALIGN: int
+BadCallBack: int
+BAD_CALL_BACK: int
+BadTileSize: int
+BAD_TILE_SIZE: int
+BadCOI: int
+BAD_COI: int
+BadROISize: int
+BAD_ROISIZE: int
+MaskIsTiled: int
+MASK_IS_TILED: int
+StsNullPtr: int
+STS_NULL_PTR: int
+StsVecLengthErr: int
+STS_VEC_LENGTH_ERR: int
+StsFilterStructContentErr: int
+STS_FILTER_STRUCT_CONTENT_ERR: int
+StsKernelStructContentErr: int
+STS_KERNEL_STRUCT_CONTENT_ERR: int
+StsFilterOffsetErr: int
+STS_FILTER_OFFSET_ERR: int
+StsBadSize: int
+STS_BAD_SIZE: int
+StsDivByZero: int
+STS_DIV_BY_ZERO: int
+StsInplaceNotSupported: int
+STS_INPLACE_NOT_SUPPORTED: int
+StsObjectNotFound: int
+STS_OBJECT_NOT_FOUND: int
+StsUnmatchedFormats: int
+STS_UNMATCHED_FORMATS: int
+StsBadFlag: int
+STS_BAD_FLAG: int
+StsBadPoint: int
+STS_BAD_POINT: int
+StsBadMask: int
+STS_BAD_MASK: int
+StsUnmatchedSizes: int
+STS_UNMATCHED_SIZES: int
+StsUnsupportedFormat: int
+STS_UNSUPPORTED_FORMAT: int
+StsOutOfRange: int
+STS_OUT_OF_RANGE: int
+StsParseError: int
+STS_PARSE_ERROR: int
+StsNotImplemented: int
+STS_NOT_IMPLEMENTED: int
+StsBadMemBlock: int
+STS_BAD_MEM_BLOCK: int
+StsAssert: int
+STS_ASSERT: int
+GpuNotSupported: int
+GPU_NOT_SUPPORTED: int
+GpuApiCallError: int
+GPU_API_CALL_ERROR: int
+OpenGlNotSupported: int
+OPEN_GL_NOT_SUPPORTED: int
+OpenGlApiCallError: int
+OPEN_GL_API_CALL_ERROR: int
+OpenCLApiCallError: int
+OPEN_CLAPI_CALL_ERROR: int
+OpenCLDoubleNotSupported: int
+OPEN_CLDOUBLE_NOT_SUPPORTED: int
+OpenCLInitError: int
+OPEN_CLINIT_ERROR: int
+OpenCLNoAMDBlasFft: int
+OPEN_CLNO_AMDBLAS_FFT: int
+Code = int
+"""One of [StsOk, STS_OK, StsBackTrace, STS_BACK_TRACE, StsError, STS_ERROR, StsInternal, STS_INTERNAL, StsNoMem, STS_NO_MEM, StsBadArg, STS_BAD_ARG, StsBadFunc, STS_BAD_FUNC, StsNoConv, STS_NO_CONV, StsAutoTrace, STS_AUTO_TRACE, HeaderIsNull, HEADER_IS_NULL, BadImageSize, BAD_IMAGE_SIZE, BadOffset, BAD_OFFSET, BadDataPtr, BAD_DATA_PTR, BadStep, BAD_STEP, BadModelOrChSeq, BAD_MODEL_OR_CH_SEQ, BadNumChannels, BAD_NUM_CHANNELS, BadNumChannel1U, BAD_NUM_CHANNEL1U, BadDepth, BAD_DEPTH, BadAlphaChannel, BAD_ALPHA_CHANNEL, BadOrder, BAD_ORDER, BadOrigin, BAD_ORIGIN, BadAlign, BAD_ALIGN, BadCallBack, BAD_CALL_BACK, BadTileSize, BAD_TILE_SIZE, BadCOI, BAD_COI, BadROISize, BAD_ROISIZE, MaskIsTiled, MASK_IS_TILED, StsNullPtr, STS_NULL_PTR, StsVecLengthErr, STS_VEC_LENGTH_ERR, StsFilterStructContentErr, STS_FILTER_STRUCT_CONTENT_ERR, StsKernelStructContentErr, STS_KERNEL_STRUCT_CONTENT_ERR, StsFilterOffsetErr, STS_FILTER_OFFSET_ERR, StsBadSize, STS_BAD_SIZE, StsDivByZero, STS_DIV_BY_ZERO, StsInplaceNotSupported, STS_INPLACE_NOT_SUPPORTED, StsObjectNotFound, STS_OBJECT_NOT_FOUND, StsUnmatchedFormats, STS_UNMATCHED_FORMATS, StsBadFlag, STS_BAD_FLAG, StsBadPoint, STS_BAD_POINT, StsBadMask, STS_BAD_MASK, StsUnmatchedSizes, STS_UNMATCHED_SIZES, StsUnsupportedFormat, STS_UNSUPPORTED_FORMAT, StsOutOfRange, STS_OUT_OF_RANGE, StsParseError, STS_PARSE_ERROR, StsNotImplemented, STS_NOT_IMPLEMENTED, StsBadMemBlock, STS_BAD_MEM_BLOCK, StsAssert, STS_ASSERT, GpuNotSupported, GPU_NOT_SUPPORTED, GpuApiCallError, GPU_API_CALL_ERROR, OpenGlNotSupported, OPEN_GL_NOT_SUPPORTED, OpenGlApiCallError, OPEN_GL_API_CALL_ERROR, OpenCLApiCallError, OPEN_CLAPI_CALL_ERROR, OpenCLDoubleNotSupported, OPEN_CLDOUBLE_NOT_SUPPORTED, OpenCLInitError, OPEN_CLINIT_ERROR, OpenCLNoAMDBlasFft, OPEN_CLNO_AMDBLAS_FFT]"""
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE-3RD-PARTY.txt b/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE-3RD-PARTY.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0462eee383c3d7d24ad85bf75c4024caa4634e34
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE-3RD-PARTY.txt
@@ -0,0 +1,3090 @@
+OpenCV library is redistributed within opencv-python package.
+This license applies to OpenCV binary in the directory cv2/.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+------------------------------------------------------------------------------
+libvpx is redistributed within all opencv-python Linux packages.
+This license applies to libvpx binary in the directory cv2/.
+
+Copyright (c) 2010, The WebM Project authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * Neither the name of Google, nor the WebM Project, nor the names
+ of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+FFmpeg is redistributed within all opencv-python packages.
+
+Libbluray, libgnutls, libnettle, libhogweed, libintl, libmp3lame, libp11,
+librtmp, libsoxr and libtasn1 are redistributed within all opencv-python macOS packages.
+
+This license applies to the above library binaries in the directory cv2/.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+------------------------------------------------------------------------------
+Qt 5 is redistributed within non-headless opencv-python Linux and macOS packages.
+libgmp is redistributed within opencv-python macOS packages.
+libidn2 is redistributed within opencv-python macOS packages.
+libunistring is redistributed within opencv-python macOS packages.
+This license applies to the above binaries in the directory cv2/.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
+
+------------------------------------------------------------------------------
+bzip2 is redistributed within all opencv-python Linux packages.
+This license applies to libbz2 binary in the directory cv2/.
+
+This program, "bzip2", the associated library "libbzip2", and all
+documentation, are copyright (C) 1996-2010 Julian R Seward. All
+rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. The origin of this software must not be misrepresented; you must
+ not claim that you wrote the original software. If you use this
+ software in a product, an acknowledgment in the product
+ documentation would be appreciated but is not required.
+
+3. Altered source versions must be plainly marked as such, and must
+ not be misrepresented as being the original software.
+
+4. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Julian Seward, jseward@bzip.org
+bzip2/libbzip2 version 1.0.6 of 6 September 2010
+
+------------------------------------------------------------------------------
+libcrypto and libssl are redistributed within all opencv-python Linux and macOS packages.
+libopencore-amrnb and libopencore-amrwb are redistributed within all opencv-python Linux and macOS packages.
+This license applies to above binaries in the directory cv2/.
+
+ LICENSE ISSUES
+ ==============
+
+ The OpenSSL toolkit stays under a double license, i.e. both the conditions of
+ the OpenSSL License and the original SSLeay license apply to the toolkit.
+ See below for the actual license texts.
+
+ OpenSSL License
+ ---------------
+
+/* ====================================================================
+ * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * 3. All advertising materials mentioning features or use of this
+ * software must display the following acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+ *
+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ * endorse or promote products derived from this software without
+ * prior written permission. For written permission, please contact
+ * openssl-core@openssl.org.
+ *
+ * 5. Products derived from this software may not be called "OpenSSL"
+ * nor may "OpenSSL" appear in their names without prior written
+ * permission of the OpenSSL Project.
+ *
+ * 6. Redistributions of any form whatsoever must retain the following
+ * acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ * ====================================================================
+ *
+ * This product includes cryptographic software written by Eric Young
+ * (eay@cryptsoft.com). This product includes software written by Tim
+ * Hudson (tjh@cryptsoft.com).
+ *
+ */
+
+ Original SSLeay License
+ -----------------------
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+------------------------------------------------------------------------------
+libfontconfig is redistributed within all opencv-python macOS packages.
+This license applies to libfontconfig binary in the directory cv2/.
+
+Copyright © 2000,2001,2002,2003,2004,2006,2007 Keith Packard
+Copyright © 2005 Patrick Lam
+Copyright © 2009 Roozbeh Pournader
+Copyright © 2008,2009 Red Hat, Inc.
+Copyright © 2008 Danilo Šegan
+Copyright © 2012 Google, Inc.
+
+
+Permission to use, copy, modify, distribute, and sell this software and its
+documentation for any purpose is hereby granted without fee, provided that
+the above copyright notice appear in all copies and that both that
+copyright notice and this permission notice appear in supporting
+documentation, and that the name of the author(s) not be used in
+advertising or publicity pertaining to distribution of the software without
+specific, written prior permission. The authors make no
+representations about the suitability of this software for any purpose. It
+is provided "as is" without express or implied warranty.
+
+THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
+EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+------------------------------------------------------------------------------
+libfreetype is redistributed within opencv-python Linux and macOS packages.
+This license applies to libfreetype binary in the directory cv2/.
+
+ The FreeType Project LICENSE
+ ----------------------------
+
+ 2006-Jan-27
+
+ Copyright 1996-2002, 2006 by
+ David Turner, Robert Wilhelm, and Werner Lemberg
+
+
+
+Introduction
+============
+
+ The FreeType Project is distributed in several archive packages;
+ some of them may contain, in addition to the FreeType font engine,
+ various tools and contributions which rely on, or relate to, the
+ FreeType Project.
+
+ This license applies to all files found in such packages, and
+ which do not fall under their own explicit license. The license
+ affects thus the FreeType font engine, the test programs,
+ documentation and makefiles, at the very least.
+
+ This license was inspired by the BSD, Artistic, and IJG
+ (Independent JPEG Group) licenses, which all encourage inclusion
+ and use of free software in commercial and freeware products
+ alike. As a consequence, its main points are that:
+
+ o We don't promise that this software works. However, we will be
+ interested in any kind of bug reports. (`as is' distribution)
+
+ o You can use this software for whatever you want, in parts or
+ full form, without having to pay us. (`royalty-free' usage)
+
+ o You may not pretend that you wrote this software. If you use
+ it, or only parts of it, in a program, you must acknowledge
+ somewhere in your documentation that you have used the
+ FreeType code. (`credits')
+
+ We specifically permit and encourage the inclusion of this
+ software, with or without modifications, in commercial products.
+ We disclaim all warranties covering The FreeType Project and
+ assume no liability related to The FreeType Project.
+
+
+ Finally, many people asked us for a preferred form for a
+ credit/disclaimer to use in compliance with this license. We thus
+ encourage you to use the following text:
+
+ """
+      Portions of this software are copyright © <year> The FreeType
+      Project (www.freetype.org).  All rights reserved.
+ """
+
+    Please replace <year> with the value from the FreeType version you
+    actually use.
+
+
+Legal Terms
+===========
+
+0. Definitions
+--------------
+
+ Throughout this license, the terms `package', `FreeType Project',
+ and `FreeType archive' refer to the set of files originally
+ distributed by the authors (David Turner, Robert Wilhelm, and
+ Werner Lemberg) as the `FreeType Project', be they named as alpha,
+ beta or final release.
+
+ `You' refers to the licensee, or person using the project, where
+ `using' is a generic term including compiling the project's source
+ code as well as linking it to form a `program' or `executable'.
+ This program is referred to as `a program using the FreeType
+ engine'.
+
+ This license applies to all files distributed in the original
+ FreeType Project, including all source code, binaries and
+ documentation, unless otherwise stated in the file in its
+ original, unmodified form as distributed in the original archive.
+ If you are unsure whether or not a particular file is covered by
+ this license, you must contact us to verify this.
+
+ The FreeType Project is copyright (C) 1996-2000 by David Turner,
+ Robert Wilhelm, and Werner Lemberg. All rights reserved except as
+ specified below.
+
+1. No Warranty
+--------------
+
+ THE FREETYPE PROJECT IS PROVIDED `AS IS' WITHOUT WARRANTY OF ANY
+ KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE. IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS
+ BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OR THE INABILITY TO
+ USE, OF THE FREETYPE PROJECT.
+
+2. Redistribution
+-----------------
+
+ This license grants a worldwide, royalty-free, perpetual and
+ irrevocable right and license to use, execute, perform, compile,
+ display, copy, create derivative works of, distribute and
+ sublicense the FreeType Project (in both source and object code
+ forms) and derivative works thereof for any purpose; and to
+ authorize others to exercise some or all of the rights granted
+ herein, subject to the following conditions:
+
+ o Redistribution of source code must retain this license file
+ (`FTL.TXT') unaltered; any additions, deletions or changes to
+ the original files must be clearly indicated in accompanying
+ documentation. The copyright notices of the unaltered,
+ original files must be preserved in all copies of source
+ files.
+
+ o Redistribution in binary form must provide a disclaimer that
+ states that the software is based in part of the work of the
+ FreeType Team, in the distribution documentation. We also
+ encourage you to put an URL to the FreeType web page in your
+ documentation, though this isn't mandatory.
+
+ These conditions apply to any software derived from or based on
+ the FreeType Project, not just the unmodified files. If you use
+ our work, you must acknowledge us. However, no fee need be paid
+ to us.
+
+3. Advertising
+--------------
+
+ Neither the FreeType authors and contributors nor you shall use
+ the name of the other for commercial, advertising, or promotional
+ purposes without specific prior written permission.
+
+ We suggest, but do not require, that you use one or more of the
+ following phrases to refer to this software in your documentation
+ or advertising materials: `FreeType Project', `FreeType Engine',
+ `FreeType library', or `FreeType Distribution'.
+
+ As you have not signed this license, you are not required to
+ accept it. However, as the FreeType Project is copyrighted
+ material, only this license, or another one contracted with the
+ authors, grants you the right to use, distribute, and modify it.
+ Therefore, by using, distributing, or modifying the FreeType
+ Project, you indicate that you understand and accept all the terms
+ of this license.
+
+4. Contacts
+-----------
+
+ There are two mailing lists related to FreeType:
+
+ o freetype@nongnu.org
+
+ Discusses general use and applications of FreeType, as well as
+ future and wanted additions to the library and distribution.
+ If you are looking for support, start in this list if you
+ haven't found anything to help you in the documentation.
+
+ o freetype-devel@nongnu.org
+
+ Discusses bugs, as well as engine internals, design issues,
+ specific licenses, porting, etc.
+
+ Our home page can be found at
+
+ https://www.freetype.org
+
+------------------------------------------------------------------------------
+libpng is redistributed within all opencv-python Linux and macOS packages.
+This license applies to libpng binary in the directory cv2/.
+
+PNG Reference Library License version 2
+---------------------------------------
+
+ * Copyright (c) 1995-2019 The PNG Reference Library Authors.
+ * Copyright (c) 2018-2019 Cosmin Truta.
+ * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson.
+ * Copyright (c) 1996-1997 Andreas Dilger.
+ * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.
+
+The software is supplied "as is", without warranty of any kind,
+express or implied, including, without limitation, the warranties
+of merchantability, fitness for a particular purpose, title, and
+non-infringement. In no event shall the Copyright owners, or
+anyone distributing the software, be liable for any damages or
+other liability, whether in contract, tort or otherwise, arising
+from, out of, or in connection with the software, or the use or
+other dealings in the software, even if advised of the possibility
+of such damage.
+
+Permission is hereby granted to use, copy, modify, and distribute
+this software, or portions hereof, for any purpose, without fee,
+subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you
+ must not claim that you wrote the original software. If you
+ use this software in a product, an acknowledgment in the product
+ documentation would be appreciated, but is not required.
+
+ 2. Altered source versions must be plainly marked as such, and must
+ not be misrepresented as being the original software.
+
+ 3. This Copyright notice may not be removed or altered from any
+ source or altered source distribution.
+
+
+PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35)
+-----------------------------------------------------------------------
+
+libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are
+Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are
+derived from libpng-1.0.6, and are distributed according to the same
+disclaimer and license as libpng-1.0.6 with the following individuals
+added to the list of Contributing Authors:
+
+ Simon-Pierre Cadieux
+ Eric S. Raymond
+ Mans Rullgard
+ Cosmin Truta
+ Gilles Vollant
+ James Yu
+ Mandar Sahastrabuddhe
+ Google Inc.
+ Vadim Barkov
+
+and with the following additions to the disclaimer:
+
+ There is no warranty against interference with your enjoyment of
+ the library or against infringement. There is no warranty that our
+ efforts or the library will fulfill any of your particular purposes
+ or needs. This library is provided with all faults, and the entire
+ risk of satisfactory quality, performance, accuracy, and effort is
+ with the user.
+
+Some files in the "contrib" directory and some configure-generated
+files that are distributed with libpng have other copyright owners, and
+are released under other open source licenses.
+
+libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are
+Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from
+libpng-0.96, and are distributed according to the same disclaimer and
+license as libpng-0.96, with the following individuals added to the
+list of Contributing Authors:
+
+ Tom Lane
+ Glenn Randers-Pehrson
+ Willem van Schaik
+
+libpng versions 0.89, June 1996, through 0.96, May 1997, are
+Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88,
+and are distributed according to the same disclaimer and license as
+libpng-0.88, with the following individuals added to the list of
+Contributing Authors:
+
+ John Bowler
+ Kevin Bracey
+ Sam Bushell
+ Magnus Holmgren
+ Greg Roelofs
+ Tom Tanner
+
+Some files in the "scripts" directory have other copyright owners,
+but are released under this license.
+
+libpng versions 0.5, May 1995, through 0.88, January 1996, are
+Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.
+
+For the purposes of this copyright and license, "Contributing Authors"
+is defined as the following set of individuals:
+
+ Andreas Dilger
+ Dave Martindale
+ Guy Eric Schalnat
+ Paul Schmidt
+ Tim Wegner
+
+The PNG Reference Library is supplied "AS IS". The Contributing
+Authors and Group 42, Inc. disclaim all warranties, expressed or
+implied, including, without limitation, the warranties of
+merchantability and of fitness for any purpose. The Contributing
+Authors and Group 42, Inc. assume no liability for direct, indirect,
+incidental, special, exemplary, or consequential damages, which may
+result from the use of the PNG Reference Library, even if advised of
+the possibility of such damage.
+
+Permission is hereby granted to use, copy, modify, and distribute this
+source code, or portions hereof, for any purpose, without fee, subject
+to the following restrictions:
+
+ 1. The origin of this source code must not be misrepresented.
+
+ 2. Altered versions must be plainly marked as such and must not
+ be misrepresented as being the original source.
+
+ 3. This Copyright notice may not be removed or altered from any
+ source or altered source distribution.
+
+The Contributing Authors and Group 42, Inc. specifically permit,
+without fee, and encourage the use of this source code as a component
+to supporting the PNG file format in commercial products. If you use
+this source code in a product, acknowledgment is not required but would
+be appreciated.
+
+------------------------------------------------------------------------------
+libz is redistributed within all opencv-python Linux packages.
+This license applies to libz binary in the directory cv2/.
+
+ Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ Jean-loup Gailly Mark Adler
+ jloup@gzip.org madler@alumni.caltech.edu
+
+------------------------------------------------------------------------------
+libdav1d is redistributed within opencv-python macOS packages.
+This license applies to libdav1d binary in the directory cv2/.
+
+Copyright © 2018-2019, VideoLAN and dav1d authors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libffi is redistributed within opencv-python macOS packages.
+This license applies to libffi binary in the directory cv2/.
+
+libffi - Copyright (c) 1996-2020 Anthony Green, Red Hat, Inc and others.
+See source files for details.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+``Software''), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+------------------------------------------------------------------------------
+libogg is redistributed within opencv-python macOS packages.
+This license applies to libogg binary in the directory cv2/.
+
+Copyright (c) 2002, Xiph.org Foundation
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+- Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+- Neither the name of the Xiph.org Foundation nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libopenjp2 is redistributed within opencv-python macOS packages.
+This license applies to libopenjp2 binary in the directory cv2/.
+
+The copyright in this software is being made available under the 2-clauses
+BSD License, included below. This software may be subject to other third
+party and contributor rights, including patent rights, and no such rights
+are granted under this license.
+
+Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium
+Copyright (c) 2002-2014, Professor Benoit Macq
+Copyright (c) 2003-2014, Antonin Descampe
+Copyright (c) 2003-2009, Francois-Olivier Devaux
+Copyright (c) 2005, Herve Drolon, FreeImage Team
+Copyright (c) 2002-2003, Yannick Verschueren
+Copyright (c) 2001-2003, David Janssens
+Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France
+Copyright (c) 2012, CS Systemes d'Information, France
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libopus is redistributed within opencv-python macOS packages.
+This license applies to libopus binary in the directory cv2/.
+
+Copyright 2001-2011 Xiph.Org, Skype Limited, Octasic,
+ Jean-Marc Valin, Timothy B. Terriberry,
+ CSIRO, Gregory Maxwell, Mark Borgerding,
+ Erik de Castro Lopo
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+- Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+- Neither the name of Internet Society, IETF or IETF Trust, nor the
+names of specific contributors, may be used to endorse or promote
+products derived from this software without specific prior written
+permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Opus is subject to the royalty-free patent licenses which are
+specified at:
+
+Xiph.Org Foundation:
+https://datatracker.ietf.org/ipr/1524/
+
+Microsoft Corporation:
+https://datatracker.ietf.org/ipr/1914/
+
+Broadcom Corporation:
+https://datatracker.ietf.org/ipr/1526/
+
+------------------------------------------------------------------------------
+librav1e is redistributed within opencv-python macOS packages.
+This license applies to librav1e binary in the directory cv2/.
+
+BSD 2-Clause License
+
+Copyright (c) 2017-2020, the rav1e contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libsnappy is redistributed within opencv-python macOS packages.
+This license applies to libsnappy binary in the directory cv2/.
+
+Copyright 2011, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libspeex is redistributed within opencv-python macOS packages.
+This license applies to libspeex binary in the directory cv2/.
+
+Copyright 2002-2008 Xiph.org Foundation
+Copyright 2002-2008 Jean-Marc Valin
+Copyright 2005-2007 Analog Devices Inc.
+Copyright 2005-2008 Commonwealth Scientific and Industrial Research
+ Organisation (CSIRO)
+Copyright 1993, 2002, 2006 David Rowe
+Copyright 2003 EpicGames
+Copyright 1992-1994 Jutta Degener, Carsten Bormann
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+- Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+- Neither the name of the Xiph.org Foundation nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libsrt is redistributed within opencv-python macOS packages.
+This license applies to libsrt binary in the directory cv2/.
+
+/*
+ *
+ * Copyright (c) 2001-2017 Cisco Systems, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * Neither the name of the Cisco Systems, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+
+ Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
+
+------------------------------------------------------------------------------
+libtheoradec and libtheoraenc are redistributed within opencv-python macOS packages.
+This license applies to libtheoradec and libtheoraenc binaries in the directory cv2/.
+
+ Copyright (C) 2002-2009 Xiph.org Foundation
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+- Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+- Neither the name of the Xiph.org Foundation nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libwebp and libwebpmux are redistributed within all opencv-python packages.
+This license applies to libwebp and libwebpmux binaries in the directory cv2/.
+
+Copyright (c) 2010, Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * Neither the name of Google nor the names of its contributors may
+ be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+libvorbis and libvorbisenc are redistributed within opencv-python macOS packages.
+This license applies to libvorbis and libvorbisenc binaries in the directory cv2/.
+
+Copyright (c) 2002-2020 Xiph.org Foundation
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+- Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+- Neither the name of the Xiph.org Foundation nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+Libxcb utility libraries are redistributed within opencv-python non-headless Linux packages.
+This license applies to libxcb related binaries in the directory cv2/.
+
+Copyright (C) 2001-2006 Bart Massey, Jamey Sharp, and Josh Triplett.
+All Rights Reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall
+be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors
+or their institutions shall not be used in advertising or
+otherwise to promote the sale, use or other dealings in this
+Software without prior written authorization from the
+authors.
+
+------------------------------------------------------------------------------
+Libxcb-image is redistributed within opencv-python non-headless Linux packages.
+This license applies to libxcb-image binary in the directory cv2/.
+
+Copyright © 2007-2008 Bart Massey
+Copyright © 2008 Julien Danjou
+Copyright © 2008 Keith Packard
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors or
+their institutions shall not be used in advertising or otherwise to
+promote the sale, use or other dealings in this Software without
+prior written authorization from the authors.
+
+------------------------------------------------------------------------------
+Libxcb-util is redistributed within opencv-python non-headless Linux packages.
+This license applies to libxcb-util binary in the directory cv2/.
+
+Copyright © 2008 Bart Massey
+Copyright © 2008 Ian Osgood
+Copyright © 2008 Jamey Sharp
+Copyright © 2008 Josh Triplett
+Copyright © 2008-2009 Julien Danjou
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors or
+their institutions shall not be used in advertising or otherwise to
+promote the sale, use or other dealings in this Software without
+prior written authorization from the authors.
+
+------------------------------------------------------------------------------
+Libxcb-render-util is redistributed within opencv-python non-headless Linux packages.
+This license applies to libxcb-render-util binary in the directory cv2/.
+
+Copyright © 2000 Keith Packard
+
+Permission to use, copy, modify, distribute, and sell this software and its
+documentation for any purpose is hereby granted without fee, provided that
+the above copyright notice appear in all copies and that both that
+copyright notice and this permission notice appear in supporting
+documentation, and that the name of Keith Packard not be used in
+advertising or publicity pertaining to distribution of the software without
+specific, written prior permission. Keith Packard makes no
+representations about the suitability of this software for any purpose. It
+is provided "as is" without express or implied warranty.
+
+KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
+EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+Copyright © 2006 Jamey Sharp.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors or their
+institutions shall not be used in advertising or otherwise to promote the
+sale, use or other dealings in this Software without prior written
+authorization from the authors.
+
+Copyright © 2006 Ian Osgood
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors or their
+institutions shall not be used in advertising or otherwise to promote the
+sale, use or other dealings in this Software without prior written
+authorization from the authors.
+
+------------------------------------------------------------------------------
+Libxcb-icccm is redistributed within opencv-python non-headless Linux packages.
+This license applies to Libxcb-icccm binary in the directory cv2/.
+
+Copyright © 2008-2011 Arnaud Fontaine
+Copyright © 2007-2008 Vincent Torri
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors or
+their institutions shall not be used in advertising or otherwise to
+promote the sale, use or other dealings in this Software without
+prior written authorization from the authors.
+
+------------------------------------------------------------------------------
+libXau is redistributed within opencv-python non-headless Linux packages.
+This license applies to libXau binary in the directory cv2/.
+
+Copyright 1988, 1993, 1994, 1998 The Open Group
+
+Permission to use, copy, modify, distribute, and sell this software and its
+documentation for any purpose is hereby granted without fee, provided that
+the above copyright notice appear in all copies and that both that
+copyright notice and this permission notice appear in supporting
+documentation.
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the name of The Open Group shall not be
+used in advertising or otherwise to promote the sale, use or other dealings
+in this Software without prior written authorization from The Open Group.
+
+------------------------------------------------------------------------------
+Vulkan headers are redistributed within all opencv-python packages.
+This license applies to Vulkan headers in the directory 3rdparty/include/vulkan.
+
+Copyright (c) 2015-2018 The Khronos Group Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+------------------------------------------------------------------------------
+Libjpeg-turbo is redistributed within all opencv-python packages as build option.
+
+libjpeg-turbo Licenses
+======================
+
+libjpeg-turbo is covered by three compatible BSD-style open source licenses:
+
+- The IJG (Independent JPEG Group) License, which is listed in
+ [README.ijg](README.ijg)
+
+ This license applies to the libjpeg API library and associated programs
+ (any code inherited from libjpeg, and any modifications to that code.)
+
+- The Modified (3-clause) BSD License, which is listed below
+
+ This license covers the TurboJPEG API library and associated programs, as
+ well as the build system.
+
+- The [zlib License](https://opensource.org/licenses/Zlib)
+
+ This license is a subset of the other two, and it covers the libjpeg-turbo
+ SIMD extensions.
+
+
+Complying with the libjpeg-turbo Licenses
+=========================================
+
+This section provides a roll-up of the libjpeg-turbo licensing terms, to the
+best of our understanding.
+
+1. If you are distributing a modified version of the libjpeg-turbo source,
+ then:
+
+ 1. You cannot alter or remove any existing copyright or license notices
+ from the source.
+
+ **Origin**
+ - Clause 1 of the IJG License
+ - Clause 1 of the Modified BSD License
+ - Clauses 1 and 3 of the zlib License
+
+ 2. You must add your own copyright notice to the header of each source
+ file you modified, so others can tell that you modified that file (if
+ there is not an existing copyright header in that file, then you can
+ simply add a notice stating that you modified the file.)
+
+ **Origin**
+ - Clause 1 of the IJG License
+ - Clause 2 of the zlib License
+
+ 3. You must include the IJG README file, and you must not alter any of the
+ copyright or license text in that file.
+
+ **Origin**
+ - Clause 1 of the IJG License
+
+2. If you are distributing only libjpeg-turbo binaries without the source, or
+ if you are distributing an application that statically links with
+ libjpeg-turbo, then:
+
+ 1. Your product documentation must include a message stating:
+
+ This software is based in part on the work of the Independent JPEG
+ Group.
+
+ **Origin**
+ - Clause 2 of the IJG license
+
+ 2. If your binary distribution includes or uses the TurboJPEG API, then
+ your product documentation must include the text of the Modified BSD
+ License (see below.)
+
+ **Origin**
+ - Clause 2 of the Modified BSD License
+
+3. You cannot use the name of the IJG or The libjpeg-turbo Project or the
+ contributors thereof in advertising, publicity, etc.
+
+ **Origin**
+ - IJG License
+ - Clause 3 of the Modified BSD License
+
+4. The IJG and The libjpeg-turbo Project do not warrant libjpeg-turbo to be
+ free of defects, nor do we accept any liability for undesirable
+ consequences resulting from your use of the software.
+
+ **Origin**
+ - IJG License
+ - Modified BSD License
+ - zlib License
+
+
+The Modified (3-clause) BSD License
+===================================
+
+Copyright (C)2009-2022 D. R. Commander. All Rights Reserved.
+Copyright (C)2015 Viktor Szathmáry. All Rights Reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+- Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+- Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+- Neither the name of the libjpeg-turbo Project nor the names of its
+ contributors may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS",
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+Why Three Licenses?
+===================
+
+The zlib License could have been used instead of the Modified (3-clause) BSD
+License, and since the IJG License effectively subsumes the distribution
+conditions of the zlib License, this would have effectively placed
+libjpeg-turbo binary distributions under the IJG License. However, the IJG
+License specifically refers to the Independent JPEG Group and does not extend
+attribution and endorsement protections to other entities. Thus, it was
+desirable to choose a license that granted us the same protections for new code
+that were granted to the IJG for code derived from their software.
+
+------------------------------------------------------------------------------
+Libspng is redistributed within all opencv-python packages as a build option.
+
+BSD 2-Clause License
+
+Copyright (c) 2018-2022, Randy
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+QUIRC library is redistributed within all opencv-python packages.
+
+quirc -- QR-code recognition library
+Copyright (C) 2010-2012 Daniel Beer
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all
+copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
+PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+------------------------------------------------------------------------------
+Flatbuffers library is redistributed within all opencv-python packages.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+------------------------------------------------------------------------------
+Protobuf library is redistributed within all opencv-python packages.
+
+Copyright 2008 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Code generated by the Protocol Buffer compiler is owned by the owner
+of the input file used when generating it. This code is not
+standalone and requires a support library to be linked with it. This
+support library is itself covered by the above license.
+
+------------------------------------------------------------------------------
+OpenJPEG library is redistributed within all opencv-python packages.
+
+/*
+ * The copyright in this software is being made available under the 2-clauses
+ * BSD License, included below. This software may be subject to other third
+ * party and contributor rights, including patent rights, and no such rights
+ * are granted under this license.
+ *
+ * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium
+ * Copyright (c) 2002-2014, Professor Benoit Macq
+ * Copyright (c) 2003-2014, Antonin Descampe
+ * Copyright (c) 2003-2009, Francois-Olivier Devaux
+ * Copyright (c) 2005, Herve Drolon, FreeImage Team
+ * Copyright (c) 2002-2003, Yannick Verschueren
+ * Copyright (c) 2001-2003, David Janssens
+ * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France
+ * Copyright (c) 2012, CS Systemes d'Information, France
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+------------------------------------------------------------------------------
+TIFF library is redistributed within all opencv-python packages.
+
+Copyright (c) 1988-1997 Sam Leffler
+Copyright (c) 1991-1997 Silicon Graphics, Inc.
+
+Permission to use, copy, modify, distribute, and sell this software and
+its documentation for any purpose is hereby granted without fee, provided
+that (i) the above copyright notices and this permission notice appear in
+all copies of the software and related documentation, and (ii) the names of
+Sam Leffler and Silicon Graphics may not be used in any advertising or
+publicity relating to the software without the specific, prior written
+permission of Sam Leffler and Silicon Graphics.
+
+THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
+EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
+WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
+
+IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR
+ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND,
+OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF
+LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+OF THIS SOFTWARE.
+
+------------------------------------------------------------------------------
+OpenEXR library is redistributed within all opencv-python packages.
+
+Copyright (c) 2006, Industrial Light & Magic, a division of Lucasfilm
+Entertainment Company Ltd. Portions contributed and copyright held by
+others as indicated. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the following
+ disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided with
+ the distribution.
+
+ * Neither the name of Industrial Light & Magic nor the names of
+ any other contributors to this software may be used to endorse or
+ promote products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+Intel(R) IPP ICV library statically linked within x86 and x86_64 opencv-python packages.
+
+Intel(R) Integrated Performance Primitives 2021 Update 10
+
+Intel Simplified Software License (Version October 2022)
+
+Intel(R) Integrated Performance Primitives (Intel(R) IPP) : Copyright (C) 1997 Intel Corporation
+
+Use and Redistribution. You may use and redistribute the software, which is
+provided in binary form only, (the "Software"), without modification,
+provided the following conditions are met:
+
+* Redistributions must reproduce the above copyright notice and these
+ terms of use in the Software and in the documentation and/or other materials
+ provided with the distribution.
+* Neither the name of Intel nor the names of its suppliers may be used to
+ endorse or promote products derived from this Software without specific
+ prior written permission.
+* No reverse engineering, decompilation, or disassembly of the Software is
+ permitted, nor any modification or alteration of the Software or its operation
+ at any time, including during execution.
+
+No other licenses. Except as provided in the preceding section, Intel grants no
+licenses or other rights by implication, estoppel or otherwise to, patent,
+copyright, trademark, trade name, service mark or other intellectual property
+licenses or rights of Intel.
+
+Third party software. "Third Party Software" means the files (if any) listed
+in the "third-party-software.txt" or other similarly-named text file that may
+be included with the Software. Third Party Software, even if included with the
+distribution of the Software, may be governed by separate license terms, including
+without limitation, third party license terms, open source software notices and
+terms, and/or other Intel software license terms. These separate license terms
+solely govern Your use of the Third Party Software.
+
+DISCLAIMER. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT ARE
+DISCLAIMED. THIS SOFTWARE IS NOT INTENDED FOR USE IN SYSTEMS OR APPLICATIONS
+WHERE FAILURE OF THE SOFTWARE MAY CAUSE PERSONAL INJURY OR DEATH AND YOU AGREE
+THAT YOU ARE FULLY RESPONSIBLE FOR ANY CLAIMS, COSTS, DAMAGES, EXPENSES, AND
+ATTORNEYS' FEES ARISING OUT OF ANY SUCH USE, EVEN IF ANY CLAIM ALLEGES THAT
+INTEL WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE.
+
+LIMITATION OF LIABILITY. IN NO EVENT WILL INTEL BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+No support. Intel may make changes to the Software, at any time without notice,
+and is not obligated to support, update or provide training for the Software.
+
+Termination. Your right to use the Software is terminated in the event of your
+breach of this license.
+
+Feedback. Should you provide Intel with comments, modifications, corrections,
+enhancements or other input ("Feedback") related to the Software, Intel will be
+free to use, disclose, reproduce, license or otherwise distribute or exploit the
+Feedback in its sole discretion without any obligations or restrictions of any
+kind, including without limitation, intellectual property rights or licensing
+obligations.
+
+Compliance with laws. You agree to comply with all relevant laws and regulations
+governing your use, transfer, import or export (or prohibition thereof) of the
+Software.
+
+Governing law. All disputes will be governed by the laws of the United States of
+America and the State of Delaware without reference to conflict of law
+principles and subject to the exclusive jurisdiction of the state or federal
+courts sitting in the State of Delaware, and each party agrees that it submits
+to the personal jurisdiction and venue of those courts and waives any
+objections. THE UNITED NATIONS CONVENTION ON CONTRACTS FOR THE INTERNATIONAL
+SALE OF GOODS (1980) IS SPECIFICALLY EXCLUDED AND WILL NOT APPLY TO THE SOFTWARE.
+
+------------------------------------------------------------------------------
+Orbbec SDK is distributed with arm64 macOS packages.
+
+MIT License
+
+Copyright (c) 2023 OrbbecDeveloper
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE.txt b/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..328bf50632a988cf1cc494d557936d84fec16335
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Olli-Pekka Heinisuo
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e148fc9f2b93a2b51b3bc6ec49187dbdcdfc5cb
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.py
@@ -0,0 +1,181 @@
+'''
+OpenCV Python binary extension loader
+'''
+import os
+import importlib
+import sys
+
+__all__ = []
+
+try:
+ import numpy
+ import numpy.core.multiarray
+except ImportError:
+ print('OpenCV bindings requires "numpy" package.')
+ print('Install it via command:')
+ print(' pip install numpy')
+ raise
+
+# TODO
+# is_x64 = sys.maxsize > 2**32
+
+
+def __load_extra_py_code_for_module(base, name, enable_debug_print=False):
+ module_name = "{}.{}".format(__name__, name)
+ export_module_name = "{}.{}".format(base, name)
+ native_module = sys.modules.pop(module_name, None)
+ try:
+ py_module = importlib.import_module(module_name)
+ except ImportError as err:
+ if enable_debug_print:
+ print("Can't load Python code for module:", module_name,
+ ". Reason:", err)
+ # Extension doesn't contain extra py code
+ return False
+
+ if base in sys.modules and not hasattr(sys.modules[base], name):
+ setattr(sys.modules[base], name, py_module)
+ sys.modules[export_module_name] = py_module
+ # If it is C extension module it is already loaded by cv2 package
+ if native_module:
+ setattr(py_module, "_native", native_module)
+ for k, v in filter(lambda kv: not hasattr(py_module, kv[0]),
+ native_module.__dict__.items()):
+ if enable_debug_print: print(' symbol({}): {} = {}'.format(name, k, v))
+ setattr(py_module, k, v)
+ return True
+
+
+def __collect_extra_submodules(enable_debug_print=False):
+ def modules_filter(module):
+ return all((
+ # module is not internal
+ not module.startswith("_"),
+ not module.startswith("python-"),
+ # it is not a file
+ os.path.isdir(os.path.join(_extra_submodules_init_path, module))
+ ))
+ if sys.version_info[0] < 3:
+ if enable_debug_print:
+ print("Extra submodules is loaded only for Python 3")
+ return []
+
+ __INIT_FILE_PATH = os.path.abspath(__file__)
+ _extra_submodules_init_path = os.path.dirname(__INIT_FILE_PATH)
+ return filter(modules_filter, os.listdir(_extra_submodules_init_path))
+
+
+def bootstrap():
+ import sys
+
+ import copy
+ save_sys_path = copy.copy(sys.path)
+
+ if hasattr(sys, 'OpenCV_LOADER'):
+ print(sys.path)
+ raise ImportError('ERROR: recursion is detected during loading of "cv2" binary extensions. Check OpenCV installation.')
+ sys.OpenCV_LOADER = True
+
+ DEBUG = False
+ if hasattr(sys, 'OpenCV_LOADER_DEBUG'):
+ DEBUG = True
+
+ import platform
+ if DEBUG: print('OpenCV loader: os.name="{}" platform.system()="{}"'.format(os.name, str(platform.system())))
+
+ LOADER_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
+
+ PYTHON_EXTENSIONS_PATHS = []
+ BINARIES_PATHS = []
+
+ g_vars = globals()
+ l_vars = locals().copy()
+
+ if sys.version_info[:2] < (3, 0):
+ from . load_config_py2 import exec_file_wrapper
+ else:
+ from . load_config_py3 import exec_file_wrapper
+
+ def load_first_config(fnames, required=True):
+ for fname in fnames:
+ fpath = os.path.join(LOADER_DIR, fname)
+ if not os.path.exists(fpath):
+ if DEBUG: print('OpenCV loader: config not found, skip: {}'.format(fpath))
+ continue
+ if DEBUG: print('OpenCV loader: loading config: {}'.format(fpath))
+ exec_file_wrapper(fpath, g_vars, l_vars)
+ return True
+ if required:
+ raise ImportError('OpenCV loader: missing configuration file: {}. Check OpenCV installation.'.format(fnames))
+
+ load_first_config(['config.py'], True)
+ load_first_config([
+ 'config-{}.{}.py'.format(sys.version_info[0], sys.version_info[1]),
+ 'config-{}.py'.format(sys.version_info[0])
+ ], True)
+
+ if DEBUG: print('OpenCV loader: PYTHON_EXTENSIONS_PATHS={}'.format(str(l_vars['PYTHON_EXTENSIONS_PATHS'])))
+ if DEBUG: print('OpenCV loader: BINARIES_PATHS={}'.format(str(l_vars['BINARIES_PATHS'])))
+
+ applySysPathWorkaround = False
+ if hasattr(sys, 'OpenCV_REPLACE_SYS_PATH_0'):
+ applySysPathWorkaround = True
+ else:
+ try:
+ BASE_DIR = os.path.dirname(LOADER_DIR)
+ if sys.path[0] == BASE_DIR or os.path.realpath(sys.path[0]) == BASE_DIR:
+ applySysPathWorkaround = True
+ except:
+ if DEBUG: print('OpenCV loader: exception during checking workaround for sys.path[0]')
+ pass # applySysPathWorkaround is False
+
+ for p in reversed(l_vars['PYTHON_EXTENSIONS_PATHS']):
+ sys.path.insert(1 if not applySysPathWorkaround else 0, p)
+
+ if os.name == 'nt':
+ if sys.version_info[:2] >= (3, 8): # https://github.com/python/cpython/pull/12302
+ for p in l_vars['BINARIES_PATHS']:
+ try:
+ os.add_dll_directory(p)
+ except Exception as e:
+ if DEBUG: print('Failed os.add_dll_directory(): '+ str(e))
+ pass
+ os.environ['PATH'] = ';'.join(l_vars['BINARIES_PATHS']) + ';' + os.environ.get('PATH', '')
+ if DEBUG: print('OpenCV loader: PATH={}'.format(str(os.environ['PATH'])))
+ else:
+ # amending of LD_LIBRARY_PATH works for sub-processes only
+ os.environ['LD_LIBRARY_PATH'] = ':'.join(l_vars['BINARIES_PATHS']) + ':' + os.environ.get('LD_LIBRARY_PATH', '')
+
+ if DEBUG: print("Relink everything from native cv2 module to cv2 package")
+
+ py_module = sys.modules.pop("cv2")
+
+ native_module = importlib.import_module("cv2")
+
+ sys.modules["cv2"] = py_module
+ setattr(py_module, "_native", native_module)
+
+ for item_name, item in filter(lambda kv: kv[0] not in ("__file__", "__loader__", "__spec__",
+ "__name__", "__package__"),
+ native_module.__dict__.items()):
+ if item_name not in g_vars:
+ g_vars[item_name] = item
+
+ sys.path = save_sys_path # multiprocessing should start from bootstrap code (https://github.com/opencv/opencv/issues/18502)
+
+ try:
+ del sys.OpenCV_LOADER
+ except Exception as e:
+ if DEBUG:
+ print("Exception during delete OpenCV_LOADER:", e)
+
+ if DEBUG: print('OpenCV loader: binary extension... OK')
+
+ for submodule in __collect_extra_submodules(DEBUG):
+ if __load_extra_py_code_for_module("cv2", submodule, DEBUG):
+ if DEBUG: print("Extra Python code for", submodule, "is loaded")
+
+ if DEBUG: print('OpenCV loader: DONE')
+
+
+bootstrap()
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..937f8e9540c4c2f2cded778af41a326e59dcc41b
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/__init__.pyi
@@ -0,0 +1,6305 @@
+__all__: list[str] = []
+
+import cv2.aruco
+import cv2.cuda
+import cv2.dnn
+import cv2.gapi
+import cv2.gapi.ot
+import cv2.gapi.streaming
+import cv2.typing
+import numpy
+import typing as _typing
+
+
+from cv2 import Error as Error
+from cv2 import aruco as aruco
+from cv2 import barcode as barcode
+from cv2 import cuda as cuda
+from cv2 import detail as detail
+from cv2 import dnn as dnn
+from cv2 import fisheye as fisheye
+from cv2 import flann as flann
+from cv2 import gapi as gapi
+from cv2 import ipp as ipp
+from cv2 import ml as ml
+from cv2 import ocl as ocl
+from cv2 import ogl as ogl
+from cv2 import parallel as parallel
+from cv2 import samples as samples
+from cv2 import segmentation as segmentation
+from cv2 import typing as typing
+from cv2 import utils as utils
+from cv2 import videoio_registry as videoio_registry
+from cv2.mat_wrapper import Mat as Mat
+
+
+# Enumerations
+SORT_EVERY_ROW: int
+SORT_EVERY_COLUMN: int
+SORT_ASCENDING: int
+SORT_DESCENDING: int
+SortFlags = int
+"""One of [SORT_EVERY_ROW, SORT_EVERY_COLUMN, SORT_ASCENDING, SORT_DESCENDING]"""
+
+COVAR_SCRAMBLED: int
+COVAR_NORMAL: int
+COVAR_USE_AVG: int
+COVAR_SCALE: int
+COVAR_ROWS: int
+COVAR_COLS: int
+CovarFlags = int
+"""One of [COVAR_SCRAMBLED, COVAR_NORMAL, COVAR_USE_AVG, COVAR_SCALE, COVAR_ROWS, COVAR_COLS]"""
+
+KMEANS_RANDOM_CENTERS: int
+KMEANS_PP_CENTERS: int
+KMEANS_USE_INITIAL_LABELS: int
+KmeansFlags = int
+"""One of [KMEANS_RANDOM_CENTERS, KMEANS_PP_CENTERS, KMEANS_USE_INITIAL_LABELS]"""
+
+REDUCE_SUM: int
+REDUCE_AVG: int
+REDUCE_MAX: int
+REDUCE_MIN: int
+REDUCE_SUM2: int
+ReduceTypes = int
+"""One of [REDUCE_SUM, REDUCE_AVG, REDUCE_MAX, REDUCE_MIN, REDUCE_SUM2]"""
+
+ROTATE_90_CLOCKWISE: int
+ROTATE_180: int
+ROTATE_90_COUNTERCLOCKWISE: int
+RotateFlags = int
+"""One of [ROTATE_90_CLOCKWISE, ROTATE_180, ROTATE_90_COUNTERCLOCKWISE]"""
+
+Param_INT: int
+PARAM_INT: int
+Param_BOOLEAN: int
+PARAM_BOOLEAN: int
+Param_REAL: int
+PARAM_REAL: int
+Param_STRING: int
+PARAM_STRING: int
+Param_MAT: int
+PARAM_MAT: int
+Param_MAT_VECTOR: int
+PARAM_MAT_VECTOR: int
+Param_ALGORITHM: int
+PARAM_ALGORITHM: int
+Param_FLOAT: int
+PARAM_FLOAT: int
+Param_UNSIGNED_INT: int
+PARAM_UNSIGNED_INT: int
+Param_UINT64: int
+PARAM_UINT64: int
+Param_UCHAR: int
+PARAM_UCHAR: int
+Param_SCALAR: int
+PARAM_SCALAR: int
+Param = int
+"""One of [Param_INT, PARAM_INT, Param_BOOLEAN, PARAM_BOOLEAN, Param_REAL, PARAM_REAL, Param_STRING, PARAM_STRING, Param_MAT, PARAM_MAT, Param_MAT_VECTOR, PARAM_MAT_VECTOR, Param_ALGORITHM, PARAM_ALGORITHM, Param_FLOAT, PARAM_FLOAT, Param_UNSIGNED_INT, PARAM_UNSIGNED_INT, Param_UINT64, PARAM_UINT64, Param_UCHAR, PARAM_UCHAR, Param_SCALAR, PARAM_SCALAR]"""
+
+DECOMP_LU: int
+DECOMP_SVD: int
+DECOMP_EIG: int
+DECOMP_CHOLESKY: int
+DECOMP_QR: int
+DECOMP_NORMAL: int
+DecompTypes = int
+"""One of [DECOMP_LU, DECOMP_SVD, DECOMP_EIG, DECOMP_CHOLESKY, DECOMP_QR, DECOMP_NORMAL]"""
+
+NORM_INF: int
+NORM_L1: int
+NORM_L2: int
+NORM_L2SQR: int
+NORM_HAMMING: int
+NORM_HAMMING2: int
+NORM_TYPE_MASK: int
+NORM_RELATIVE: int
+NORM_MINMAX: int
+NormTypes = int
+"""One of [NORM_INF, NORM_L1, NORM_L2, NORM_L2SQR, NORM_HAMMING, NORM_HAMMING2, NORM_TYPE_MASK, NORM_RELATIVE, NORM_MINMAX]"""
+
+CMP_EQ: int
+CMP_GT: int
+CMP_GE: int
+CMP_LT: int
+CMP_LE: int
+CMP_NE: int
+CmpTypes = int
+"""One of [CMP_EQ, CMP_GT, CMP_GE, CMP_LT, CMP_LE, CMP_NE]"""
+
+GEMM_1_T: int
+GEMM_2_T: int
+GEMM_3_T: int
+GemmFlags = int
+"""One of [GEMM_1_T, GEMM_2_T, GEMM_3_T]"""
+
+DFT_INVERSE: int
+DFT_SCALE: int
+DFT_ROWS: int
+DFT_COMPLEX_OUTPUT: int
+DFT_REAL_OUTPUT: int
+DFT_COMPLEX_INPUT: int
+DCT_INVERSE: int
+DCT_ROWS: int
+DftFlags = int
+"""One of [DFT_INVERSE, DFT_SCALE, DFT_ROWS, DFT_COMPLEX_OUTPUT, DFT_REAL_OUTPUT, DFT_COMPLEX_INPUT, DCT_INVERSE, DCT_ROWS]"""
+
+BORDER_CONSTANT: int
+BORDER_REPLICATE: int
+BORDER_REFLECT: int
+BORDER_WRAP: int
+BORDER_REFLECT_101: int
+BORDER_TRANSPARENT: int
+BORDER_REFLECT101: int
+BORDER_DEFAULT: int
+BORDER_ISOLATED: int
+BorderTypes = int
+"""One of [BORDER_CONSTANT, BORDER_REPLICATE, BORDER_REFLECT, BORDER_WRAP, BORDER_REFLECT_101, BORDER_TRANSPARENT, BORDER_REFLECT101, BORDER_DEFAULT, BORDER_ISOLATED]"""
+
+ACCESS_READ: int
+ACCESS_WRITE: int
+ACCESS_RW: int
+ACCESS_MASK: int
+ACCESS_FAST: int
+AccessFlag = int
+"""One of [ACCESS_READ, ACCESS_WRITE, ACCESS_RW, ACCESS_MASK, ACCESS_FAST]"""
+
+USAGE_DEFAULT: int
+USAGE_ALLOCATE_HOST_MEMORY: int
+USAGE_ALLOCATE_DEVICE_MEMORY: int
+USAGE_ALLOCATE_SHARED_MEMORY: int
+__UMAT_USAGE_FLAGS_32BIT: int
+UMatUsageFlags = int
+"""One of [USAGE_DEFAULT, USAGE_ALLOCATE_HOST_MEMORY, USAGE_ALLOCATE_DEVICE_MEMORY, USAGE_ALLOCATE_SHARED_MEMORY, __UMAT_USAGE_FLAGS_32BIT]"""
+
+SOLVELP_LOST: int
+SOLVELP_UNBOUNDED: int
+SOLVELP_UNFEASIBLE: int
+SOLVELP_SINGLE: int
+SOLVELP_MULTI: int
+SolveLPResult = int
+"""One of [SOLVELP_LOST, SOLVELP_UNBOUNDED, SOLVELP_UNFEASIBLE, SOLVELP_SINGLE, SOLVELP_MULTI]"""
+
+QUAT_ASSUME_NOT_UNIT: int
+QUAT_ASSUME_UNIT: int
+QuatAssumeType = int
+"""One of [QUAT_ASSUME_NOT_UNIT, QUAT_ASSUME_UNIT]"""
+
+FILTER_SCHARR: int
+SpecialFilter = int
+"""One of [FILTER_SCHARR]"""
+
+MORPH_ERODE: int
+MORPH_DILATE: int
+MORPH_OPEN: int
+MORPH_CLOSE: int
+MORPH_GRADIENT: int
+MORPH_TOPHAT: int
+MORPH_BLACKHAT: int
+MORPH_HITMISS: int
+MorphTypes = int
+"""One of [MORPH_ERODE, MORPH_DILATE, MORPH_OPEN, MORPH_CLOSE, MORPH_GRADIENT, MORPH_TOPHAT, MORPH_BLACKHAT, MORPH_HITMISS]"""
+
+MORPH_RECT: int
+MORPH_CROSS: int
+MORPH_ELLIPSE: int
+MorphShapes = int
+"""One of [MORPH_RECT, MORPH_CROSS, MORPH_ELLIPSE]"""
+
+INTER_NEAREST: int
+INTER_LINEAR: int
+INTER_CUBIC: int
+INTER_AREA: int
+INTER_LANCZOS4: int
+INTER_LINEAR_EXACT: int
+INTER_NEAREST_EXACT: int
+INTER_MAX: int
+WARP_FILL_OUTLIERS: int
+WARP_INVERSE_MAP: int
+WARP_RELATIVE_MAP: int
+InterpolationFlags = int
+"""One of [INTER_NEAREST, INTER_LINEAR, INTER_CUBIC, INTER_AREA, INTER_LANCZOS4, INTER_LINEAR_EXACT, INTER_NEAREST_EXACT, INTER_MAX, WARP_FILL_OUTLIERS, WARP_INVERSE_MAP, WARP_RELATIVE_MAP]"""
+
+WARP_POLAR_LINEAR: int
+WARP_POLAR_LOG: int
+WarpPolarMode = int
+"""One of [WARP_POLAR_LINEAR, WARP_POLAR_LOG]"""
+
+INTER_BITS: int
+INTER_BITS2: int
+INTER_TAB_SIZE: int
+INTER_TAB_SIZE2: int
+InterpolationMasks = int
+"""One of [INTER_BITS, INTER_BITS2, INTER_TAB_SIZE, INTER_TAB_SIZE2]"""
+
+DIST_USER: int
+DIST_L1: int
+DIST_L2: int
+DIST_C: int
+DIST_L12: int
+DIST_FAIR: int
+DIST_WELSCH: int
+DIST_HUBER: int
+DistanceTypes = int
+"""One of [DIST_USER, DIST_L1, DIST_L2, DIST_C, DIST_L12, DIST_FAIR, DIST_WELSCH, DIST_HUBER]"""
+
+DIST_MASK_3: int
+DIST_MASK_5: int
+DIST_MASK_PRECISE: int
+DistanceTransformMasks = int
+"""One of [DIST_MASK_3, DIST_MASK_5, DIST_MASK_PRECISE]"""
+
+THRESH_BINARY: int
+THRESH_BINARY_INV: int
+THRESH_TRUNC: int
+THRESH_TOZERO: int
+THRESH_TOZERO_INV: int
+THRESH_MASK: int
+THRESH_OTSU: int
+THRESH_TRIANGLE: int
+ThresholdTypes = int
+"""One of [THRESH_BINARY, THRESH_BINARY_INV, THRESH_TRUNC, THRESH_TOZERO, THRESH_TOZERO_INV, THRESH_MASK, THRESH_OTSU, THRESH_TRIANGLE]"""
+
+ADAPTIVE_THRESH_MEAN_C: int
+ADAPTIVE_THRESH_GAUSSIAN_C: int
+AdaptiveThresholdTypes = int
+"""One of [ADAPTIVE_THRESH_MEAN_C, ADAPTIVE_THRESH_GAUSSIAN_C]"""
+
+GC_BGD: int
+GC_FGD: int
+GC_PR_BGD: int
+GC_PR_FGD: int
+GrabCutClasses = int
+"""One of [GC_BGD, GC_FGD, GC_PR_BGD, GC_PR_FGD]"""
+
+GC_INIT_WITH_RECT: int
+GC_INIT_WITH_MASK: int
+GC_EVAL: int
+GC_EVAL_FREEZE_MODEL: int
+GrabCutModes = int
+"""One of [GC_INIT_WITH_RECT, GC_INIT_WITH_MASK, GC_EVAL, GC_EVAL_FREEZE_MODEL]"""
+
+DIST_LABEL_CCOMP: int
+DIST_LABEL_PIXEL: int
+DistanceTransformLabelTypes = int
+"""One of [DIST_LABEL_CCOMP, DIST_LABEL_PIXEL]"""
+
+FLOODFILL_FIXED_RANGE: int
+FLOODFILL_MASK_ONLY: int
+FloodFillFlags = int
+"""One of [FLOODFILL_FIXED_RANGE, FLOODFILL_MASK_ONLY]"""
+
+CC_STAT_LEFT: int
+CC_STAT_TOP: int
+CC_STAT_WIDTH: int
+CC_STAT_HEIGHT: int
+CC_STAT_AREA: int
+CC_STAT_MAX: int
+ConnectedComponentsTypes = int
+"""One of [CC_STAT_LEFT, CC_STAT_TOP, CC_STAT_WIDTH, CC_STAT_HEIGHT, CC_STAT_AREA, CC_STAT_MAX]"""
+
+CCL_DEFAULT: int
+CCL_WU: int
+CCL_GRANA: int
+CCL_BOLELLI: int
+CCL_SAUF: int
+CCL_BBDT: int
+CCL_SPAGHETTI: int
+ConnectedComponentsAlgorithmsTypes = int
+"""One of [CCL_DEFAULT, CCL_WU, CCL_GRANA, CCL_BOLELLI, CCL_SAUF, CCL_BBDT, CCL_SPAGHETTI]"""
+
+RETR_EXTERNAL: int
+RETR_LIST: int
+RETR_CCOMP: int
+RETR_TREE: int
+RETR_FLOODFILL: int
+RetrievalModes = int
+"""One of [RETR_EXTERNAL, RETR_LIST, RETR_CCOMP, RETR_TREE, RETR_FLOODFILL]"""
+
+CHAIN_APPROX_NONE: int
+CHAIN_APPROX_SIMPLE: int
+CHAIN_APPROX_TC89_L1: int
+CHAIN_APPROX_TC89_KCOS: int
+ContourApproximationModes = int
+"""One of [CHAIN_APPROX_NONE, CHAIN_APPROX_SIMPLE, CHAIN_APPROX_TC89_L1, CHAIN_APPROX_TC89_KCOS]"""
+
+CONTOURS_MATCH_I1: int
+CONTOURS_MATCH_I2: int
+CONTOURS_MATCH_I3: int
+ShapeMatchModes = int
+"""One of [CONTOURS_MATCH_I1, CONTOURS_MATCH_I2, CONTOURS_MATCH_I3]"""
+
+HOUGH_STANDARD: int
+HOUGH_PROBABILISTIC: int
+HOUGH_MULTI_SCALE: int
+HOUGH_GRADIENT: int
+HOUGH_GRADIENT_ALT: int
+HoughModes = int
+"""One of [HOUGH_STANDARD, HOUGH_PROBABILISTIC, HOUGH_MULTI_SCALE, HOUGH_GRADIENT, HOUGH_GRADIENT_ALT]"""
+
+LSD_REFINE_NONE: int
+LSD_REFINE_STD: int
+LSD_REFINE_ADV: int
+LineSegmentDetectorModes = int
+"""One of [LSD_REFINE_NONE, LSD_REFINE_STD, LSD_REFINE_ADV]"""
+
+HISTCMP_CORREL: int
+HISTCMP_CHISQR: int
+HISTCMP_INTERSECT: int
+HISTCMP_BHATTACHARYYA: int
+HISTCMP_HELLINGER: int
+HISTCMP_CHISQR_ALT: int
+HISTCMP_KL_DIV: int
+HistCompMethods = int
+"""One of [HISTCMP_CORREL, HISTCMP_CHISQR, HISTCMP_INTERSECT, HISTCMP_BHATTACHARYYA, HISTCMP_HELLINGER, HISTCMP_CHISQR_ALT, HISTCMP_KL_DIV]"""
+
+COLOR_BGR2BGRA: int
+COLOR_RGB2RGBA: int
+COLOR_BGRA2BGR: int
+COLOR_RGBA2RGB: int
+COLOR_BGR2RGBA: int
+COLOR_RGB2BGRA: int
+COLOR_RGBA2BGR: int
+COLOR_BGRA2RGB: int
+COLOR_BGR2RGB: int
+COLOR_RGB2BGR: int
+COLOR_BGRA2RGBA: int
+COLOR_RGBA2BGRA: int
+COLOR_BGR2GRAY: int
+COLOR_RGB2GRAY: int
+COLOR_GRAY2BGR: int
+COLOR_GRAY2RGB: int
+COLOR_GRAY2BGRA: int
+COLOR_GRAY2RGBA: int
+COLOR_BGRA2GRAY: int
+COLOR_RGBA2GRAY: int
+COLOR_BGR2BGR565: int
+COLOR_RGB2BGR565: int
+COLOR_BGR5652BGR: int
+COLOR_BGR5652RGB: int
+COLOR_BGRA2BGR565: int
+COLOR_RGBA2BGR565: int
+COLOR_BGR5652BGRA: int
+COLOR_BGR5652RGBA: int
+COLOR_GRAY2BGR565: int
+COLOR_BGR5652GRAY: int
+COLOR_BGR2BGR555: int
+COLOR_RGB2BGR555: int
+COLOR_BGR5552BGR: int
+COLOR_BGR5552RGB: int
+COLOR_BGRA2BGR555: int
+COLOR_RGBA2BGR555: int
+COLOR_BGR5552BGRA: int
+COLOR_BGR5552RGBA: int
+COLOR_GRAY2BGR555: int
+COLOR_BGR5552GRAY: int
+COLOR_BGR2XYZ: int
+COLOR_RGB2XYZ: int
+COLOR_XYZ2BGR: int
+COLOR_XYZ2RGB: int
+COLOR_BGR2YCrCb: int
+COLOR_BGR2YCR_CB: int
+COLOR_RGB2YCrCb: int
+COLOR_RGB2YCR_CB: int
+COLOR_YCrCb2BGR: int
+COLOR_YCR_CB2BGR: int
+COLOR_YCrCb2RGB: int
+COLOR_YCR_CB2RGB: int
+COLOR_BGR2HSV: int
+COLOR_RGB2HSV: int
+COLOR_BGR2Lab: int
+COLOR_BGR2LAB: int
+COLOR_RGB2Lab: int
+COLOR_RGB2LAB: int
+COLOR_BGR2Luv: int
+COLOR_BGR2LUV: int
+COLOR_RGB2Luv: int
+COLOR_RGB2LUV: int
+COLOR_BGR2HLS: int
+COLOR_RGB2HLS: int
+COLOR_HSV2BGR: int
+COLOR_HSV2RGB: int
+COLOR_Lab2BGR: int
+COLOR_LAB2BGR: int
+COLOR_Lab2RGB: int
+COLOR_LAB2RGB: int
+COLOR_Luv2BGR: int
+COLOR_LUV2BGR: int
+COLOR_Luv2RGB: int
+COLOR_LUV2RGB: int
+COLOR_HLS2BGR: int
+COLOR_HLS2RGB: int
+COLOR_BGR2HSV_FULL: int
+COLOR_RGB2HSV_FULL: int
+COLOR_BGR2HLS_FULL: int
+COLOR_RGB2HLS_FULL: int
+COLOR_HSV2BGR_FULL: int
+COLOR_HSV2RGB_FULL: int
+COLOR_HLS2BGR_FULL: int
+COLOR_HLS2RGB_FULL: int
+COLOR_LBGR2Lab: int
+COLOR_LBGR2LAB: int
+COLOR_LRGB2Lab: int
+COLOR_LRGB2LAB: int
+COLOR_LBGR2Luv: int
+COLOR_LBGR2LUV: int
+COLOR_LRGB2Luv: int
+COLOR_LRGB2LUV: int
+COLOR_Lab2LBGR: int
+COLOR_LAB2LBGR: int
+COLOR_Lab2LRGB: int
+COLOR_LAB2LRGB: int
+COLOR_Luv2LBGR: int
+COLOR_LUV2LBGR: int
+COLOR_Luv2LRGB: int
+COLOR_LUV2LRGB: int
+COLOR_BGR2YUV: int
+COLOR_RGB2YUV: int
+COLOR_YUV2BGR: int
+COLOR_YUV2RGB: int
+COLOR_YUV2RGB_NV12: int
+COLOR_YUV2BGR_NV12: int
+COLOR_YUV2RGB_NV21: int
+COLOR_YUV2BGR_NV21: int
+COLOR_YUV420sp2RGB: int
+COLOR_YUV420SP2RGB: int
+COLOR_YUV420sp2BGR: int
+COLOR_YUV420SP2BGR: int
+COLOR_YUV2RGBA_NV12: int
+COLOR_YUV2BGRA_NV12: int
+COLOR_YUV2RGBA_NV21: int
+COLOR_YUV2BGRA_NV21: int
+COLOR_YUV420sp2RGBA: int
+COLOR_YUV420SP2RGBA: int
+COLOR_YUV420sp2BGRA: int
+COLOR_YUV420SP2BGRA: int
+COLOR_YUV2RGB_YV12: int
+COLOR_YUV2BGR_YV12: int
+COLOR_YUV2RGB_IYUV: int
+COLOR_YUV2BGR_IYUV: int
+COLOR_YUV2RGB_I420: int
+COLOR_YUV2BGR_I420: int
+COLOR_YUV420p2RGB: int
+COLOR_YUV420P2RGB: int
+COLOR_YUV420p2BGR: int
+COLOR_YUV420P2BGR: int
+COLOR_YUV2RGBA_YV12: int
+COLOR_YUV2BGRA_YV12: int
+COLOR_YUV2RGBA_IYUV: int
+COLOR_YUV2BGRA_IYUV: int
+COLOR_YUV2RGBA_I420: int
+COLOR_YUV2BGRA_I420: int
+COLOR_YUV420p2RGBA: int
+COLOR_YUV420P2RGBA: int
+COLOR_YUV420p2BGRA: int
+COLOR_YUV420P2BGRA: int
+COLOR_YUV2GRAY_420: int
+COLOR_YUV2GRAY_NV21: int
+COLOR_YUV2GRAY_NV12: int
+COLOR_YUV2GRAY_YV12: int
+COLOR_YUV2GRAY_IYUV: int
+COLOR_YUV2GRAY_I420: int
+COLOR_YUV420sp2GRAY: int
+COLOR_YUV420SP2GRAY: int
+COLOR_YUV420p2GRAY: int
+COLOR_YUV420P2GRAY: int
+COLOR_YUV2RGB_UYVY: int
+COLOR_YUV2BGR_UYVY: int
+COLOR_YUV2RGB_Y422: int
+COLOR_YUV2BGR_Y422: int
+COLOR_YUV2RGB_UYNV: int
+COLOR_YUV2BGR_UYNV: int
+COLOR_YUV2RGBA_UYVY: int
+COLOR_YUV2BGRA_UYVY: int
+COLOR_YUV2RGBA_Y422: int
+COLOR_YUV2BGRA_Y422: int
+COLOR_YUV2RGBA_UYNV: int
+COLOR_YUV2BGRA_UYNV: int
+COLOR_YUV2RGB_YUY2: int
+COLOR_YUV2BGR_YUY2: int
+COLOR_YUV2RGB_YVYU: int
+COLOR_YUV2BGR_YVYU: int
+COLOR_YUV2RGB_YUYV: int
+COLOR_YUV2BGR_YUYV: int
+COLOR_YUV2RGB_YUNV: int
+COLOR_YUV2BGR_YUNV: int
+COLOR_YUV2RGBA_YUY2: int
+COLOR_YUV2BGRA_YUY2: int
+COLOR_YUV2RGBA_YVYU: int
+COLOR_YUV2BGRA_YVYU: int
+COLOR_YUV2RGBA_YUYV: int
+COLOR_YUV2BGRA_YUYV: int
+COLOR_YUV2RGBA_YUNV: int
+COLOR_YUV2BGRA_YUNV: int
+COLOR_YUV2GRAY_UYVY: int
+COLOR_YUV2GRAY_YUY2: int
+COLOR_YUV2GRAY_Y422: int
+COLOR_YUV2GRAY_UYNV: int
+COLOR_YUV2GRAY_YVYU: int
+COLOR_YUV2GRAY_YUYV: int
+COLOR_YUV2GRAY_YUNV: int
+COLOR_RGBA2mRGBA: int
+COLOR_RGBA2M_RGBA: int
+COLOR_mRGBA2RGBA: int
+COLOR_M_RGBA2RGBA: int
+COLOR_RGB2YUV_I420: int
+COLOR_BGR2YUV_I420: int
+COLOR_RGB2YUV_IYUV: int
+COLOR_BGR2YUV_IYUV: int
+COLOR_RGBA2YUV_I420: int
+COLOR_BGRA2YUV_I420: int
+COLOR_RGBA2YUV_IYUV: int
+COLOR_BGRA2YUV_IYUV: int
+COLOR_RGB2YUV_YV12: int
+COLOR_BGR2YUV_YV12: int
+COLOR_RGBA2YUV_YV12: int
+COLOR_BGRA2YUV_YV12: int
+COLOR_BayerBG2BGR: int
+COLOR_BAYER_BG2BGR: int
+COLOR_BayerGB2BGR: int
+COLOR_BAYER_GB2BGR: int
+COLOR_BayerRG2BGR: int
+COLOR_BAYER_RG2BGR: int
+COLOR_BayerGR2BGR: int
+COLOR_BAYER_GR2BGR: int
+COLOR_BayerRGGB2BGR: int
+COLOR_BAYER_RGGB2BGR: int
+COLOR_BayerGRBG2BGR: int
+COLOR_BAYER_GRBG2BGR: int
+COLOR_BayerBGGR2BGR: int
+COLOR_BAYER_BGGR2BGR: int
+COLOR_BayerGBRG2BGR: int
+COLOR_BAYER_GBRG2BGR: int
+COLOR_BayerRGGB2RGB: int
+COLOR_BAYER_RGGB2RGB: int
+COLOR_BayerGRBG2RGB: int
+COLOR_BAYER_GRBG2RGB: int
+COLOR_BayerBGGR2RGB: int
+COLOR_BAYER_BGGR2RGB: int
+COLOR_BayerGBRG2RGB: int
+COLOR_BAYER_GBRG2RGB: int
+COLOR_BayerBG2RGB: int
+COLOR_BAYER_BG2RGB: int
+COLOR_BayerGB2RGB: int
+COLOR_BAYER_GB2RGB: int
+COLOR_BayerRG2RGB: int
+COLOR_BAYER_RG2RGB: int
+COLOR_BayerGR2RGB: int
+COLOR_BAYER_GR2RGB: int
+COLOR_BayerBG2GRAY: int
+COLOR_BAYER_BG2GRAY: int
+COLOR_BayerGB2GRAY: int
+COLOR_BAYER_GB2GRAY: int
+COLOR_BayerRG2GRAY: int
+COLOR_BAYER_RG2GRAY: int
+COLOR_BayerGR2GRAY: int
+COLOR_BAYER_GR2GRAY: int
+COLOR_BayerRGGB2GRAY: int
+COLOR_BAYER_RGGB2GRAY: int
+COLOR_BayerGRBG2GRAY: int
+COLOR_BAYER_GRBG2GRAY: int
+COLOR_BayerBGGR2GRAY: int
+COLOR_BAYER_BGGR2GRAY: int
+COLOR_BayerGBRG2GRAY: int
+COLOR_BAYER_GBRG2GRAY: int
+COLOR_BayerBG2BGR_VNG: int
+COLOR_BAYER_BG2BGR_VNG: int
+COLOR_BayerGB2BGR_VNG: int
+COLOR_BAYER_GB2BGR_VNG: int
+COLOR_BayerRG2BGR_VNG: int
+COLOR_BAYER_RG2BGR_VNG: int
+COLOR_BayerGR2BGR_VNG: int
+COLOR_BAYER_GR2BGR_VNG: int
+COLOR_BayerRGGB2BGR_VNG: int
+COLOR_BAYER_RGGB2BGR_VNG: int
+COLOR_BayerGRBG2BGR_VNG: int
+COLOR_BAYER_GRBG2BGR_VNG: int
+COLOR_BayerBGGR2BGR_VNG: int
+COLOR_BAYER_BGGR2BGR_VNG: int
+COLOR_BayerGBRG2BGR_VNG: int
+COLOR_BAYER_GBRG2BGR_VNG: int
+COLOR_BayerRGGB2RGB_VNG: int
+COLOR_BAYER_RGGB2RGB_VNG: int
+COLOR_BayerGRBG2RGB_VNG: int
+COLOR_BAYER_GRBG2RGB_VNG: int
+COLOR_BayerBGGR2RGB_VNG: int
+COLOR_BAYER_BGGR2RGB_VNG: int
+COLOR_BayerGBRG2RGB_VNG: int
+COLOR_BAYER_GBRG2RGB_VNG: int
+COLOR_BayerBG2RGB_VNG: int
+COLOR_BAYER_BG2RGB_VNG: int
+COLOR_BayerGB2RGB_VNG: int
+COLOR_BAYER_GB2RGB_VNG: int
+COLOR_BayerRG2RGB_VNG: int
+COLOR_BAYER_RG2RGB_VNG: int
+COLOR_BayerGR2RGB_VNG: int
+COLOR_BAYER_GR2RGB_VNG: int
+COLOR_BayerBG2BGR_EA: int
+COLOR_BAYER_BG2BGR_EA: int
+COLOR_BayerGB2BGR_EA: int
+COLOR_BAYER_GB2BGR_EA: int
+COLOR_BayerRG2BGR_EA: int
+COLOR_BAYER_RG2BGR_EA: int
+COLOR_BayerGR2BGR_EA: int
+COLOR_BAYER_GR2BGR_EA: int
+COLOR_BayerRGGB2BGR_EA: int
+COLOR_BAYER_RGGB2BGR_EA: int
+COLOR_BayerGRBG2BGR_EA: int
+COLOR_BAYER_GRBG2BGR_EA: int
+COLOR_BayerBGGR2BGR_EA: int
+COLOR_BAYER_BGGR2BGR_EA: int
+COLOR_BayerGBRG2BGR_EA: int
+COLOR_BAYER_GBRG2BGR_EA: int
+COLOR_BayerRGGB2RGB_EA: int
+COLOR_BAYER_RGGB2RGB_EA: int
+COLOR_BayerGRBG2RGB_EA: int
+COLOR_BAYER_GRBG2RGB_EA: int
+COLOR_BayerBGGR2RGB_EA: int
+COLOR_BAYER_BGGR2RGB_EA: int
+COLOR_BayerGBRG2RGB_EA: int
+COLOR_BAYER_GBRG2RGB_EA: int
+COLOR_BayerBG2RGB_EA: int
+COLOR_BAYER_BG2RGB_EA: int
+COLOR_BayerGB2RGB_EA: int
+COLOR_BAYER_GB2RGB_EA: int
+COLOR_BayerRG2RGB_EA: int
+COLOR_BAYER_RG2RGB_EA: int
+COLOR_BayerGR2RGB_EA: int
+COLOR_BAYER_GR2RGB_EA: int
+COLOR_BayerBG2BGRA: int
+COLOR_BAYER_BG2BGRA: int
+COLOR_BayerGB2BGRA: int
+COLOR_BAYER_GB2BGRA: int
+COLOR_BayerRG2BGRA: int
+COLOR_BAYER_RG2BGRA: int
+COLOR_BayerGR2BGRA: int
+COLOR_BAYER_GR2BGRA: int
+COLOR_BayerRGGB2BGRA: int
+COLOR_BAYER_RGGB2BGRA: int
+COLOR_BayerGRBG2BGRA: int
+COLOR_BAYER_GRBG2BGRA: int
+COLOR_BayerBGGR2BGRA: int
+COLOR_BAYER_BGGR2BGRA: int
+COLOR_BayerGBRG2BGRA: int
+COLOR_BAYER_GBRG2BGRA: int
+COLOR_BayerRGGB2RGBA: int
+COLOR_BAYER_RGGB2RGBA: int
+COLOR_BayerGRBG2RGBA: int
+COLOR_BAYER_GRBG2RGBA: int
+COLOR_BayerBGGR2RGBA: int
+COLOR_BAYER_BGGR2RGBA: int
+COLOR_BayerGBRG2RGBA: int
+COLOR_BAYER_GBRG2RGBA: int
+COLOR_BayerBG2RGBA: int
+COLOR_BAYER_BG2RGBA: int
+COLOR_BayerGB2RGBA: int
+COLOR_BAYER_GB2RGBA: int
+COLOR_BayerRG2RGBA: int
+COLOR_BAYER_RG2RGBA: int
+COLOR_BayerGR2RGBA: int
+COLOR_BAYER_GR2RGBA: int
+COLOR_RGB2YUV_UYVY: int
+COLOR_BGR2YUV_UYVY: int
+COLOR_RGB2YUV_Y422: int
+COLOR_BGR2YUV_Y422: int
+COLOR_RGB2YUV_UYNV: int
+COLOR_BGR2YUV_UYNV: int
+COLOR_RGBA2YUV_UYVY: int
+COLOR_BGRA2YUV_UYVY: int
+COLOR_RGBA2YUV_Y422: int
+COLOR_BGRA2YUV_Y422: int
+COLOR_RGBA2YUV_UYNV: int
+COLOR_BGRA2YUV_UYNV: int
+COLOR_RGB2YUV_YUY2: int
+COLOR_BGR2YUV_YUY2: int
+COLOR_RGB2YUV_YVYU: int
+COLOR_BGR2YUV_YVYU: int
+COLOR_RGB2YUV_YUYV: int
+COLOR_BGR2YUV_YUYV: int
+COLOR_RGB2YUV_YUNV: int
+COLOR_BGR2YUV_YUNV: int
+COLOR_RGBA2YUV_YUY2: int
+COLOR_BGRA2YUV_YUY2: int
+COLOR_RGBA2YUV_YVYU: int
+COLOR_BGRA2YUV_YVYU: int
+COLOR_RGBA2YUV_YUYV: int
+COLOR_BGRA2YUV_YUYV: int
+COLOR_RGBA2YUV_YUNV: int
+COLOR_BGRA2YUV_YUNV: int
+COLOR_COLORCVT_MAX: int
+ColorConversionCodes = int
+"""One of [COLOR_BGR2BGRA, COLOR_RGB2RGBA, COLOR_BGRA2BGR, COLOR_RGBA2RGB, COLOR_BGR2RGBA, COLOR_RGB2BGRA, COLOR_RGBA2BGR, COLOR_BGRA2RGB, COLOR_BGR2RGB, COLOR_RGB2BGR, COLOR_BGRA2RGBA, COLOR_RGBA2BGRA, COLOR_BGR2GRAY, COLOR_RGB2GRAY, COLOR_GRAY2BGR, COLOR_GRAY2RGB, COLOR_GRAY2BGRA, COLOR_GRAY2RGBA, COLOR_BGRA2GRAY, COLOR_RGBA2GRAY, COLOR_BGR2BGR565, COLOR_RGB2BGR565, COLOR_BGR5652BGR, COLOR_BGR5652RGB, COLOR_BGRA2BGR565, COLOR_RGBA2BGR565, COLOR_BGR5652BGRA, COLOR_BGR5652RGBA, COLOR_GRAY2BGR565, COLOR_BGR5652GRAY, COLOR_BGR2BGR555, COLOR_RGB2BGR555, COLOR_BGR5552BGR, COLOR_BGR5552RGB, COLOR_BGRA2BGR555, COLOR_RGBA2BGR555, COLOR_BGR5552BGRA, COLOR_BGR5552RGBA, COLOR_GRAY2BGR555, COLOR_BGR5552GRAY, COLOR_BGR2XYZ, COLOR_RGB2XYZ, COLOR_XYZ2BGR, COLOR_XYZ2RGB, COLOR_BGR2YCrCb, COLOR_BGR2YCR_CB, COLOR_RGB2YCrCb, COLOR_RGB2YCR_CB, COLOR_YCrCb2BGR, COLOR_YCR_CB2BGR, COLOR_YCrCb2RGB, COLOR_YCR_CB2RGB, COLOR_BGR2HSV, COLOR_RGB2HSV, COLOR_BGR2Lab, COLOR_BGR2LAB, COLOR_RGB2Lab, COLOR_RGB2LAB, COLOR_BGR2Luv, COLOR_BGR2LUV, COLOR_RGB2Luv, COLOR_RGB2LUV, COLOR_BGR2HLS, COLOR_RGB2HLS, COLOR_HSV2BGR, COLOR_HSV2RGB, COLOR_Lab2BGR, COLOR_LAB2BGR, COLOR_Lab2RGB, COLOR_LAB2RGB, COLOR_Luv2BGR, COLOR_LUV2BGR, COLOR_Luv2RGB, COLOR_LUV2RGB, COLOR_HLS2BGR, COLOR_HLS2RGB, COLOR_BGR2HSV_FULL, COLOR_RGB2HSV_FULL, COLOR_BGR2HLS_FULL, COLOR_RGB2HLS_FULL, COLOR_HSV2BGR_FULL, COLOR_HSV2RGB_FULL, COLOR_HLS2BGR_FULL, COLOR_HLS2RGB_FULL, COLOR_LBGR2Lab, COLOR_LBGR2LAB, COLOR_LRGB2Lab, COLOR_LRGB2LAB, COLOR_LBGR2Luv, COLOR_LBGR2LUV, COLOR_LRGB2Luv, COLOR_LRGB2LUV, COLOR_Lab2LBGR, COLOR_LAB2LBGR, COLOR_Lab2LRGB, COLOR_LAB2LRGB, COLOR_Luv2LBGR, COLOR_LUV2LBGR, COLOR_Luv2LRGB, COLOR_LUV2LRGB, COLOR_BGR2YUV, COLOR_RGB2YUV, COLOR_YUV2BGR, COLOR_YUV2RGB, COLOR_YUV2RGB_NV12, COLOR_YUV2BGR_NV12, COLOR_YUV2RGB_NV21, COLOR_YUV2BGR_NV21, COLOR_YUV420sp2RGB, COLOR_YUV420SP2RGB, COLOR_YUV420sp2BGR, COLOR_YUV420SP2BGR, COLOR_YUV2RGBA_NV12, COLOR_YUV2BGRA_NV12, COLOR_YUV2RGBA_NV21, COLOR_YUV2BGRA_NV21, 
+COLOR_YUV420sp2RGBA, COLOR_YUV420SP2RGBA, COLOR_YUV420sp2BGRA, COLOR_YUV420SP2BGRA, COLOR_YUV2RGB_YV12, COLOR_YUV2BGR_YV12, COLOR_YUV2RGB_IYUV, COLOR_YUV2BGR_IYUV, COLOR_YUV2RGB_I420, COLOR_YUV2BGR_I420, COLOR_YUV420p2RGB, COLOR_YUV420P2RGB, COLOR_YUV420p2BGR, COLOR_YUV420P2BGR, COLOR_YUV2RGBA_YV12, COLOR_YUV2BGRA_YV12, COLOR_YUV2RGBA_IYUV, COLOR_YUV2BGRA_IYUV, COLOR_YUV2RGBA_I420, COLOR_YUV2BGRA_I420, COLOR_YUV420p2RGBA, COLOR_YUV420P2RGBA, COLOR_YUV420p2BGRA, COLOR_YUV420P2BGRA, COLOR_YUV2GRAY_420, COLOR_YUV2GRAY_NV21, COLOR_YUV2GRAY_NV12, COLOR_YUV2GRAY_YV12, COLOR_YUV2GRAY_IYUV, COLOR_YUV2GRAY_I420, COLOR_YUV420sp2GRAY, COLOR_YUV420SP2GRAY, COLOR_YUV420p2GRAY, COLOR_YUV420P2GRAY, COLOR_YUV2RGB_UYVY, COLOR_YUV2BGR_UYVY, COLOR_YUV2RGB_Y422, COLOR_YUV2BGR_Y422, COLOR_YUV2RGB_UYNV, COLOR_YUV2BGR_UYNV, COLOR_YUV2RGBA_UYVY, COLOR_YUV2BGRA_UYVY, COLOR_YUV2RGBA_Y422, COLOR_YUV2BGRA_Y422, COLOR_YUV2RGBA_UYNV, COLOR_YUV2BGRA_UYNV, COLOR_YUV2RGB_YUY2, COLOR_YUV2BGR_YUY2, COLOR_YUV2RGB_YVYU, COLOR_YUV2BGR_YVYU, COLOR_YUV2RGB_YUYV, COLOR_YUV2BGR_YUYV, COLOR_YUV2RGB_YUNV, COLOR_YUV2BGR_YUNV, COLOR_YUV2RGBA_YUY2, COLOR_YUV2BGRA_YUY2, COLOR_YUV2RGBA_YVYU, COLOR_YUV2BGRA_YVYU, COLOR_YUV2RGBA_YUYV, COLOR_YUV2BGRA_YUYV, COLOR_YUV2RGBA_YUNV, COLOR_YUV2BGRA_YUNV, COLOR_YUV2GRAY_UYVY, COLOR_YUV2GRAY_YUY2, COLOR_YUV2GRAY_Y422, COLOR_YUV2GRAY_UYNV, COLOR_YUV2GRAY_YVYU, COLOR_YUV2GRAY_YUYV, COLOR_YUV2GRAY_YUNV, COLOR_RGBA2mRGBA, COLOR_RGBA2M_RGBA, COLOR_mRGBA2RGBA, COLOR_M_RGBA2RGBA, COLOR_RGB2YUV_I420, COLOR_BGR2YUV_I420, COLOR_RGB2YUV_IYUV, COLOR_BGR2YUV_IYUV, COLOR_RGBA2YUV_I420, COLOR_BGRA2YUV_I420, COLOR_RGBA2YUV_IYUV, COLOR_BGRA2YUV_IYUV, COLOR_RGB2YUV_YV12, COLOR_BGR2YUV_YV12, COLOR_RGBA2YUV_YV12, COLOR_BGRA2YUV_YV12, COLOR_BayerBG2BGR, COLOR_BAYER_BG2BGR, COLOR_BayerGB2BGR, COLOR_BAYER_GB2BGR, COLOR_BayerRG2BGR, COLOR_BAYER_RG2BGR, COLOR_BayerGR2BGR, COLOR_BAYER_GR2BGR, COLOR_BayerRGGB2BGR, COLOR_BAYER_RGGB2BGR, COLOR_BayerGRBG2BGR, COLOR_BAYER_GRBG2BGR, COLOR_BayerBGGR2BGR, 
+COLOR_BAYER_BGGR2BGR, COLOR_BayerGBRG2BGR, COLOR_BAYER_GBRG2BGR, COLOR_BayerRGGB2RGB, COLOR_BAYER_RGGB2RGB, COLOR_BayerGRBG2RGB, COLOR_BAYER_GRBG2RGB, COLOR_BayerBGGR2RGB, COLOR_BAYER_BGGR2RGB, COLOR_BayerGBRG2RGB, COLOR_BAYER_GBRG2RGB, COLOR_BayerBG2RGB, COLOR_BAYER_BG2RGB, COLOR_BayerGB2RGB, COLOR_BAYER_GB2RGB, COLOR_BayerRG2RGB, COLOR_BAYER_RG2RGB, COLOR_BayerGR2RGB, COLOR_BAYER_GR2RGB, COLOR_BayerBG2GRAY, COLOR_BAYER_BG2GRAY, COLOR_BayerGB2GRAY, COLOR_BAYER_GB2GRAY, COLOR_BayerRG2GRAY, COLOR_BAYER_RG2GRAY, COLOR_BayerGR2GRAY, COLOR_BAYER_GR2GRAY, COLOR_BayerRGGB2GRAY, COLOR_BAYER_RGGB2GRAY, COLOR_BayerGRBG2GRAY, COLOR_BAYER_GRBG2GRAY, COLOR_BayerBGGR2GRAY, COLOR_BAYER_BGGR2GRAY, COLOR_BayerGBRG2GRAY, COLOR_BAYER_GBRG2GRAY, COLOR_BayerBG2BGR_VNG, COLOR_BAYER_BG2BGR_VNG, COLOR_BayerGB2BGR_VNG, COLOR_BAYER_GB2BGR_VNG, COLOR_BayerRG2BGR_VNG, COLOR_BAYER_RG2BGR_VNG, COLOR_BayerGR2BGR_VNG, COLOR_BAYER_GR2BGR_VNG, COLOR_BayerRGGB2BGR_VNG, COLOR_BAYER_RGGB2BGR_VNG, COLOR_BayerGRBG2BGR_VNG, COLOR_BAYER_GRBG2BGR_VNG, COLOR_BayerBGGR2BGR_VNG, COLOR_BAYER_BGGR2BGR_VNG, COLOR_BayerGBRG2BGR_VNG, COLOR_BAYER_GBRG2BGR_VNG, COLOR_BayerRGGB2RGB_VNG, COLOR_BAYER_RGGB2RGB_VNG, COLOR_BayerGRBG2RGB_VNG, COLOR_BAYER_GRBG2RGB_VNG, COLOR_BayerBGGR2RGB_VNG, COLOR_BAYER_BGGR2RGB_VNG, COLOR_BayerGBRG2RGB_VNG, COLOR_BAYER_GBRG2RGB_VNG, COLOR_BayerBG2RGB_VNG, COLOR_BAYER_BG2RGB_VNG, COLOR_BayerGB2RGB_VNG, COLOR_BAYER_GB2RGB_VNG, COLOR_BayerRG2RGB_VNG, COLOR_BAYER_RG2RGB_VNG, COLOR_BayerGR2RGB_VNG, COLOR_BAYER_GR2RGB_VNG, COLOR_BayerBG2BGR_EA, COLOR_BAYER_BG2BGR_EA, COLOR_BayerGB2BGR_EA, COLOR_BAYER_GB2BGR_EA, COLOR_BayerRG2BGR_EA, COLOR_BAYER_RG2BGR_EA, COLOR_BayerGR2BGR_EA, COLOR_BAYER_GR2BGR_EA, COLOR_BayerRGGB2BGR_EA, COLOR_BAYER_RGGB2BGR_EA, COLOR_BayerGRBG2BGR_EA, COLOR_BAYER_GRBG2BGR_EA, COLOR_BayerBGGR2BGR_EA, COLOR_BAYER_BGGR2BGR_EA, COLOR_BayerGBRG2BGR_EA, COLOR_BAYER_GBRG2BGR_EA, COLOR_BayerRGGB2RGB_EA, COLOR_BAYER_RGGB2RGB_EA, COLOR_BayerGRBG2RGB_EA, COLOR_BAYER_GRBG2RGB_EA, 
+COLOR_BayerBGGR2RGB_EA, COLOR_BAYER_BGGR2RGB_EA, COLOR_BayerGBRG2RGB_EA, COLOR_BAYER_GBRG2RGB_EA, COLOR_BayerBG2RGB_EA, COLOR_BAYER_BG2RGB_EA, COLOR_BayerGB2RGB_EA, COLOR_BAYER_GB2RGB_EA, COLOR_BayerRG2RGB_EA, COLOR_BAYER_RG2RGB_EA, COLOR_BayerGR2RGB_EA, COLOR_BAYER_GR2RGB_EA, COLOR_BayerBG2BGRA, COLOR_BAYER_BG2BGRA, COLOR_BayerGB2BGRA, COLOR_BAYER_GB2BGRA, COLOR_BayerRG2BGRA, COLOR_BAYER_RG2BGRA, COLOR_BayerGR2BGRA, COLOR_BAYER_GR2BGRA, COLOR_BayerRGGB2BGRA, COLOR_BAYER_RGGB2BGRA, COLOR_BayerGRBG2BGRA, COLOR_BAYER_GRBG2BGRA, COLOR_BayerBGGR2BGRA, COLOR_BAYER_BGGR2BGRA, COLOR_BayerGBRG2BGRA, COLOR_BAYER_GBRG2BGRA, COLOR_BayerRGGB2RGBA, COLOR_BAYER_RGGB2RGBA, COLOR_BayerGRBG2RGBA, COLOR_BAYER_GRBG2RGBA, COLOR_BayerBGGR2RGBA, COLOR_BAYER_BGGR2RGBA, COLOR_BayerGBRG2RGBA, COLOR_BAYER_GBRG2RGBA, COLOR_BayerBG2RGBA, COLOR_BAYER_BG2RGBA, COLOR_BayerGB2RGBA, COLOR_BAYER_GB2RGBA, COLOR_BayerRG2RGBA, COLOR_BAYER_RG2RGBA, COLOR_BayerGR2RGBA, COLOR_BAYER_GR2RGBA, COLOR_RGB2YUV_UYVY, COLOR_BGR2YUV_UYVY, COLOR_RGB2YUV_Y422, COLOR_BGR2YUV_Y422, COLOR_RGB2YUV_UYNV, COLOR_BGR2YUV_UYNV, COLOR_RGBA2YUV_UYVY, COLOR_BGRA2YUV_UYVY, COLOR_RGBA2YUV_Y422, COLOR_BGRA2YUV_Y422, COLOR_RGBA2YUV_UYNV, COLOR_BGRA2YUV_UYNV, COLOR_RGB2YUV_YUY2, COLOR_BGR2YUV_YUY2, COLOR_RGB2YUV_YVYU, COLOR_BGR2YUV_YVYU, COLOR_RGB2YUV_YUYV, COLOR_BGR2YUV_YUYV, COLOR_RGB2YUV_YUNV, COLOR_BGR2YUV_YUNV, COLOR_RGBA2YUV_YUY2, COLOR_BGRA2YUV_YUY2, COLOR_RGBA2YUV_YVYU, COLOR_BGRA2YUV_YVYU, COLOR_RGBA2YUV_YUYV, COLOR_BGRA2YUV_YUYV, COLOR_RGBA2YUV_YUNV, COLOR_BGRA2YUV_YUNV, COLOR_COLORCVT_MAX]"""
+
+INTERSECT_NONE: int
+INTERSECT_PARTIAL: int
+INTERSECT_FULL: int
+RectanglesIntersectTypes = int
+"""One of [INTERSECT_NONE, INTERSECT_PARTIAL, INTERSECT_FULL]"""
+
+FILLED: int
+LINE_4: int
+LINE_8: int
+LINE_AA: int
+LineTypes = int
+"""One of [FILLED, LINE_4, LINE_8, LINE_AA]"""
+
+FONT_HERSHEY_SIMPLEX: int
+FONT_HERSHEY_PLAIN: int
+FONT_HERSHEY_DUPLEX: int
+FONT_HERSHEY_COMPLEX: int
+FONT_HERSHEY_TRIPLEX: int
+FONT_HERSHEY_COMPLEX_SMALL: int
+FONT_HERSHEY_SCRIPT_SIMPLEX: int
+FONT_HERSHEY_SCRIPT_COMPLEX: int
+FONT_ITALIC: int
+HersheyFonts = int
+"""One of [FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PLAIN, FONT_HERSHEY_DUPLEX, FONT_HERSHEY_COMPLEX, FONT_HERSHEY_TRIPLEX, FONT_HERSHEY_COMPLEX_SMALL, FONT_HERSHEY_SCRIPT_SIMPLEX, FONT_HERSHEY_SCRIPT_COMPLEX, FONT_ITALIC]"""
+
+MARKER_CROSS: int
+MARKER_TILTED_CROSS: int
+MARKER_STAR: int
+MARKER_DIAMOND: int
+MARKER_SQUARE: int
+MARKER_TRIANGLE_UP: int
+MARKER_TRIANGLE_DOWN: int
+MarkerTypes = int
+"""One of [MARKER_CROSS, MARKER_TILTED_CROSS, MARKER_STAR, MARKER_DIAMOND, MARKER_SQUARE, MARKER_TRIANGLE_UP, MARKER_TRIANGLE_DOWN]"""
+
+TM_SQDIFF: int
+TM_SQDIFF_NORMED: int
+TM_CCORR: int
+TM_CCORR_NORMED: int
+TM_CCOEFF: int
+TM_CCOEFF_NORMED: int
+TemplateMatchModes = int
+"""One of [TM_SQDIFF, TM_SQDIFF_NORMED, TM_CCORR, TM_CCORR_NORMED, TM_CCOEFF, TM_CCOEFF_NORMED]"""
+
+COLORMAP_AUTUMN: int
+COLORMAP_BONE: int
+COLORMAP_JET: int
+COLORMAP_WINTER: int
+COLORMAP_RAINBOW: int
+COLORMAP_OCEAN: int
+COLORMAP_SUMMER: int
+COLORMAP_SPRING: int
+COLORMAP_COOL: int
+COLORMAP_HSV: int
+COLORMAP_PINK: int
+COLORMAP_HOT: int
+COLORMAP_PARULA: int
+COLORMAP_MAGMA: int
+COLORMAP_INFERNO: int
+COLORMAP_PLASMA: int
+COLORMAP_VIRIDIS: int
+COLORMAP_CIVIDIS: int
+COLORMAP_TWILIGHT: int
+COLORMAP_TWILIGHT_SHIFTED: int
+COLORMAP_TURBO: int
+COLORMAP_DEEPGREEN: int
+ColormapTypes = int
+"""One of [COLORMAP_AUTUMN, COLORMAP_BONE, COLORMAP_JET, COLORMAP_WINTER, COLORMAP_RAINBOW, COLORMAP_OCEAN, COLORMAP_SUMMER, COLORMAP_SPRING, COLORMAP_COOL, COLORMAP_HSV, COLORMAP_PINK, COLORMAP_HOT, COLORMAP_PARULA, COLORMAP_MAGMA, COLORMAP_INFERNO, COLORMAP_PLASMA, COLORMAP_VIRIDIS, COLORMAP_CIVIDIS, COLORMAP_TWILIGHT, COLORMAP_TWILIGHT_SHIFTED, COLORMAP_TURBO, COLORMAP_DEEPGREEN]"""
+
+INPAINT_NS: int
+INPAINT_TELEA: int
+LDR_SIZE: int
+NORMAL_CLONE: int
+MIXED_CLONE: int
+MONOCHROME_TRANSFER: int
+RECURS_FILTER: int
+NORMCONV_FILTER: int
+CAP_PROP_DC1394_OFF: int
+CAP_PROP_DC1394_MODE_MANUAL: int
+CAP_PROP_DC1394_MODE_AUTO: int
+CAP_PROP_DC1394_MODE_ONE_PUSH_AUTO: int
+CAP_PROP_DC1394_MAX: int
+CAP_OPENNI_DEPTH_GENERATOR: int
+CAP_OPENNI_IMAGE_GENERATOR: int
+CAP_OPENNI_IR_GENERATOR: int
+CAP_OPENNI_GENERATORS_MASK: int
+CAP_PROP_OPENNI_OUTPUT_MODE: int
+CAP_PROP_OPENNI_FRAME_MAX_DEPTH: int
+CAP_PROP_OPENNI_BASELINE: int
+CAP_PROP_OPENNI_FOCAL_LENGTH: int
+CAP_PROP_OPENNI_REGISTRATION: int
+CAP_PROP_OPENNI_REGISTRATION_ON: int
+CAP_PROP_OPENNI_APPROX_FRAME_SYNC: int
+CAP_PROP_OPENNI_MAX_BUFFER_SIZE: int
+CAP_PROP_OPENNI_CIRCLE_BUFFER: int
+CAP_PROP_OPENNI_MAX_TIME_DURATION: int
+CAP_PROP_OPENNI_GENERATOR_PRESENT: int
+CAP_PROP_OPENNI2_SYNC: int
+CAP_PROP_OPENNI2_MIRROR: int
+CAP_OPENNI_IMAGE_GENERATOR_PRESENT: int
+CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE: int
+CAP_OPENNI_DEPTH_GENERATOR_PRESENT: int
+CAP_OPENNI_DEPTH_GENERATOR_BASELINE: int
+CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH: int
+CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION: int
+CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON: int
+CAP_OPENNI_IR_GENERATOR_PRESENT: int
+CAP_OPENNI_DEPTH_MAP: int
+CAP_OPENNI_POINT_CLOUD_MAP: int
+CAP_OPENNI_DISPARITY_MAP: int
+CAP_OPENNI_DISPARITY_MAP_32F: int
+CAP_OPENNI_VALID_DEPTH_MASK: int
+CAP_OPENNI_BGR_IMAGE: int
+CAP_OPENNI_GRAY_IMAGE: int
+CAP_OPENNI_IR_IMAGE: int
+CAP_OPENNI_VGA_30HZ: int
+CAP_OPENNI_SXGA_15HZ: int
+CAP_OPENNI_SXGA_30HZ: int
+CAP_OPENNI_QVGA_30HZ: int
+CAP_OPENNI_QVGA_60HZ: int
+CAP_PROP_GSTREAMER_QUEUE_LENGTH: int
+CAP_PROP_PVAPI_MULTICASTIP: int
+CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE: int
+CAP_PROP_PVAPI_DECIMATIONHORIZONTAL: int
+CAP_PROP_PVAPI_DECIMATIONVERTICAL: int
+CAP_PROP_PVAPI_BINNINGX: int
+CAP_PROP_PVAPI_BINNINGY: int
+CAP_PROP_PVAPI_PIXELFORMAT: int
+CAP_PVAPI_FSTRIGMODE_FREERUN: int
+CAP_PVAPI_FSTRIGMODE_SYNCIN1: int
+CAP_PVAPI_FSTRIGMODE_SYNCIN2: int
+CAP_PVAPI_FSTRIGMODE_FIXEDRATE: int
+CAP_PVAPI_FSTRIGMODE_SOFTWARE: int
+CAP_PVAPI_DECIMATION_OFF: int
+CAP_PVAPI_DECIMATION_2OUTOF4: int
+CAP_PVAPI_DECIMATION_2OUTOF8: int
+CAP_PVAPI_DECIMATION_2OUTOF16: int
+CAP_PVAPI_PIXELFORMAT_MONO8: int
+CAP_PVAPI_PIXELFORMAT_MONO16: int
+CAP_PVAPI_PIXELFORMAT_BAYER8: int
+CAP_PVAPI_PIXELFORMAT_BAYER16: int
+CAP_PVAPI_PIXELFORMAT_RGB24: int
+CAP_PVAPI_PIXELFORMAT_BGR24: int
+CAP_PVAPI_PIXELFORMAT_RGBA32: int
+CAP_PVAPI_PIXELFORMAT_BGRA32: int
+CAP_PROP_XI_DOWNSAMPLING: int
+CAP_PROP_XI_DATA_FORMAT: int
+CAP_PROP_XI_OFFSET_X: int
+CAP_PROP_XI_OFFSET_Y: int
+CAP_PROP_XI_TRG_SOURCE: int
+CAP_PROP_XI_TRG_SOFTWARE: int
+CAP_PROP_XI_GPI_SELECTOR: int
+CAP_PROP_XI_GPI_MODE: int
+CAP_PROP_XI_GPI_LEVEL: int
+CAP_PROP_XI_GPO_SELECTOR: int
+CAP_PROP_XI_GPO_MODE: int
+CAP_PROP_XI_LED_SELECTOR: int
+CAP_PROP_XI_LED_MODE: int
+CAP_PROP_XI_MANUAL_WB: int
+CAP_PROP_XI_AUTO_WB: int
+CAP_PROP_XI_AEAG: int
+CAP_PROP_XI_EXP_PRIORITY: int
+CAP_PROP_XI_AE_MAX_LIMIT: int
+CAP_PROP_XI_AG_MAX_LIMIT: int
+CAP_PROP_XI_AEAG_LEVEL: int
+CAP_PROP_XI_TIMEOUT: int
+CAP_PROP_XI_EXPOSURE: int
+CAP_PROP_XI_EXPOSURE_BURST_COUNT: int
+CAP_PROP_XI_GAIN_SELECTOR: int
+CAP_PROP_XI_GAIN: int
+CAP_PROP_XI_DOWNSAMPLING_TYPE: int
+CAP_PROP_XI_BINNING_SELECTOR: int
+CAP_PROP_XI_BINNING_VERTICAL: int
+CAP_PROP_XI_BINNING_HORIZONTAL: int
+CAP_PROP_XI_BINNING_PATTERN: int
+CAP_PROP_XI_DECIMATION_SELECTOR: int
+CAP_PROP_XI_DECIMATION_VERTICAL: int
+CAP_PROP_XI_DECIMATION_HORIZONTAL: int
+CAP_PROP_XI_DECIMATION_PATTERN: int
+CAP_PROP_XI_TEST_PATTERN_GENERATOR_SELECTOR: int
+CAP_PROP_XI_TEST_PATTERN: int
+CAP_PROP_XI_IMAGE_DATA_FORMAT: int
+CAP_PROP_XI_SHUTTER_TYPE: int
+CAP_PROP_XI_SENSOR_TAPS: int
+CAP_PROP_XI_AEAG_ROI_OFFSET_X: int
+CAP_PROP_XI_AEAG_ROI_OFFSET_Y: int
+CAP_PROP_XI_AEAG_ROI_WIDTH: int
+CAP_PROP_XI_AEAG_ROI_HEIGHT: int
+CAP_PROP_XI_BPC: int
+CAP_PROP_XI_WB_KR: int
+CAP_PROP_XI_WB_KG: int
+CAP_PROP_XI_WB_KB: int
+CAP_PROP_XI_WIDTH: int
+CAP_PROP_XI_HEIGHT: int
+CAP_PROP_XI_REGION_SELECTOR: int
+CAP_PROP_XI_REGION_MODE: int
+CAP_PROP_XI_LIMIT_BANDWIDTH: int
+CAP_PROP_XI_SENSOR_DATA_BIT_DEPTH: int
+CAP_PROP_XI_OUTPUT_DATA_BIT_DEPTH: int
+CAP_PROP_XI_IMAGE_DATA_BIT_DEPTH: int
+CAP_PROP_XI_OUTPUT_DATA_PACKING: int
+CAP_PROP_XI_OUTPUT_DATA_PACKING_TYPE: int
+CAP_PROP_XI_IS_COOLED: int
+CAP_PROP_XI_COOLING: int
+CAP_PROP_XI_TARGET_TEMP: int
+CAP_PROP_XI_CHIP_TEMP: int
+CAP_PROP_XI_HOUS_TEMP: int
+CAP_PROP_XI_HOUS_BACK_SIDE_TEMP: int
+CAP_PROP_XI_SENSOR_BOARD_TEMP: int
+CAP_PROP_XI_CMS: int
+CAP_PROP_XI_APPLY_CMS: int
+CAP_PROP_XI_IMAGE_IS_COLOR: int
+CAP_PROP_XI_COLOR_FILTER_ARRAY: int
+CAP_PROP_XI_GAMMAY: int
+CAP_PROP_XI_GAMMAC: int
+CAP_PROP_XI_SHARPNESS: int
+CAP_PROP_XI_CC_MATRIX_00: int
+CAP_PROP_XI_CC_MATRIX_01: int
+CAP_PROP_XI_CC_MATRIX_02: int
+CAP_PROP_XI_CC_MATRIX_03: int
+CAP_PROP_XI_CC_MATRIX_10: int
+CAP_PROP_XI_CC_MATRIX_11: int
+CAP_PROP_XI_CC_MATRIX_12: int
+CAP_PROP_XI_CC_MATRIX_13: int
+CAP_PROP_XI_CC_MATRIX_20: int
+CAP_PROP_XI_CC_MATRIX_21: int
+CAP_PROP_XI_CC_MATRIX_22: int
+CAP_PROP_XI_CC_MATRIX_23: int
+CAP_PROP_XI_CC_MATRIX_30: int
+CAP_PROP_XI_CC_MATRIX_31: int
+CAP_PROP_XI_CC_MATRIX_32: int
+CAP_PROP_XI_CC_MATRIX_33: int
+CAP_PROP_XI_DEFAULT_CC_MATRIX: int
+CAP_PROP_XI_TRG_SELECTOR: int
+CAP_PROP_XI_ACQ_FRAME_BURST_COUNT: int
+CAP_PROP_XI_DEBOUNCE_EN: int
+CAP_PROP_XI_DEBOUNCE_T0: int
+CAP_PROP_XI_DEBOUNCE_T1: int
+CAP_PROP_XI_DEBOUNCE_POL: int
+CAP_PROP_XI_LENS_MODE: int
+CAP_PROP_XI_LENS_APERTURE_VALUE: int
+CAP_PROP_XI_LENS_FOCUS_MOVEMENT_VALUE: int
+CAP_PROP_XI_LENS_FOCUS_MOVE: int
+CAP_PROP_XI_LENS_FOCUS_DISTANCE: int
+CAP_PROP_XI_LENS_FOCAL_LENGTH: int
+CAP_PROP_XI_LENS_FEATURE_SELECTOR: int
+CAP_PROP_XI_LENS_FEATURE: int
+CAP_PROP_XI_DEVICE_MODEL_ID: int
+CAP_PROP_XI_DEVICE_SN: int
+CAP_PROP_XI_IMAGE_DATA_FORMAT_RGB32_ALPHA: int
+CAP_PROP_XI_IMAGE_PAYLOAD_SIZE: int
+CAP_PROP_XI_TRANSPORT_PIXEL_FORMAT: int
+CAP_PROP_XI_SENSOR_CLOCK_FREQ_HZ: int
+CAP_PROP_XI_SENSOR_CLOCK_FREQ_INDEX: int
+CAP_PROP_XI_SENSOR_OUTPUT_CHANNEL_COUNT: int
+CAP_PROP_XI_FRAMERATE: int
+CAP_PROP_XI_COUNTER_SELECTOR: int
+CAP_PROP_XI_COUNTER_VALUE: int
+CAP_PROP_XI_ACQ_TIMING_MODE: int
+CAP_PROP_XI_AVAILABLE_BANDWIDTH: int
+CAP_PROP_XI_BUFFER_POLICY: int
+CAP_PROP_XI_LUT_EN: int
+CAP_PROP_XI_LUT_INDEX: int
+CAP_PROP_XI_LUT_VALUE: int
+CAP_PROP_XI_TRG_DELAY: int
+CAP_PROP_XI_TS_RST_MODE: int
+CAP_PROP_XI_TS_RST_SOURCE: int
+CAP_PROP_XI_IS_DEVICE_EXIST: int
+CAP_PROP_XI_ACQ_BUFFER_SIZE: int
+CAP_PROP_XI_ACQ_BUFFER_SIZE_UNIT: int
+CAP_PROP_XI_ACQ_TRANSPORT_BUFFER_SIZE: int
+CAP_PROP_XI_BUFFERS_QUEUE_SIZE: int
+CAP_PROP_XI_ACQ_TRANSPORT_BUFFER_COMMIT: int
+CAP_PROP_XI_RECENT_FRAME: int
+CAP_PROP_XI_DEVICE_RESET: int
+CAP_PROP_XI_COLUMN_FPN_CORRECTION: int
+CAP_PROP_XI_ROW_FPN_CORRECTION: int
+CAP_PROP_XI_SENSOR_MODE: int
+CAP_PROP_XI_HDR: int
+CAP_PROP_XI_HDR_KNEEPOINT_COUNT: int
+CAP_PROP_XI_HDR_T1: int
+CAP_PROP_XI_HDR_T2: int
+CAP_PROP_XI_KNEEPOINT1: int
+CAP_PROP_XI_KNEEPOINT2: int
+CAP_PROP_XI_IMAGE_BLACK_LEVEL: int
+CAP_PROP_XI_HW_REVISION: int
+CAP_PROP_XI_DEBUG_LEVEL: int
+CAP_PROP_XI_AUTO_BANDWIDTH_CALCULATION: int
+CAP_PROP_XI_FFS_FILE_ID: int
+CAP_PROP_XI_FFS_FILE_SIZE: int
+CAP_PROP_XI_FREE_FFS_SIZE: int
+CAP_PROP_XI_USED_FFS_SIZE: int
+CAP_PROP_XI_FFS_ACCESS_KEY: int
+CAP_PROP_XI_SENSOR_FEATURE_SELECTOR: int
+CAP_PROP_XI_SENSOR_FEATURE_VALUE: int
+CAP_PROP_ARAVIS_AUTOTRIGGER: int
+CAP_PROP_IOS_DEVICE_FOCUS: int
+CAP_PROP_IOS_DEVICE_EXPOSURE: int
+CAP_PROP_IOS_DEVICE_FLASH: int
+CAP_PROP_IOS_DEVICE_WHITEBALANCE: int
+CAP_PROP_IOS_DEVICE_TORCH: int
+CAP_PROP_GIGA_FRAME_OFFSET_X: int
+CAP_PROP_GIGA_FRAME_OFFSET_Y: int
+CAP_PROP_GIGA_FRAME_WIDTH_MAX: int
+CAP_PROP_GIGA_FRAME_HEIGH_MAX: int
+CAP_PROP_GIGA_FRAME_SENS_WIDTH: int
+CAP_PROP_GIGA_FRAME_SENS_HEIGH: int
+CAP_PROP_INTELPERC_PROFILE_COUNT: int
+CAP_PROP_INTELPERC_PROFILE_IDX: int
+CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE: int
+CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE: int
+CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD: int
+CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ: int
+CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT: int
+CAP_INTELPERC_DEPTH_GENERATOR: int
+CAP_INTELPERC_IMAGE_GENERATOR: int
+CAP_INTELPERC_IR_GENERATOR: int
+CAP_INTELPERC_GENERATORS_MASK: int
+CAP_INTELPERC_DEPTH_MAP: int
+CAP_INTELPERC_UVDEPTH_MAP: int
+CAP_INTELPERC_IR_MAP: int
+CAP_INTELPERC_IMAGE: int
+CAP_PROP_GPHOTO2_PREVIEW: int
+CAP_PROP_GPHOTO2_WIDGET_ENUMERATE: int
+CAP_PROP_GPHOTO2_RELOAD_CONFIG: int
+CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE: int
+CAP_PROP_GPHOTO2_COLLECT_MSGS: int
+CAP_PROP_GPHOTO2_FLUSH_MSGS: int
+CAP_PROP_SPEED: int
+CAP_PROP_APERTURE: int
+CAP_PROP_EXPOSUREPROGRAM: int
+CAP_PROP_VIEWFINDER: int
+CAP_PROP_IMAGES_BASE: int
+CAP_PROP_IMAGES_LAST: int
+LMEDS: int
+RANSAC: int
+RHO: int
+USAC_DEFAULT: int
+USAC_PARALLEL: int
+USAC_FM_8PTS: int
+USAC_FAST: int
+USAC_ACCURATE: int
+USAC_PROSAC: int
+USAC_MAGSAC: int
+CALIB_CB_ADAPTIVE_THRESH: int
+CALIB_CB_NORMALIZE_IMAGE: int
+CALIB_CB_FILTER_QUADS: int
+CALIB_CB_FAST_CHECK: int
+CALIB_CB_EXHAUSTIVE: int
+CALIB_CB_ACCURACY: int
+CALIB_CB_LARGER: int
+CALIB_CB_MARKER: int
+CALIB_CB_PLAIN: int
+CALIB_CB_SYMMETRIC_GRID: int
+CALIB_CB_ASYMMETRIC_GRID: int
+CALIB_CB_CLUSTERING: int
+CALIB_NINTRINSIC: int
+CALIB_USE_INTRINSIC_GUESS: int
+CALIB_FIX_ASPECT_RATIO: int
+CALIB_FIX_PRINCIPAL_POINT: int
+CALIB_ZERO_TANGENT_DIST: int
+CALIB_FIX_FOCAL_LENGTH: int
+CALIB_FIX_K1: int
+CALIB_FIX_K2: int
+CALIB_FIX_K3: int
+CALIB_FIX_K4: int
+CALIB_FIX_K5: int
+CALIB_FIX_K6: int
+CALIB_RATIONAL_MODEL: int
+CALIB_THIN_PRISM_MODEL: int
+CALIB_FIX_S1_S2_S3_S4: int
+CALIB_TILTED_MODEL: int
+CALIB_FIX_TAUX_TAUY: int
+CALIB_USE_QR: int
+CALIB_FIX_TANGENT_DIST: int
+CALIB_FIX_INTRINSIC: int
+CALIB_SAME_FOCAL_LENGTH: int
+CALIB_ZERO_DISPARITY: int
+CALIB_USE_LU: int
+CALIB_USE_EXTRINSIC_GUESS: int
+FM_7POINT: int
+FM_8POINT: int
+FM_LMEDS: int
+FM_RANSAC: int
+CASCADE_DO_CANNY_PRUNING: int
+CASCADE_SCALE_IMAGE: int
+CASCADE_FIND_BIGGEST_OBJECT: int
+CASCADE_DO_ROUGH_SEARCH: int
+OPTFLOW_USE_INITIAL_FLOW: int
+OPTFLOW_LK_GET_MIN_EIGENVALS: int
+OPTFLOW_FARNEBACK_GAUSSIAN: int
+MOTION_TRANSLATION: int
+MOTION_EUCLIDEAN: int
+MOTION_AFFINE: int
+MOTION_HOMOGRAPHY: int
+
+DrawMatchesFlags_DEFAULT: int
+DRAW_MATCHES_FLAGS_DEFAULT: int
+DrawMatchesFlags_DRAW_OVER_OUTIMG: int
+DRAW_MATCHES_FLAGS_DRAW_OVER_OUTIMG: int
+DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS: int
+DRAW_MATCHES_FLAGS_NOT_DRAW_SINGLE_POINTS: int
+DrawMatchesFlags_DRAW_RICH_KEYPOINTS: int
+DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS: int
+DrawMatchesFlags = int
+"""One of [DrawMatchesFlags_DEFAULT, DRAW_MATCHES_FLAGS_DEFAULT, DrawMatchesFlags_DRAW_OVER_OUTIMG, DRAW_MATCHES_FLAGS_DRAW_OVER_OUTIMG, DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS, DRAW_MATCHES_FLAGS_NOT_DRAW_SINGLE_POINTS, DrawMatchesFlags_DRAW_RICH_KEYPOINTS, DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS]"""
+
+IMREAD_UNCHANGED: int
+IMREAD_GRAYSCALE: int
+IMREAD_COLOR: int
+IMREAD_ANYDEPTH: int
+IMREAD_ANYCOLOR: int
+IMREAD_LOAD_GDAL: int
+IMREAD_REDUCED_GRAYSCALE_2: int
+IMREAD_REDUCED_COLOR_2: int
+IMREAD_REDUCED_GRAYSCALE_4: int
+IMREAD_REDUCED_COLOR_4: int
+IMREAD_REDUCED_GRAYSCALE_8: int
+IMREAD_REDUCED_COLOR_8: int
+IMREAD_IGNORE_ORIENTATION: int
+ImreadModes = int
+"""One of [IMREAD_UNCHANGED, IMREAD_GRAYSCALE, IMREAD_COLOR, IMREAD_ANYDEPTH, IMREAD_ANYCOLOR, IMREAD_LOAD_GDAL, IMREAD_REDUCED_GRAYSCALE_2, IMREAD_REDUCED_COLOR_2, IMREAD_REDUCED_GRAYSCALE_4, IMREAD_REDUCED_COLOR_4, IMREAD_REDUCED_GRAYSCALE_8, IMREAD_REDUCED_COLOR_8, IMREAD_IGNORE_ORIENTATION]"""
+
+IMWRITE_JPEG_QUALITY: int
+IMWRITE_JPEG_PROGRESSIVE: int
+IMWRITE_JPEG_OPTIMIZE: int
+IMWRITE_JPEG_RST_INTERVAL: int
+IMWRITE_JPEG_LUMA_QUALITY: int
+IMWRITE_JPEG_CHROMA_QUALITY: int
+IMWRITE_JPEG_SAMPLING_FACTOR: int
+IMWRITE_PNG_COMPRESSION: int
+IMWRITE_PNG_STRATEGY: int
+IMWRITE_PNG_BILEVEL: int
+IMWRITE_PXM_BINARY: int
+IMWRITE_EXR_TYPE: int
+IMWRITE_EXR_COMPRESSION: int
+IMWRITE_EXR_DWA_COMPRESSION_LEVEL: int
+IMWRITE_WEBP_QUALITY: int
+IMWRITE_HDR_COMPRESSION: int
+IMWRITE_PAM_TUPLETYPE: int
+IMWRITE_TIFF_RESUNIT: int
+IMWRITE_TIFF_XDPI: int
+IMWRITE_TIFF_YDPI: int
+IMWRITE_TIFF_COMPRESSION: int
+IMWRITE_TIFF_ROWSPERSTRIP: int
+IMWRITE_TIFF_PREDICTOR: int
+IMWRITE_JPEG2000_COMPRESSION_X1000: int
+IMWRITE_AVIF_QUALITY: int
+IMWRITE_AVIF_DEPTH: int
+IMWRITE_AVIF_SPEED: int
+ImwriteFlags = int
+"""One of [IMWRITE_JPEG_QUALITY, IMWRITE_JPEG_PROGRESSIVE, IMWRITE_JPEG_OPTIMIZE, IMWRITE_JPEG_RST_INTERVAL, IMWRITE_JPEG_LUMA_QUALITY, IMWRITE_JPEG_CHROMA_QUALITY, IMWRITE_JPEG_SAMPLING_FACTOR, IMWRITE_PNG_COMPRESSION, IMWRITE_PNG_STRATEGY, IMWRITE_PNG_BILEVEL, IMWRITE_PXM_BINARY, IMWRITE_EXR_TYPE, IMWRITE_EXR_COMPRESSION, IMWRITE_EXR_DWA_COMPRESSION_LEVEL, IMWRITE_WEBP_QUALITY, IMWRITE_HDR_COMPRESSION, IMWRITE_PAM_TUPLETYPE, IMWRITE_TIFF_RESUNIT, IMWRITE_TIFF_XDPI, IMWRITE_TIFF_YDPI, IMWRITE_TIFF_COMPRESSION, IMWRITE_TIFF_ROWSPERSTRIP, IMWRITE_TIFF_PREDICTOR, IMWRITE_JPEG2000_COMPRESSION_X1000, IMWRITE_AVIF_QUALITY, IMWRITE_AVIF_DEPTH, IMWRITE_AVIF_SPEED]"""
+
+IMWRITE_JPEG_SAMPLING_FACTOR_411: int
+IMWRITE_JPEG_SAMPLING_FACTOR_420: int
+IMWRITE_JPEG_SAMPLING_FACTOR_422: int
+IMWRITE_JPEG_SAMPLING_FACTOR_440: int
+IMWRITE_JPEG_SAMPLING_FACTOR_444: int
+ImwriteJPEGSamplingFactorParams = int
+"""One of [IMWRITE_JPEG_SAMPLING_FACTOR_411, IMWRITE_JPEG_SAMPLING_FACTOR_420, IMWRITE_JPEG_SAMPLING_FACTOR_422, IMWRITE_JPEG_SAMPLING_FACTOR_440, IMWRITE_JPEG_SAMPLING_FACTOR_444]"""
+
+IMWRITE_TIFF_COMPRESSION_NONE: int
+IMWRITE_TIFF_COMPRESSION_CCITTRLE: int
+IMWRITE_TIFF_COMPRESSION_CCITTFAX3: int
+IMWRITE_TIFF_COMPRESSION_CCITT_T4: int
+IMWRITE_TIFF_COMPRESSION_CCITTFAX4: int
+IMWRITE_TIFF_COMPRESSION_CCITT_T6: int
+IMWRITE_TIFF_COMPRESSION_LZW: int
+IMWRITE_TIFF_COMPRESSION_OJPEG: int
+IMWRITE_TIFF_COMPRESSION_JPEG: int
+IMWRITE_TIFF_COMPRESSION_T85: int
+IMWRITE_TIFF_COMPRESSION_T43: int
+IMWRITE_TIFF_COMPRESSION_NEXT: int
+IMWRITE_TIFF_COMPRESSION_CCITTRLEW: int
+IMWRITE_TIFF_COMPRESSION_PACKBITS: int
+IMWRITE_TIFF_COMPRESSION_THUNDERSCAN: int
+IMWRITE_TIFF_COMPRESSION_IT8CTPAD: int
+IMWRITE_TIFF_COMPRESSION_IT8LW: int
+IMWRITE_TIFF_COMPRESSION_IT8MP: int
+IMWRITE_TIFF_COMPRESSION_IT8BL: int
+IMWRITE_TIFF_COMPRESSION_PIXARFILM: int
+IMWRITE_TIFF_COMPRESSION_PIXARLOG: int
+IMWRITE_TIFF_COMPRESSION_DEFLATE: int
+IMWRITE_TIFF_COMPRESSION_ADOBE_DEFLATE: int
+IMWRITE_TIFF_COMPRESSION_DCS: int
+IMWRITE_TIFF_COMPRESSION_JBIG: int
+IMWRITE_TIFF_COMPRESSION_SGILOG: int
+IMWRITE_TIFF_COMPRESSION_SGILOG24: int
+IMWRITE_TIFF_COMPRESSION_JP2000: int
+IMWRITE_TIFF_COMPRESSION_LERC: int
+IMWRITE_TIFF_COMPRESSION_LZMA: int
+IMWRITE_TIFF_COMPRESSION_ZSTD: int
+IMWRITE_TIFF_COMPRESSION_WEBP: int
+IMWRITE_TIFF_COMPRESSION_JXL: int
+ImwriteTiffCompressionFlags = int
+"""One of [IMWRITE_TIFF_COMPRESSION_NONE, IMWRITE_TIFF_COMPRESSION_CCITTRLE, IMWRITE_TIFF_COMPRESSION_CCITTFAX3, IMWRITE_TIFF_COMPRESSION_CCITT_T4, IMWRITE_TIFF_COMPRESSION_CCITTFAX4, IMWRITE_TIFF_COMPRESSION_CCITT_T6, IMWRITE_TIFF_COMPRESSION_LZW, IMWRITE_TIFF_COMPRESSION_OJPEG, IMWRITE_TIFF_COMPRESSION_JPEG, IMWRITE_TIFF_COMPRESSION_T85, IMWRITE_TIFF_COMPRESSION_T43, IMWRITE_TIFF_COMPRESSION_NEXT, IMWRITE_TIFF_COMPRESSION_CCITTRLEW, IMWRITE_TIFF_COMPRESSION_PACKBITS, IMWRITE_TIFF_COMPRESSION_THUNDERSCAN, IMWRITE_TIFF_COMPRESSION_IT8CTPAD, IMWRITE_TIFF_COMPRESSION_IT8LW, IMWRITE_TIFF_COMPRESSION_IT8MP, IMWRITE_TIFF_COMPRESSION_IT8BL, IMWRITE_TIFF_COMPRESSION_PIXARFILM, IMWRITE_TIFF_COMPRESSION_PIXARLOG, IMWRITE_TIFF_COMPRESSION_DEFLATE, IMWRITE_TIFF_COMPRESSION_ADOBE_DEFLATE, IMWRITE_TIFF_COMPRESSION_DCS, IMWRITE_TIFF_COMPRESSION_JBIG, IMWRITE_TIFF_COMPRESSION_SGILOG, IMWRITE_TIFF_COMPRESSION_SGILOG24, IMWRITE_TIFF_COMPRESSION_JP2000, IMWRITE_TIFF_COMPRESSION_LERC, IMWRITE_TIFF_COMPRESSION_LZMA, IMWRITE_TIFF_COMPRESSION_ZSTD, IMWRITE_TIFF_COMPRESSION_WEBP, IMWRITE_TIFF_COMPRESSION_JXL]"""
+
+IMWRITE_TIFF_PREDICTOR_NONE: int
+IMWRITE_TIFF_PREDICTOR_HORIZONTAL: int
+IMWRITE_TIFF_PREDICTOR_FLOATINGPOINT: int
+ImwriteTiffPredictorFlags = int
+"""One of [IMWRITE_TIFF_PREDICTOR_NONE, IMWRITE_TIFF_PREDICTOR_HORIZONTAL, IMWRITE_TIFF_PREDICTOR_FLOATINGPOINT]"""
+
+IMWRITE_EXR_TYPE_HALF: int
+IMWRITE_EXR_TYPE_FLOAT: int
+ImwriteEXRTypeFlags = int
+"""One of [IMWRITE_EXR_TYPE_HALF, IMWRITE_EXR_TYPE_FLOAT]"""
+
+IMWRITE_EXR_COMPRESSION_NO: int
+IMWRITE_EXR_COMPRESSION_RLE: int
+IMWRITE_EXR_COMPRESSION_ZIPS: int
+IMWRITE_EXR_COMPRESSION_ZIP: int
+IMWRITE_EXR_COMPRESSION_PIZ: int
+IMWRITE_EXR_COMPRESSION_PXR24: int
+IMWRITE_EXR_COMPRESSION_B44: int
+IMWRITE_EXR_COMPRESSION_B44A: int
+IMWRITE_EXR_COMPRESSION_DWAA: int
+IMWRITE_EXR_COMPRESSION_DWAB: int
+ImwriteEXRCompressionFlags = int
+"""One of [IMWRITE_EXR_COMPRESSION_NO, IMWRITE_EXR_COMPRESSION_RLE, IMWRITE_EXR_COMPRESSION_ZIPS, IMWRITE_EXR_COMPRESSION_ZIP, IMWRITE_EXR_COMPRESSION_PIZ, IMWRITE_EXR_COMPRESSION_PXR24, IMWRITE_EXR_COMPRESSION_B44, IMWRITE_EXR_COMPRESSION_B44A, IMWRITE_EXR_COMPRESSION_DWAA, IMWRITE_EXR_COMPRESSION_DWAB]"""
+
+IMWRITE_PNG_STRATEGY_DEFAULT: int
+IMWRITE_PNG_STRATEGY_FILTERED: int
+IMWRITE_PNG_STRATEGY_HUFFMAN_ONLY: int
+IMWRITE_PNG_STRATEGY_RLE: int
+IMWRITE_PNG_STRATEGY_FIXED: int
+ImwritePNGFlags = int
+"""One of [IMWRITE_PNG_STRATEGY_DEFAULT, IMWRITE_PNG_STRATEGY_FILTERED, IMWRITE_PNG_STRATEGY_HUFFMAN_ONLY, IMWRITE_PNG_STRATEGY_RLE, IMWRITE_PNG_STRATEGY_FIXED]"""
+
+IMWRITE_PAM_FORMAT_NULL: int
+IMWRITE_PAM_FORMAT_BLACKANDWHITE: int
+IMWRITE_PAM_FORMAT_GRAYSCALE: int
+IMWRITE_PAM_FORMAT_GRAYSCALE_ALPHA: int
+IMWRITE_PAM_FORMAT_RGB: int
+IMWRITE_PAM_FORMAT_RGB_ALPHA: int
+ImwritePAMFlags = int
+"""One of [IMWRITE_PAM_FORMAT_NULL, IMWRITE_PAM_FORMAT_BLACKANDWHITE, IMWRITE_PAM_FORMAT_GRAYSCALE, IMWRITE_PAM_FORMAT_GRAYSCALE_ALPHA, IMWRITE_PAM_FORMAT_RGB, IMWRITE_PAM_FORMAT_RGB_ALPHA]"""
+
+IMWRITE_HDR_COMPRESSION_NONE: int
+IMWRITE_HDR_COMPRESSION_RLE: int
+ImwriteHDRCompressionFlags = int
+"""One of [IMWRITE_HDR_COMPRESSION_NONE, IMWRITE_HDR_COMPRESSION_RLE]"""
+
+CAP_ANY: int
+CAP_VFW: int
+CAP_V4L: int
+CAP_V4L2: int
+CAP_FIREWIRE: int
+CAP_FIREWARE: int
+CAP_IEEE1394: int
+CAP_DC1394: int
+CAP_CMU1394: int
+CAP_QT: int
+CAP_UNICAP: int
+CAP_DSHOW: int
+CAP_PVAPI: int
+CAP_OPENNI: int
+CAP_OPENNI_ASUS: int
+CAP_ANDROID: int
+CAP_XIAPI: int
+CAP_AVFOUNDATION: int
+CAP_GIGANETIX: int
+CAP_MSMF: int
+CAP_WINRT: int
+CAP_INTELPERC: int
+CAP_REALSENSE: int
+CAP_OPENNI2: int
+CAP_OPENNI2_ASUS: int
+CAP_OPENNI2_ASTRA: int
+CAP_GPHOTO2: int
+CAP_GSTREAMER: int
+CAP_FFMPEG: int
+CAP_IMAGES: int
+CAP_ARAVIS: int
+CAP_OPENCV_MJPEG: int
+CAP_INTEL_MFX: int
+CAP_XINE: int
+CAP_UEYE: int
+CAP_OBSENSOR: int
+VideoCaptureAPIs = int
+"""One of [CAP_ANY, CAP_VFW, CAP_V4L, CAP_V4L2, CAP_FIREWIRE, CAP_FIREWARE, CAP_IEEE1394, CAP_DC1394, CAP_CMU1394, CAP_QT, CAP_UNICAP, CAP_DSHOW, CAP_PVAPI, CAP_OPENNI, CAP_OPENNI_ASUS, CAP_ANDROID, CAP_XIAPI, CAP_AVFOUNDATION, CAP_GIGANETIX, CAP_MSMF, CAP_WINRT, CAP_INTELPERC, CAP_REALSENSE, CAP_OPENNI2, CAP_OPENNI2_ASUS, CAP_OPENNI2_ASTRA, CAP_GPHOTO2, CAP_GSTREAMER, CAP_FFMPEG, CAP_IMAGES, CAP_ARAVIS, CAP_OPENCV_MJPEG, CAP_INTEL_MFX, CAP_XINE, CAP_UEYE, CAP_OBSENSOR]"""
+
+CAP_PROP_POS_MSEC: int
+CAP_PROP_POS_FRAMES: int
+CAP_PROP_POS_AVI_RATIO: int
+CAP_PROP_FRAME_WIDTH: int
+CAP_PROP_FRAME_HEIGHT: int
+CAP_PROP_FPS: int
+CAP_PROP_FOURCC: int
+CAP_PROP_FRAME_COUNT: int
+CAP_PROP_FORMAT: int
+CAP_PROP_MODE: int
+CAP_PROP_BRIGHTNESS: int
+CAP_PROP_CONTRAST: int
+CAP_PROP_SATURATION: int
+CAP_PROP_HUE: int
+CAP_PROP_GAIN: int
+CAP_PROP_EXPOSURE: int
+CAP_PROP_CONVERT_RGB: int
+CAP_PROP_WHITE_BALANCE_BLUE_U: int
+CAP_PROP_RECTIFICATION: int
+CAP_PROP_MONOCHROME: int
+CAP_PROP_SHARPNESS: int
+CAP_PROP_AUTO_EXPOSURE: int
+CAP_PROP_GAMMA: int
+CAP_PROP_TEMPERATURE: int
+CAP_PROP_TRIGGER: int
+CAP_PROP_TRIGGER_DELAY: int
+CAP_PROP_WHITE_BALANCE_RED_V: int
+CAP_PROP_ZOOM: int
+CAP_PROP_FOCUS: int
+CAP_PROP_GUID: int
+CAP_PROP_ISO_SPEED: int
+CAP_PROP_BACKLIGHT: int
+CAP_PROP_PAN: int
+CAP_PROP_TILT: int
+CAP_PROP_ROLL: int
+CAP_PROP_IRIS: int
+CAP_PROP_SETTINGS: int
+CAP_PROP_BUFFERSIZE: int
+CAP_PROP_AUTOFOCUS: int
+CAP_PROP_SAR_NUM: int
+CAP_PROP_SAR_DEN: int
+CAP_PROP_BACKEND: int
+CAP_PROP_CHANNEL: int
+CAP_PROP_AUTO_WB: int
+CAP_PROP_WB_TEMPERATURE: int
+CAP_PROP_CODEC_PIXEL_FORMAT: int
+CAP_PROP_BITRATE: int
+CAP_PROP_ORIENTATION_META: int
+CAP_PROP_ORIENTATION_AUTO: int
+CAP_PROP_HW_ACCELERATION: int
+CAP_PROP_HW_DEVICE: int
+CAP_PROP_HW_ACCELERATION_USE_OPENCL: int
+CAP_PROP_OPEN_TIMEOUT_MSEC: int
+CAP_PROP_READ_TIMEOUT_MSEC: int
+CAP_PROP_STREAM_OPEN_TIME_USEC: int
+CAP_PROP_VIDEO_TOTAL_CHANNELS: int
+CAP_PROP_VIDEO_STREAM: int
+CAP_PROP_AUDIO_STREAM: int
+CAP_PROP_AUDIO_POS: int
+CAP_PROP_AUDIO_SHIFT_NSEC: int
+CAP_PROP_AUDIO_DATA_DEPTH: int
+CAP_PROP_AUDIO_SAMPLES_PER_SECOND: int
+CAP_PROP_AUDIO_BASE_INDEX: int
+CAP_PROP_AUDIO_TOTAL_CHANNELS: int
+CAP_PROP_AUDIO_TOTAL_STREAMS: int
+CAP_PROP_AUDIO_SYNCHRONIZE: int
+CAP_PROP_LRF_HAS_KEY_FRAME: int
+CAP_PROP_CODEC_EXTRADATA_INDEX: int
+CAP_PROP_FRAME_TYPE: int
+CAP_PROP_N_THREADS: int
+VideoCaptureProperties = int
+"""One of [CAP_PROP_POS_MSEC, CAP_PROP_POS_FRAMES, CAP_PROP_POS_AVI_RATIO, CAP_PROP_FRAME_WIDTH, CAP_PROP_FRAME_HEIGHT, CAP_PROP_FPS, CAP_PROP_FOURCC, CAP_PROP_FRAME_COUNT, CAP_PROP_FORMAT, CAP_PROP_MODE, CAP_PROP_BRIGHTNESS, CAP_PROP_CONTRAST, CAP_PROP_SATURATION, CAP_PROP_HUE, CAP_PROP_GAIN, CAP_PROP_EXPOSURE, CAP_PROP_CONVERT_RGB, CAP_PROP_WHITE_BALANCE_BLUE_U, CAP_PROP_RECTIFICATION, CAP_PROP_MONOCHROME, CAP_PROP_SHARPNESS, CAP_PROP_AUTO_EXPOSURE, CAP_PROP_GAMMA, CAP_PROP_TEMPERATURE, CAP_PROP_TRIGGER, CAP_PROP_TRIGGER_DELAY, CAP_PROP_WHITE_BALANCE_RED_V, CAP_PROP_ZOOM, CAP_PROP_FOCUS, CAP_PROP_GUID, CAP_PROP_ISO_SPEED, CAP_PROP_BACKLIGHT, CAP_PROP_PAN, CAP_PROP_TILT, CAP_PROP_ROLL, CAP_PROP_IRIS, CAP_PROP_SETTINGS, CAP_PROP_BUFFERSIZE, CAP_PROP_AUTOFOCUS, CAP_PROP_SAR_NUM, CAP_PROP_SAR_DEN, CAP_PROP_BACKEND, CAP_PROP_CHANNEL, CAP_PROP_AUTO_WB, CAP_PROP_WB_TEMPERATURE, CAP_PROP_CODEC_PIXEL_FORMAT, CAP_PROP_BITRATE, CAP_PROP_ORIENTATION_META, CAP_PROP_ORIENTATION_AUTO, CAP_PROP_HW_ACCELERATION, CAP_PROP_HW_DEVICE, CAP_PROP_HW_ACCELERATION_USE_OPENCL, CAP_PROP_OPEN_TIMEOUT_MSEC, CAP_PROP_READ_TIMEOUT_MSEC, CAP_PROP_STREAM_OPEN_TIME_USEC, CAP_PROP_VIDEO_TOTAL_CHANNELS, CAP_PROP_VIDEO_STREAM, CAP_PROP_AUDIO_STREAM, CAP_PROP_AUDIO_POS, CAP_PROP_AUDIO_SHIFT_NSEC, CAP_PROP_AUDIO_DATA_DEPTH, CAP_PROP_AUDIO_SAMPLES_PER_SECOND, CAP_PROP_AUDIO_BASE_INDEX, CAP_PROP_AUDIO_TOTAL_CHANNELS, CAP_PROP_AUDIO_TOTAL_STREAMS, CAP_PROP_AUDIO_SYNCHRONIZE, CAP_PROP_LRF_HAS_KEY_FRAME, CAP_PROP_CODEC_EXTRADATA_INDEX, CAP_PROP_FRAME_TYPE, CAP_PROP_N_THREADS]"""
+
+VIDEOWRITER_PROP_QUALITY: int
+VIDEOWRITER_PROP_FRAMEBYTES: int
+VIDEOWRITER_PROP_NSTRIPES: int
+VIDEOWRITER_PROP_IS_COLOR: int
+VIDEOWRITER_PROP_DEPTH: int
+VIDEOWRITER_PROP_HW_ACCELERATION: int
+VIDEOWRITER_PROP_HW_DEVICE: int
+VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL: int
+VIDEOWRITER_PROP_RAW_VIDEO: int
+VIDEOWRITER_PROP_KEY_INTERVAL: int
+VIDEOWRITER_PROP_KEY_FLAG: int
+VideoWriterProperties = int
+"""One of [VIDEOWRITER_PROP_QUALITY, VIDEOWRITER_PROP_FRAMEBYTES, VIDEOWRITER_PROP_NSTRIPES, VIDEOWRITER_PROP_IS_COLOR, VIDEOWRITER_PROP_DEPTH, VIDEOWRITER_PROP_HW_ACCELERATION, VIDEOWRITER_PROP_HW_DEVICE, VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, VIDEOWRITER_PROP_RAW_VIDEO, VIDEOWRITER_PROP_KEY_INTERVAL, VIDEOWRITER_PROP_KEY_FLAG]"""
+
+VIDEO_ACCELERATION_NONE: int
+VIDEO_ACCELERATION_ANY: int
+VIDEO_ACCELERATION_D3D11: int
+VIDEO_ACCELERATION_VAAPI: int
+VIDEO_ACCELERATION_MFX: int
+VideoAccelerationType = int
+"""One of [VIDEO_ACCELERATION_NONE, VIDEO_ACCELERATION_ANY, VIDEO_ACCELERATION_D3D11, VIDEO_ACCELERATION_VAAPI, VIDEO_ACCELERATION_MFX]"""
+
+CAP_OBSENSOR_DEPTH_MAP: int
+CAP_OBSENSOR_BGR_IMAGE: int
+CAP_OBSENSOR_IR_IMAGE: int
+VideoCaptureOBSensorDataType = int
+"""One of [CAP_OBSENSOR_DEPTH_MAP, CAP_OBSENSOR_BGR_IMAGE, CAP_OBSENSOR_IR_IMAGE]"""
+
+CAP_OBSENSOR_DEPTH_GENERATOR: int
+CAP_OBSENSOR_IMAGE_GENERATOR: int
+CAP_OBSENSOR_IR_GENERATOR: int
+CAP_OBSENSOR_GENERATORS_MASK: int
+VideoCaptureOBSensorGenerators = int
+"""One of [CAP_OBSENSOR_DEPTH_GENERATOR, CAP_OBSENSOR_IMAGE_GENERATOR, CAP_OBSENSOR_IR_GENERATOR, CAP_OBSENSOR_GENERATORS_MASK]"""
+
+CAP_PROP_OBSENSOR_INTRINSIC_FX: int
+CAP_PROP_OBSENSOR_INTRINSIC_FY: int
+CAP_PROP_OBSENSOR_INTRINSIC_CX: int
+CAP_PROP_OBSENSOR_INTRINSIC_CY: int
+VideoCaptureOBSensorProperties = int
+"""One of [CAP_PROP_OBSENSOR_INTRINSIC_FX, CAP_PROP_OBSENSOR_INTRINSIC_FY, CAP_PROP_OBSENSOR_INTRINSIC_CX, CAP_PROP_OBSENSOR_INTRINSIC_CY]"""
+
+SOLVEPNP_ITERATIVE: int
+SOLVEPNP_EPNP: int
+SOLVEPNP_P3P: int
+SOLVEPNP_DLS: int
+SOLVEPNP_UPNP: int
+SOLVEPNP_AP3P: int
+SOLVEPNP_IPPE: int
+SOLVEPNP_IPPE_SQUARE: int
+SOLVEPNP_SQPNP: int
+SOLVEPNP_MAX_COUNT: int
+SolvePnPMethod = int
+"""One of [SOLVEPNP_ITERATIVE, SOLVEPNP_EPNP, SOLVEPNP_P3P, SOLVEPNP_DLS, SOLVEPNP_UPNP, SOLVEPNP_AP3P, SOLVEPNP_IPPE, SOLVEPNP_IPPE_SQUARE, SOLVEPNP_SQPNP, SOLVEPNP_MAX_COUNT]"""
+
+CALIB_HAND_EYE_TSAI: int
+CALIB_HAND_EYE_PARK: int
+CALIB_HAND_EYE_HORAUD: int
+CALIB_HAND_EYE_ANDREFF: int
+CALIB_HAND_EYE_DANIILIDIS: int
+HandEyeCalibrationMethod = int
+"""One of [CALIB_HAND_EYE_TSAI, CALIB_HAND_EYE_PARK, CALIB_HAND_EYE_HORAUD, CALIB_HAND_EYE_ANDREFF, CALIB_HAND_EYE_DANIILIDIS]"""
+
+CALIB_ROBOT_WORLD_HAND_EYE_SHAH: int
+CALIB_ROBOT_WORLD_HAND_EYE_LI: int
+RobotWorldHandEyeCalibrationMethod = int
+"""One of [CALIB_ROBOT_WORLD_HAND_EYE_SHAH, CALIB_ROBOT_WORLD_HAND_EYE_LI]"""
+
+SAMPLING_UNIFORM: int
+SAMPLING_PROGRESSIVE_NAPSAC: int
+SAMPLING_NAPSAC: int
+SAMPLING_PROSAC: int
+SamplingMethod = int
+"""One of [SAMPLING_UNIFORM, SAMPLING_PROGRESSIVE_NAPSAC, SAMPLING_NAPSAC, SAMPLING_PROSAC]"""
+
+LOCAL_OPTIM_NULL: int
+LOCAL_OPTIM_INNER_LO: int
+LOCAL_OPTIM_INNER_AND_ITER_LO: int
+LOCAL_OPTIM_GC: int
+LOCAL_OPTIM_SIGMA: int
+LocalOptimMethod = int
+"""One of [LOCAL_OPTIM_NULL, LOCAL_OPTIM_INNER_LO, LOCAL_OPTIM_INNER_AND_ITER_LO, LOCAL_OPTIM_GC, LOCAL_OPTIM_SIGMA]"""
+
+SCORE_METHOD_RANSAC: int
+SCORE_METHOD_MSAC: int
+SCORE_METHOD_MAGSAC: int
+SCORE_METHOD_LMEDS: int
+ScoreMethod = int
+"""One of [SCORE_METHOD_RANSAC, SCORE_METHOD_MSAC, SCORE_METHOD_MAGSAC, SCORE_METHOD_LMEDS]"""
+
+NEIGH_FLANN_KNN: int
+NEIGH_GRID: int
+NEIGH_FLANN_RADIUS: int
+NeighborSearchMethod = int
+"""One of [NEIGH_FLANN_KNN, NEIGH_GRID, NEIGH_FLANN_RADIUS]"""
+
+NONE_POLISHER: int
+LSQ_POLISHER: int
+MAGSAC: int
+COV_POLISHER: int
+PolishingMethod = int
+"""One of [NONE_POLISHER, LSQ_POLISHER, MAGSAC, COV_POLISHER]"""
+
+PROJ_SPHERICAL_ORTHO: int
+PROJ_SPHERICAL_EQRECT: int
+UndistortTypes = int
+"""One of [PROJ_SPHERICAL_ORTHO, PROJ_SPHERICAL_EQRECT]"""
+
+WINDOW_NORMAL: int
+WINDOW_AUTOSIZE: int
+WINDOW_OPENGL: int
+WINDOW_FULLSCREEN: int
+WINDOW_FREERATIO: int
+WINDOW_KEEPRATIO: int
+WINDOW_GUI_EXPANDED: int
+WINDOW_GUI_NORMAL: int
+WindowFlags = int
+"""One of [WINDOW_NORMAL, WINDOW_AUTOSIZE, WINDOW_OPENGL, WINDOW_FULLSCREEN, WINDOW_FREERATIO, WINDOW_KEEPRATIO, WINDOW_GUI_EXPANDED, WINDOW_GUI_NORMAL]"""
+
+WND_PROP_FULLSCREEN: int
+WND_PROP_AUTOSIZE: int
+WND_PROP_ASPECT_RATIO: int
+WND_PROP_OPENGL: int
+WND_PROP_VISIBLE: int
+WND_PROP_TOPMOST: int
+WND_PROP_VSYNC: int
+WindowPropertyFlags = int
+"""One of [WND_PROP_FULLSCREEN, WND_PROP_AUTOSIZE, WND_PROP_ASPECT_RATIO, WND_PROP_OPENGL, WND_PROP_VISIBLE, WND_PROP_TOPMOST, WND_PROP_VSYNC]"""
+
+EVENT_MOUSEMOVE: int
+EVENT_LBUTTONDOWN: int
+EVENT_RBUTTONDOWN: int
+EVENT_MBUTTONDOWN: int
+EVENT_LBUTTONUP: int
+EVENT_RBUTTONUP: int
+EVENT_MBUTTONUP: int
+EVENT_LBUTTONDBLCLK: int
+EVENT_RBUTTONDBLCLK: int
+EVENT_MBUTTONDBLCLK: int
+EVENT_MOUSEWHEEL: int
+EVENT_MOUSEHWHEEL: int
+MouseEventTypes = int
+"""One of [EVENT_MOUSEMOVE, EVENT_LBUTTONDOWN, EVENT_RBUTTONDOWN, EVENT_MBUTTONDOWN, EVENT_LBUTTONUP, EVENT_RBUTTONUP, EVENT_MBUTTONUP, EVENT_LBUTTONDBLCLK, EVENT_RBUTTONDBLCLK, EVENT_MBUTTONDBLCLK, EVENT_MOUSEWHEEL, EVENT_MOUSEHWHEEL]"""
+
+EVENT_FLAG_LBUTTON: int
+EVENT_FLAG_RBUTTON: int
+EVENT_FLAG_MBUTTON: int
+EVENT_FLAG_CTRLKEY: int
+EVENT_FLAG_SHIFTKEY: int
+EVENT_FLAG_ALTKEY: int
+MouseEventFlags = int
+"""One of [EVENT_FLAG_LBUTTON, EVENT_FLAG_RBUTTON, EVENT_FLAG_MBUTTON, EVENT_FLAG_CTRLKEY, EVENT_FLAG_SHIFTKEY, EVENT_FLAG_ALTKEY]"""
+
+QT_FONT_LIGHT: int
+QT_FONT_NORMAL: int
+QT_FONT_DEMIBOLD: int
+QT_FONT_BOLD: int
+QT_FONT_BLACK: int
+QtFontWeights = int
+"""One of [QT_FONT_LIGHT, QT_FONT_NORMAL, QT_FONT_DEMIBOLD, QT_FONT_BOLD, QT_FONT_BLACK]"""
+
+QT_STYLE_NORMAL: int
+QT_STYLE_ITALIC: int
+QT_STYLE_OBLIQUE: int
+QtFontStyles = int
+"""One of [QT_STYLE_NORMAL, QT_STYLE_ITALIC, QT_STYLE_OBLIQUE]"""
+
+QT_PUSH_BUTTON: int
+QT_CHECKBOX: int
+QT_RADIOBOX: int
+QT_NEW_BUTTONBAR: int
+QtButtonTypes = int
+"""One of [QT_PUSH_BUTTON, QT_CHECKBOX, QT_RADIOBOX, QT_NEW_BUTTONBAR]"""
+
+GShape_GMAT: int
+GSHAPE_GMAT: int
+GShape_GSCALAR: int
+GSHAPE_GSCALAR: int
+GShape_GARRAY: int
+GSHAPE_GARRAY: int
+GShape_GOPAQUE: int
+GSHAPE_GOPAQUE: int
+GShape_GFRAME: int
+GSHAPE_GFRAME: int
+GShape = int
+"""One of [GShape_GMAT, GSHAPE_GMAT, GShape_GSCALAR, GSHAPE_GSCALAR, GShape_GARRAY, GSHAPE_GARRAY, GShape_GOPAQUE, GSHAPE_GOPAQUE, GShape_GFRAME, GSHAPE_GFRAME]"""
+
+MediaFormat_BGR: int
+MEDIA_FORMAT_BGR: int
+MediaFormat_NV12: int
+MEDIA_FORMAT_NV12: int
+MediaFormat_GRAY: int
+MEDIA_FORMAT_GRAY: int
+MediaFormat = int
+"""One of [MediaFormat_BGR, MEDIA_FORMAT_BGR, MediaFormat_NV12, MEDIA_FORMAT_NV12, MediaFormat_GRAY, MEDIA_FORMAT_GRAY]"""
+
+
+FileStorage_READ: int
+FILE_STORAGE_READ: int
+FileStorage_WRITE: int
+FILE_STORAGE_WRITE: int
+FileStorage_APPEND: int
+FILE_STORAGE_APPEND: int
+FileStorage_MEMORY: int
+FILE_STORAGE_MEMORY: int
+FileStorage_FORMAT_MASK: int
+FILE_STORAGE_FORMAT_MASK: int
+FileStorage_FORMAT_AUTO: int
+FILE_STORAGE_FORMAT_AUTO: int
+FileStorage_FORMAT_XML: int
+FILE_STORAGE_FORMAT_XML: int
+FileStorage_FORMAT_YAML: int
+FILE_STORAGE_FORMAT_YAML: int
+FileStorage_FORMAT_JSON: int
+FILE_STORAGE_FORMAT_JSON: int
+FileStorage_BASE64: int
+FILE_STORAGE_BASE64: int
+FileStorage_WRITE_BASE64: int
+FILE_STORAGE_WRITE_BASE64: int
+FileStorage_Mode = int
+"""One of [FileStorage_READ, FILE_STORAGE_READ, FileStorage_WRITE, FILE_STORAGE_WRITE, FileStorage_APPEND, FILE_STORAGE_APPEND, FileStorage_MEMORY, FILE_STORAGE_MEMORY, FileStorage_FORMAT_MASK, FILE_STORAGE_FORMAT_MASK, FileStorage_FORMAT_AUTO, FILE_STORAGE_FORMAT_AUTO, FileStorage_FORMAT_XML, FILE_STORAGE_FORMAT_XML, FileStorage_FORMAT_YAML, FILE_STORAGE_FORMAT_YAML, FileStorage_FORMAT_JSON, FILE_STORAGE_FORMAT_JSON, FileStorage_BASE64, FILE_STORAGE_BASE64, FileStorage_WRITE_BASE64, FILE_STORAGE_WRITE_BASE64]"""
+
+FileStorage_UNDEFINED: int
+FILE_STORAGE_UNDEFINED: int
+FileStorage_VALUE_EXPECTED: int
+FILE_STORAGE_VALUE_EXPECTED: int
+FileStorage_NAME_EXPECTED: int
+FILE_STORAGE_NAME_EXPECTED: int
+FileStorage_INSIDE_MAP: int
+FILE_STORAGE_INSIDE_MAP: int
+FileStorage_State = int
+"""One of [FileStorage_UNDEFINED, FILE_STORAGE_UNDEFINED, FileStorage_VALUE_EXPECTED, FILE_STORAGE_VALUE_EXPECTED, FileStorage_NAME_EXPECTED, FILE_STORAGE_NAME_EXPECTED, FileStorage_INSIDE_MAP, FILE_STORAGE_INSIDE_MAP]"""
+
+FileNode_NONE: int
+FILE_NODE_NONE: int
+FileNode_INT: int
+FILE_NODE_INT: int
+FileNode_REAL: int
+FILE_NODE_REAL: int
+FileNode_FLOAT: int
+FILE_NODE_FLOAT: int
+FileNode_STR: int
+FILE_NODE_STR: int
+FileNode_STRING: int
+FILE_NODE_STRING: int
+FileNode_SEQ: int
+FILE_NODE_SEQ: int
+FileNode_MAP: int
+FILE_NODE_MAP: int
+FileNode_TYPE_MASK: int
+FILE_NODE_TYPE_MASK: int
+FileNode_FLOW: int
+FILE_NODE_FLOW: int
+FileNode_UNIFORM: int
+FILE_NODE_UNIFORM: int
+FileNode_EMPTY: int
+FILE_NODE_EMPTY: int
+FileNode_NAMED: int
+FILE_NODE_NAMED: int
+
+UMat_MAGIC_VAL: int
+UMAT_MAGIC_VAL: int
+UMat_AUTO_STEP: int
+UMAT_AUTO_STEP: int
+UMat_CONTINUOUS_FLAG: int
+UMAT_CONTINUOUS_FLAG: int
+UMat_SUBMATRIX_FLAG: int
+UMAT_SUBMATRIX_FLAG: int
+UMat_MAGIC_MASK: int
+UMAT_MAGIC_MASK: int
+UMat_TYPE_MASK: int
+UMAT_TYPE_MASK: int
+UMat_DEPTH_MASK: int
+UMAT_DEPTH_MASK: int
+
+Subdiv2D_PTLOC_ERROR: int
+SUBDIV2D_PTLOC_ERROR: int
+Subdiv2D_PTLOC_OUTSIDE_RECT: int
+SUBDIV2D_PTLOC_OUTSIDE_RECT: int
+Subdiv2D_PTLOC_INSIDE: int
+SUBDIV2D_PTLOC_INSIDE: int
+Subdiv2D_PTLOC_VERTEX: int
+SUBDIV2D_PTLOC_VERTEX: int
+Subdiv2D_PTLOC_ON_EDGE: int
+SUBDIV2D_PTLOC_ON_EDGE: int
+Subdiv2D_NEXT_AROUND_ORG: int
+SUBDIV2D_NEXT_AROUND_ORG: int
+Subdiv2D_NEXT_AROUND_DST: int
+SUBDIV2D_NEXT_AROUND_DST: int
+Subdiv2D_PREV_AROUND_ORG: int
+SUBDIV2D_PREV_AROUND_ORG: int
+Subdiv2D_PREV_AROUND_DST: int
+SUBDIV2D_PREV_AROUND_DST: int
+Subdiv2D_NEXT_AROUND_LEFT: int
+SUBDIV2D_NEXT_AROUND_LEFT: int
+Subdiv2D_NEXT_AROUND_RIGHT: int
+SUBDIV2D_NEXT_AROUND_RIGHT: int
+Subdiv2D_PREV_AROUND_LEFT: int
+SUBDIV2D_PREV_AROUND_LEFT: int
+Subdiv2D_PREV_AROUND_RIGHT: int
+SUBDIV2D_PREV_AROUND_RIGHT: int
+
+ORB_HARRIS_SCORE: int
+ORB_FAST_SCORE: int
+ORB_ScoreType = int
+"""One of [ORB_HARRIS_SCORE, ORB_FAST_SCORE]"""
+
+FastFeatureDetector_TYPE_5_8: int
+FAST_FEATURE_DETECTOR_TYPE_5_8: int
+FastFeatureDetector_TYPE_7_12: int
+FAST_FEATURE_DETECTOR_TYPE_7_12: int
+FastFeatureDetector_TYPE_9_16: int
+FAST_FEATURE_DETECTOR_TYPE_9_16: int
+FastFeatureDetector_DetectorType = int
+"""One of [FastFeatureDetector_TYPE_5_8, FAST_FEATURE_DETECTOR_TYPE_5_8, FastFeatureDetector_TYPE_7_12, FAST_FEATURE_DETECTOR_TYPE_7_12, FastFeatureDetector_TYPE_9_16, FAST_FEATURE_DETECTOR_TYPE_9_16]"""
+
+FastFeatureDetector_THRESHOLD: int
+FAST_FEATURE_DETECTOR_THRESHOLD: int
+FastFeatureDetector_NONMAX_SUPPRESSION: int
+FAST_FEATURE_DETECTOR_NONMAX_SUPPRESSION: int
+FastFeatureDetector_FAST_N: int
+FAST_FEATURE_DETECTOR_FAST_N: int
+
+AgastFeatureDetector_AGAST_5_8: int
+AGAST_FEATURE_DETECTOR_AGAST_5_8: int
+AgastFeatureDetector_AGAST_7_12d: int
+AGAST_FEATURE_DETECTOR_AGAST_7_12D: int
+AgastFeatureDetector_AGAST_7_12s: int
+AGAST_FEATURE_DETECTOR_AGAST_7_12S: int
+AgastFeatureDetector_OAST_9_16: int
+AGAST_FEATURE_DETECTOR_OAST_9_16: int
+AgastFeatureDetector_DetectorType = int
+"""One of [AgastFeatureDetector_AGAST_5_8, AGAST_FEATURE_DETECTOR_AGAST_5_8, AgastFeatureDetector_AGAST_7_12d, AGAST_FEATURE_DETECTOR_AGAST_7_12D, AgastFeatureDetector_AGAST_7_12s, AGAST_FEATURE_DETECTOR_AGAST_7_12S, AgastFeatureDetector_OAST_9_16, AGAST_FEATURE_DETECTOR_OAST_9_16]"""
+
+AgastFeatureDetector_THRESHOLD: int
+AGAST_FEATURE_DETECTOR_THRESHOLD: int
+AgastFeatureDetector_NONMAX_SUPPRESSION: int
+AGAST_FEATURE_DETECTOR_NONMAX_SUPPRESSION: int
+
+KAZE_DIFF_PM_G1: int
+KAZE_DIFF_PM_G2: int
+KAZE_DIFF_WEICKERT: int
+KAZE_DIFF_CHARBONNIER: int
+KAZE_DiffusivityType = int
+"""One of [KAZE_DIFF_PM_G1, KAZE_DIFF_PM_G2, KAZE_DIFF_WEICKERT, KAZE_DIFF_CHARBONNIER]"""
+
+AKAZE_DESCRIPTOR_KAZE_UPRIGHT: int
+AKAZE_DESCRIPTOR_KAZE: int
+AKAZE_DESCRIPTOR_MLDB_UPRIGHT: int
+AKAZE_DESCRIPTOR_MLDB: int
+AKAZE_DescriptorType = int
+"""One of [AKAZE_DESCRIPTOR_KAZE_UPRIGHT, AKAZE_DESCRIPTOR_KAZE, AKAZE_DESCRIPTOR_MLDB_UPRIGHT, AKAZE_DESCRIPTOR_MLDB]"""
+
+DescriptorMatcher_FLANNBASED: int
+DESCRIPTOR_MATCHER_FLANNBASED: int
+DescriptorMatcher_BRUTEFORCE: int
+DESCRIPTOR_MATCHER_BRUTEFORCE: int
+DescriptorMatcher_BRUTEFORCE_L1: int
+DESCRIPTOR_MATCHER_BRUTEFORCE_L1: int
+DescriptorMatcher_BRUTEFORCE_HAMMING: int
+DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING: int
+DescriptorMatcher_BRUTEFORCE_HAMMINGLUT: int
+DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMINGLUT: int
+DescriptorMatcher_BRUTEFORCE_SL2: int
+DESCRIPTOR_MATCHER_BRUTEFORCE_SL2: int
+DescriptorMatcher_MatcherType = int
+"""One of [DescriptorMatcher_FLANNBASED, DESCRIPTOR_MATCHER_FLANNBASED, DescriptorMatcher_BRUTEFORCE, DESCRIPTOR_MATCHER_BRUTEFORCE, DescriptorMatcher_BRUTEFORCE_L1, DESCRIPTOR_MATCHER_BRUTEFORCE_L1, DescriptorMatcher_BRUTEFORCE_HAMMING, DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING, DescriptorMatcher_BRUTEFORCE_HAMMINGLUT, DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMINGLUT, DescriptorMatcher_BRUTEFORCE_SL2, DESCRIPTOR_MATCHER_BRUTEFORCE_SL2]"""
+
+CirclesGridFinderParameters_SYMMETRIC_GRID: int
+CIRCLES_GRID_FINDER_PARAMETERS_SYMMETRIC_GRID: int
+CirclesGridFinderParameters_ASYMMETRIC_GRID: int
+CIRCLES_GRID_FINDER_PARAMETERS_ASYMMETRIC_GRID: int
+CirclesGridFinderParameters_GridType = int
+"""One of [CirclesGridFinderParameters_SYMMETRIC_GRID, CIRCLES_GRID_FINDER_PARAMETERS_SYMMETRIC_GRID, CirclesGridFinderParameters_ASYMMETRIC_GRID, CIRCLES_GRID_FINDER_PARAMETERS_ASYMMETRIC_GRID]"""
+
+StereoMatcher_DISP_SHIFT: int
+STEREO_MATCHER_DISP_SHIFT: int
+StereoMatcher_DISP_SCALE: int
+STEREO_MATCHER_DISP_SCALE: int
+
+StereoBM_PREFILTER_NORMALIZED_RESPONSE: int
+STEREO_BM_PREFILTER_NORMALIZED_RESPONSE: int
+StereoBM_PREFILTER_XSOBEL: int
+STEREO_BM_PREFILTER_XSOBEL: int
+
+StereoSGBM_MODE_SGBM: int
+STEREO_SGBM_MODE_SGBM: int
+StereoSGBM_MODE_HH: int
+STEREO_SGBM_MODE_HH: int
+StereoSGBM_MODE_SGBM_3WAY: int
+STEREO_SGBM_MODE_SGBM_3WAY: int
+StereoSGBM_MODE_HH4: int
+STEREO_SGBM_MODE_HH4: int
+
+HOGDescriptor_L2Hys: int
+HOGDESCRIPTOR_L2HYS: int
+HOGDescriptor_HistogramNormType = int
+"""One of [HOGDescriptor_L2Hys, HOGDESCRIPTOR_L2HYS]"""
+
+HOGDescriptor_DEFAULT_NLEVELS: int
+HOGDESCRIPTOR_DEFAULT_NLEVELS: int
+
+HOGDescriptor_DESCR_FORMAT_COL_BY_COL: int
+HOGDESCRIPTOR_DESCR_FORMAT_COL_BY_COL: int
+HOGDescriptor_DESCR_FORMAT_ROW_BY_ROW: int
+HOGDESCRIPTOR_DESCR_FORMAT_ROW_BY_ROW: int
+HOGDescriptor_DescriptorStorageFormat = int
+"""One of [HOGDescriptor_DESCR_FORMAT_COL_BY_COL, HOGDESCRIPTOR_DESCR_FORMAT_COL_BY_COL, HOGDescriptor_DESCR_FORMAT_ROW_BY_ROW, HOGDESCRIPTOR_DESCR_FORMAT_ROW_BY_ROW]"""
+
+QRCodeEncoder_MODE_AUTO: int
+QRCODE_ENCODER_MODE_AUTO: int
+QRCodeEncoder_MODE_NUMERIC: int
+QRCODE_ENCODER_MODE_NUMERIC: int
+QRCodeEncoder_MODE_ALPHANUMERIC: int
+QRCODE_ENCODER_MODE_ALPHANUMERIC: int
+QRCodeEncoder_MODE_BYTE: int
+QRCODE_ENCODER_MODE_BYTE: int
+QRCodeEncoder_MODE_ECI: int
+QRCODE_ENCODER_MODE_ECI: int
+QRCodeEncoder_MODE_KANJI: int
+QRCODE_ENCODER_MODE_KANJI: int
+QRCodeEncoder_MODE_STRUCTURED_APPEND: int
+QRCODE_ENCODER_MODE_STRUCTURED_APPEND: int
+QRCodeEncoder_EncodeMode = int
+"""One of [QRCodeEncoder_MODE_AUTO, QRCODE_ENCODER_MODE_AUTO, QRCodeEncoder_MODE_NUMERIC, QRCODE_ENCODER_MODE_NUMERIC, QRCodeEncoder_MODE_ALPHANUMERIC, QRCODE_ENCODER_MODE_ALPHANUMERIC, QRCodeEncoder_MODE_BYTE, QRCODE_ENCODER_MODE_BYTE, QRCodeEncoder_MODE_ECI, QRCODE_ENCODER_MODE_ECI, QRCodeEncoder_MODE_KANJI, QRCODE_ENCODER_MODE_KANJI, QRCodeEncoder_MODE_STRUCTURED_APPEND, QRCODE_ENCODER_MODE_STRUCTURED_APPEND]"""
+
+QRCodeEncoder_CORRECT_LEVEL_L: int
+QRCODE_ENCODER_CORRECT_LEVEL_L: int
+QRCodeEncoder_CORRECT_LEVEL_M: int
+QRCODE_ENCODER_CORRECT_LEVEL_M: int
+QRCodeEncoder_CORRECT_LEVEL_Q: int
+QRCODE_ENCODER_CORRECT_LEVEL_Q: int
+QRCodeEncoder_CORRECT_LEVEL_H: int
+QRCODE_ENCODER_CORRECT_LEVEL_H: int
+QRCodeEncoder_CorrectionLevel = int
+"""One of [QRCodeEncoder_CORRECT_LEVEL_L, QRCODE_ENCODER_CORRECT_LEVEL_L, QRCodeEncoder_CORRECT_LEVEL_M, QRCODE_ENCODER_CORRECT_LEVEL_M, QRCodeEncoder_CORRECT_LEVEL_Q, QRCODE_ENCODER_CORRECT_LEVEL_Q, QRCodeEncoder_CORRECT_LEVEL_H, QRCODE_ENCODER_CORRECT_LEVEL_H]"""
+
+QRCodeEncoder_ECI_UTF8: int
+QRCODE_ENCODER_ECI_UTF8: int
+QRCodeEncoder_ECIEncodings = int
+"""One of [QRCodeEncoder_ECI_UTF8, QRCODE_ENCODER_ECI_UTF8]"""
+
+FaceRecognizerSF_FR_COSINE: int
+FACE_RECOGNIZER_SF_FR_COSINE: int
+FaceRecognizerSF_FR_NORM_L2: int
+FACE_RECOGNIZER_SF_FR_NORM_L2: int
+FaceRecognizerSF_DisType = int
+"""One of [FaceRecognizerSF_FR_COSINE, FACE_RECOGNIZER_SF_FR_COSINE, FaceRecognizerSF_FR_NORM_L2, FACE_RECOGNIZER_SF_FR_NORM_L2]"""
+
+Stitcher_OK: int
+STITCHER_OK: int
+Stitcher_ERR_NEED_MORE_IMGS: int
+STITCHER_ERR_NEED_MORE_IMGS: int
+Stitcher_ERR_HOMOGRAPHY_EST_FAIL: int
+STITCHER_ERR_HOMOGRAPHY_EST_FAIL: int
+Stitcher_ERR_CAMERA_PARAMS_ADJUST_FAIL: int
+STITCHER_ERR_CAMERA_PARAMS_ADJUST_FAIL: int
+Stitcher_Status = int
+"""One of [Stitcher_OK, STITCHER_OK, Stitcher_ERR_NEED_MORE_IMGS, STITCHER_ERR_NEED_MORE_IMGS, Stitcher_ERR_HOMOGRAPHY_EST_FAIL, STITCHER_ERR_HOMOGRAPHY_EST_FAIL, Stitcher_ERR_CAMERA_PARAMS_ADJUST_FAIL, STITCHER_ERR_CAMERA_PARAMS_ADJUST_FAIL]"""
+
+Stitcher_PANORAMA: int
+STITCHER_PANORAMA: int
+Stitcher_SCANS: int
+STITCHER_SCANS: int
+Stitcher_Mode = int
+"""One of [Stitcher_PANORAMA, STITCHER_PANORAMA, Stitcher_SCANS, STITCHER_SCANS]"""
+
+DISOpticalFlow_PRESET_ULTRAFAST: int
+DISOPTICAL_FLOW_PRESET_ULTRAFAST: int
+DISOpticalFlow_PRESET_FAST: int
+DISOPTICAL_FLOW_PRESET_FAST: int
+DISOpticalFlow_PRESET_MEDIUM: int
+DISOPTICAL_FLOW_PRESET_MEDIUM: int
+
+PCA_DATA_AS_ROW: int
+PCA_DATA_AS_COL: int
+PCA_USE_AVG: int
+PCA_Flags = int
+"""One of [PCA_DATA_AS_ROW, PCA_DATA_AS_COL, PCA_USE_AVG]"""
+
+SVD_MODIFY_A: int
+SVD_NO_UV: int
+SVD_FULL_UV: int
+SVD_Flags = int
+"""One of [SVD_MODIFY_A, SVD_NO_UV, SVD_FULL_UV]"""
+
+RNG_UNIFORM: int
+RNG_NORMAL: int
+
+Formatter_FMT_DEFAULT: int
+FORMATTER_FMT_DEFAULT: int
+Formatter_FMT_MATLAB: int
+FORMATTER_FMT_MATLAB: int
+Formatter_FMT_CSV: int
+FORMATTER_FMT_CSV: int
+Formatter_FMT_PYTHON: int
+FORMATTER_FMT_PYTHON: int
+Formatter_FMT_NUMPY: int
+FORMATTER_FMT_NUMPY: int
+Formatter_FMT_C: int
+FORMATTER_FMT_C: int
+Formatter_FormatType = int
+"""One of [Formatter_FMT_DEFAULT, FORMATTER_FMT_DEFAULT, Formatter_FMT_MATLAB, FORMATTER_FMT_MATLAB, Formatter_FMT_CSV, FORMATTER_FMT_CSV, Formatter_FMT_PYTHON, FORMATTER_FMT_PYTHON, Formatter_FMT_NUMPY, FORMATTER_FMT_NUMPY, Formatter_FMT_C, FORMATTER_FMT_C]"""
+
+_InputArray_KIND_SHIFT: int
+_INPUT_ARRAY_KIND_SHIFT: int
+_InputArray_FIXED_TYPE: int
+_INPUT_ARRAY_FIXED_TYPE: int
+_InputArray_FIXED_SIZE: int
+_INPUT_ARRAY_FIXED_SIZE: int
+_InputArray_KIND_MASK: int
+_INPUT_ARRAY_KIND_MASK: int
+_InputArray_NONE: int
+_INPUT_ARRAY_NONE: int
+_InputArray_MAT: int
+_INPUT_ARRAY_MAT: int
+_InputArray_MATX: int
+_INPUT_ARRAY_MATX: int
+_InputArray_STD_VECTOR: int
+_INPUT_ARRAY_STD_VECTOR: int
+_InputArray_STD_VECTOR_VECTOR: int
+_INPUT_ARRAY_STD_VECTOR_VECTOR: int
+_InputArray_STD_VECTOR_MAT: int
+_INPUT_ARRAY_STD_VECTOR_MAT: int
+_InputArray_EXPR: int
+_INPUT_ARRAY_EXPR: int
+_InputArray_OPENGL_BUFFER: int
+_INPUT_ARRAY_OPENGL_BUFFER: int
+_InputArray_CUDA_HOST_MEM: int
+_INPUT_ARRAY_CUDA_HOST_MEM: int
+_InputArray_CUDA_GPU_MAT: int
+_INPUT_ARRAY_CUDA_GPU_MAT: int
+_InputArray_UMAT: int
+_INPUT_ARRAY_UMAT: int
+_InputArray_STD_VECTOR_UMAT: int
+_INPUT_ARRAY_STD_VECTOR_UMAT: int
+_InputArray_STD_BOOL_VECTOR: int
+_INPUT_ARRAY_STD_BOOL_VECTOR: int
+_InputArray_STD_VECTOR_CUDA_GPU_MAT: int
+_INPUT_ARRAY_STD_VECTOR_CUDA_GPU_MAT: int
+_InputArray_STD_ARRAY: int
+_INPUT_ARRAY_STD_ARRAY: int
+_InputArray_STD_ARRAY_MAT: int
+_INPUT_ARRAY_STD_ARRAY_MAT: int
+_InputArray_KindFlag = int
+"""One of [_InputArray_KIND_SHIFT, _INPUT_ARRAY_KIND_SHIFT, _InputArray_FIXED_TYPE, _INPUT_ARRAY_FIXED_TYPE, _InputArray_FIXED_SIZE, _INPUT_ARRAY_FIXED_SIZE, _InputArray_KIND_MASK, _INPUT_ARRAY_KIND_MASK, _InputArray_NONE, _INPUT_ARRAY_NONE, _InputArray_MAT, _INPUT_ARRAY_MAT, _InputArray_MATX, _INPUT_ARRAY_MATX, _InputArray_STD_VECTOR, _INPUT_ARRAY_STD_VECTOR, _InputArray_STD_VECTOR_VECTOR, _INPUT_ARRAY_STD_VECTOR_VECTOR, _InputArray_STD_VECTOR_MAT, _INPUT_ARRAY_STD_VECTOR_MAT, _InputArray_EXPR, _INPUT_ARRAY_EXPR, _InputArray_OPENGL_BUFFER, _INPUT_ARRAY_OPENGL_BUFFER, _InputArray_CUDA_HOST_MEM, _INPUT_ARRAY_CUDA_HOST_MEM, _InputArray_CUDA_GPU_MAT, _INPUT_ARRAY_CUDA_GPU_MAT, _InputArray_UMAT, _INPUT_ARRAY_UMAT, _InputArray_STD_VECTOR_UMAT, _INPUT_ARRAY_STD_VECTOR_UMAT, _InputArray_STD_BOOL_VECTOR, _INPUT_ARRAY_STD_BOOL_VECTOR, _InputArray_STD_VECTOR_CUDA_GPU_MAT, _INPUT_ARRAY_STD_VECTOR_CUDA_GPU_MAT, _InputArray_STD_ARRAY, _INPUT_ARRAY_STD_ARRAY, _InputArray_STD_ARRAY_MAT, _INPUT_ARRAY_STD_ARRAY_MAT]"""
+
+_OutputArray_DEPTH_MASK_8U: int
+_OUTPUT_ARRAY_DEPTH_MASK_8U: int
+_OutputArray_DEPTH_MASK_8S: int
+_OUTPUT_ARRAY_DEPTH_MASK_8S: int
+_OutputArray_DEPTH_MASK_16U: int
+_OUTPUT_ARRAY_DEPTH_MASK_16U: int
+_OutputArray_DEPTH_MASK_16S: int
+_OUTPUT_ARRAY_DEPTH_MASK_16S: int
+_OutputArray_DEPTH_MASK_32S: int
+_OUTPUT_ARRAY_DEPTH_MASK_32S: int
+_OutputArray_DEPTH_MASK_32F: int
+_OUTPUT_ARRAY_DEPTH_MASK_32F: int
+_OutputArray_DEPTH_MASK_64F: int
+_OUTPUT_ARRAY_DEPTH_MASK_64F: int
+_OutputArray_DEPTH_MASK_16F: int
+_OUTPUT_ARRAY_DEPTH_MASK_16F: int
+_OutputArray_DEPTH_MASK_ALL: int
+_OUTPUT_ARRAY_DEPTH_MASK_ALL: int
+_OutputArray_DEPTH_MASK_ALL_BUT_8S: int
+_OUTPUT_ARRAY_DEPTH_MASK_ALL_BUT_8S: int
+_OutputArray_DEPTH_MASK_ALL_16F: int
+_OUTPUT_ARRAY_DEPTH_MASK_ALL_16F: int
+_OutputArray_DEPTH_MASK_FLT: int
+_OUTPUT_ARRAY_DEPTH_MASK_FLT: int
+_OutputArray_DepthMask = int
+"""One of [_OutputArray_DEPTH_MASK_8U, _OUTPUT_ARRAY_DEPTH_MASK_8U, _OutputArray_DEPTH_MASK_8S, _OUTPUT_ARRAY_DEPTH_MASK_8S, _OutputArray_DEPTH_MASK_16U, _OUTPUT_ARRAY_DEPTH_MASK_16U, _OutputArray_DEPTH_MASK_16S, _OUTPUT_ARRAY_DEPTH_MASK_16S, _OutputArray_DEPTH_MASK_32S, _OUTPUT_ARRAY_DEPTH_MASK_32S, _OutputArray_DEPTH_MASK_32F, _OUTPUT_ARRAY_DEPTH_MASK_32F, _OutputArray_DEPTH_MASK_64F, _OUTPUT_ARRAY_DEPTH_MASK_64F, _OutputArray_DEPTH_MASK_16F, _OUTPUT_ARRAY_DEPTH_MASK_16F, _OutputArray_DEPTH_MASK_ALL, _OUTPUT_ARRAY_DEPTH_MASK_ALL, _OutputArray_DEPTH_MASK_ALL_BUT_8S, _OUTPUT_ARRAY_DEPTH_MASK_ALL_BUT_8S, _OutputArray_DEPTH_MASK_ALL_16F, _OUTPUT_ARRAY_DEPTH_MASK_ALL_16F, _OutputArray_DEPTH_MASK_FLT, _OUTPUT_ARRAY_DEPTH_MASK_FLT]"""
+
+UMatData_COPY_ON_MAP: int
+UMAT_DATA_COPY_ON_MAP: int
+UMatData_HOST_COPY_OBSOLETE: int
+UMAT_DATA_HOST_COPY_OBSOLETE: int
+UMatData_DEVICE_COPY_OBSOLETE: int
+UMAT_DATA_DEVICE_COPY_OBSOLETE: int
+UMatData_TEMP_UMAT: int
+UMAT_DATA_TEMP_UMAT: int
+UMatData_TEMP_COPIED_UMAT: int
+UMAT_DATA_TEMP_COPIED_UMAT: int
+UMatData_USER_ALLOCATED: int
+UMAT_DATA_USER_ALLOCATED: int
+UMatData_DEVICE_MEM_MAPPED: int
+UMAT_DATA_DEVICE_MEM_MAPPED: int
+UMatData_ASYNC_CLEANUP: int
+UMAT_DATA_ASYNC_CLEANUP: int
+UMatData_MemoryFlag = int
+"""One of [UMatData_COPY_ON_MAP, UMAT_DATA_COPY_ON_MAP, UMatData_HOST_COPY_OBSOLETE, UMAT_DATA_HOST_COPY_OBSOLETE, UMatData_DEVICE_COPY_OBSOLETE, UMAT_DATA_DEVICE_COPY_OBSOLETE, UMatData_TEMP_UMAT, UMAT_DATA_TEMP_UMAT, UMatData_TEMP_COPIED_UMAT, UMAT_DATA_TEMP_COPIED_UMAT, UMatData_USER_ALLOCATED, UMAT_DATA_USER_ALLOCATED, UMatData_DEVICE_MEM_MAPPED, UMAT_DATA_DEVICE_MEM_MAPPED, UMatData_ASYNC_CLEANUP, UMAT_DATA_ASYNC_CLEANUP]"""
+
+Mat_MAGIC_VAL: int
+MAT_MAGIC_VAL: int
+Mat_AUTO_STEP: int
+MAT_AUTO_STEP: int
+Mat_CONTINUOUS_FLAG: int
+MAT_CONTINUOUS_FLAG: int
+Mat_SUBMATRIX_FLAG: int
+MAT_SUBMATRIX_FLAG: int
+Mat_MAGIC_MASK: int
+MAT_MAGIC_MASK: int
+Mat_TYPE_MASK: int
+MAT_TYPE_MASK: int
+Mat_DEPTH_MASK: int
+MAT_DEPTH_MASK: int
+
+SparseMat_MAGIC_VAL: int
+SPARSE_MAT_MAGIC_VAL: int
+SparseMat_MAX_DIM: int
+SPARSE_MAT_MAX_DIM: int
+SparseMat_HASH_SCALE: int
+SPARSE_MAT_HASH_SCALE: int
+SparseMat_HASH_BIT: int
+SPARSE_MAT_HASH_BIT: int
+
+QuatEnum_INT_XYZ: int
+QUAT_ENUM_INT_XYZ: int
+QuatEnum_INT_XZY: int
+QUAT_ENUM_INT_XZY: int
+QuatEnum_INT_YXZ: int
+QUAT_ENUM_INT_YXZ: int
+QuatEnum_INT_YZX: int
+QUAT_ENUM_INT_YZX: int
+QuatEnum_INT_ZXY: int
+QUAT_ENUM_INT_ZXY: int
+QuatEnum_INT_ZYX: int
+QUAT_ENUM_INT_ZYX: int
+QuatEnum_INT_XYX: int
+QUAT_ENUM_INT_XYX: int
+QuatEnum_INT_XZX: int
+QUAT_ENUM_INT_XZX: int
+QuatEnum_INT_YXY: int
+QUAT_ENUM_INT_YXY: int
+QuatEnum_INT_YZY: int
+QUAT_ENUM_INT_YZY: int
+QuatEnum_INT_ZXZ: int
+QUAT_ENUM_INT_ZXZ: int
+QuatEnum_INT_ZYZ: int
+QUAT_ENUM_INT_ZYZ: int
+QuatEnum_EXT_XYZ: int
+QUAT_ENUM_EXT_XYZ: int
+QuatEnum_EXT_XZY: int
+QUAT_ENUM_EXT_XZY: int
+QuatEnum_EXT_YXZ: int
+QUAT_ENUM_EXT_YXZ: int
+QuatEnum_EXT_YZX: int
+QUAT_ENUM_EXT_YZX: int
+QuatEnum_EXT_ZXY: int
+QUAT_ENUM_EXT_ZXY: int
+QuatEnum_EXT_ZYX: int
+QUAT_ENUM_EXT_ZYX: int
+QuatEnum_EXT_XYX: int
+QUAT_ENUM_EXT_XYX: int
+QuatEnum_EXT_XZX: int
+QUAT_ENUM_EXT_XZX: int
+QuatEnum_EXT_YXY: int
+QUAT_ENUM_EXT_YXY: int
+QuatEnum_EXT_YZY: int
+QUAT_ENUM_EXT_YZY: int
+QuatEnum_EXT_ZXZ: int
+QUAT_ENUM_EXT_ZXZ: int
+QuatEnum_EXT_ZYZ: int
+QUAT_ENUM_EXT_ZYZ: int
+QuatEnum_EULER_ANGLES_MAX_VALUE: int
+QUAT_ENUM_EULER_ANGLES_MAX_VALUE: int
+QuatEnum_EulerAnglesType = int
+"""One of [QuatEnum_INT_XYZ, QUAT_ENUM_INT_XYZ, QuatEnum_INT_XZY, QUAT_ENUM_INT_XZY, QuatEnum_INT_YXZ, QUAT_ENUM_INT_YXZ, QuatEnum_INT_YZX, QUAT_ENUM_INT_YZX, QuatEnum_INT_ZXY, QUAT_ENUM_INT_ZXY, QuatEnum_INT_ZYX, QUAT_ENUM_INT_ZYX, QuatEnum_INT_XYX, QUAT_ENUM_INT_XYX, QuatEnum_INT_XZX, QUAT_ENUM_INT_XZX, QuatEnum_INT_YXY, QUAT_ENUM_INT_YXY, QuatEnum_INT_YZY, QUAT_ENUM_INT_YZY, QuatEnum_INT_ZXZ, QUAT_ENUM_INT_ZXZ, QuatEnum_INT_ZYZ, QUAT_ENUM_INT_ZYZ, QuatEnum_EXT_XYZ, QUAT_ENUM_EXT_XYZ, QuatEnum_EXT_XZY, QUAT_ENUM_EXT_XZY, QuatEnum_EXT_YXZ, QUAT_ENUM_EXT_YXZ, QuatEnum_EXT_YZX, QUAT_ENUM_EXT_YZX, QuatEnum_EXT_ZXY, QUAT_ENUM_EXT_ZXY, QuatEnum_EXT_ZYX, QUAT_ENUM_EXT_ZYX, QuatEnum_EXT_XYX, QUAT_ENUM_EXT_XYX, QuatEnum_EXT_XZX, QUAT_ENUM_EXT_XZX, QuatEnum_EXT_YXY, QUAT_ENUM_EXT_YXY, QuatEnum_EXT_YZY, QUAT_ENUM_EXT_YZY, QuatEnum_EXT_ZXZ, QUAT_ENUM_EXT_ZXZ, QuatEnum_EXT_ZYZ, QUAT_ENUM_EXT_ZYZ, QuatEnum_EULER_ANGLES_MAX_VALUE, QUAT_ENUM_EULER_ANGLES_MAX_VALUE]"""
+
+TermCriteria_COUNT: int
+TERM_CRITERIA_COUNT: int
+TermCriteria_MAX_ITER: int
+TERM_CRITERIA_MAX_ITER: int
+TermCriteria_EPS: int
+TERM_CRITERIA_EPS: int
+TermCriteria_Type = int
+"""One of [TermCriteria_COUNT, TERM_CRITERIA_COUNT, TermCriteria_MAX_ITER, TERM_CRITERIA_MAX_ITER, TermCriteria_EPS, TERM_CRITERIA_EPS]"""
+
+GFluidKernel_Kind_Filter: int
+GFLUID_KERNEL_KIND_FILTER: int
+GFluidKernel_Kind_Resize: int
+GFLUID_KERNEL_KIND_RESIZE: int
+GFluidKernel_Kind_YUV420toRGB: int
+GFLUID_KERNEL_KIND_YUV420TO_RGB: int
+GFluidKernel_Kind = int
+"""One of [GFluidKernel_Kind_Filter, GFLUID_KERNEL_KIND_FILTER, GFluidKernel_Kind_Resize, GFLUID_KERNEL_KIND_RESIZE, GFluidKernel_Kind_YUV420toRGB, GFLUID_KERNEL_KIND_YUV420TO_RGB]"""
+
+MediaFrame_Access_R: int
+MEDIA_FRAME_ACCESS_R: int
+MediaFrame_Access_W: int
+MEDIA_FRAME_ACCESS_W: int
+MediaFrame_Access = int
+"""One of [MediaFrame_Access_R, MEDIA_FRAME_ACCESS_R, MediaFrame_Access_W, MEDIA_FRAME_ACCESS_W]"""
+
+RMat_Access_R: int
+RMAT_ACCESS_R: int
+RMat_Access_W: int
+RMAT_ACCESS_W: int
+RMat_Access = int
+"""One of [RMat_Access_R, RMAT_ACCESS_R, RMat_Access_W, RMAT_ACCESS_W]"""
+
+
+# Constants
+CV_8U: int
+CV_8UC1: int
+CV_8UC2: int
+CV_8UC3: int
+CV_8UC4: int
+CV_8S: int
+CV_8SC1: int
+CV_8SC2: int
+CV_8SC3: int
+CV_8SC4: int
+CV_16U: int
+CV_16UC1: int
+CV_16UC2: int
+CV_16UC3: int
+CV_16UC4: int
+CV_16S: int
+CV_16SC1: int
+CV_16SC2: int
+CV_16SC3: int
+CV_16SC4: int
+CV_32S: int
+CV_32SC1: int
+CV_32SC2: int
+CV_32SC3: int
+CV_32SC4: int
+CV_32F: int
+CV_32FC1: int
+CV_32FC2: int
+CV_32FC3: int
+CV_32FC4: int
+CV_64F: int
+CV_64FC1: int
+CV_64FC2: int
+CV_64FC3: int
+CV_64FC4: int
+CV_16F: int
+CV_16FC1: int
+CV_16FC2: int
+CV_16FC3: int
+CV_16FC4: int
+__version__: str
+
+# Classes
+class Algorithm:
+ """Typing stub for cv2.Algorithm: the clear/read/write/save state and serialization interface inherited by other algorithm classes in this stub (e.g. GeneralizedHough, CLAHE). All bodies are '...'; the implementation lives in the compiled extension."""
+ # Functions
+ def clear(self) -> None: ...
+
+ @_typing.overload
+ def write(self, fs: FileStorage) -> None: ...
+ @_typing.overload
+ def write(self, fs: FileStorage, name: str) -> None: ...
+
+ def read(self, fn: FileNode) -> None: ...
+
+ def empty(self) -> bool: ...
+
+ def save(self, filename: str) -> None: ...
+
+ def getDefaultName(self) -> str: ...
+
+
+class AsyncArray:
+ """Typing stub for cv2.AsyncArray: get() overloads return a MatLike/UMat result, optionally with a timeout (returning a (bool, result) tuple); wait_for/valid/release complete the interface."""
+ # Functions
+ def __init__(self) -> None: ...
+
+ def release(self) -> None: ...
+
+ @_typing.overload
+ def get(self, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def get(self, dst: UMat | None = ...) -> UMat: ...
+ @_typing.overload
+ def get(self, timeoutNs: float, dst: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def get(self, timeoutNs: float, dst: UMat | None = ...) -> tuple[bool, UMat]: ...
+
+ def wait_for(self, timeoutNs: float) -> bool: ...
+
+ def valid(self) -> bool: ...
+
+
+class FileStorage:
+ """Typing stub for cv2.FileStorage: open/release a persistence file, navigate its FileNode tree (root/getNode), and write typed values (int/float/str/MatLike/str sequences) plus comments and nested structs."""
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, filename: str, flags: int, encoding: str = ...) -> None: ...
+
+ def open(self, filename: str, flags: int, encoding: str = ...) -> bool: ...
+
+ def isOpened(self) -> bool: ...
+
+ def release(self) -> None: ...
+
+ def releaseAndGetString(self) -> str: ...
+
+ def getFirstTopLevelNode(self) -> FileNode: ...
+
+ def root(self, streamidx: int = ...) -> FileNode: ...
+
+ def getNode(self, nodename: str) -> FileNode: ...
+
+ @_typing.overload
+ def write(self, name: str, val: int) -> None: ...
+ @_typing.overload
+ def write(self, name: str, val: float) -> None: ...
+ @_typing.overload
+ def write(self, name: str, val: str) -> None: ...
+ @_typing.overload
+ def write(self, name: str, val: cv2.typing.MatLike) -> None: ...
+ @_typing.overload
+ def write(self, name: str, val: _typing.Sequence[str]) -> None: ...
+
+ def writeComment(self, comment: str, append: bool = ...) -> None: ...
+
+ def startWriteStruct(self, name: str, flags: int, typeName: str = ...) -> None: ...
+
+ def endWriteStruct(self) -> None: ...
+
+ def getFormat(self) -> int: ...
+
+
+class FileNode:
+ """Typing stub for cv2.FileNode: read-side accessors for a FileStorage node — child lookup (getNode/at/keys), type predicates (isSeq/isMap/isInt/...), and value extraction (real/string/mat)."""
+ # Functions
+ def __init__(self) -> None: ...
+
+ def getNode(self, nodename: str) -> FileNode: ...
+
+ def at(self, i: int) -> FileNode: ...
+
+ def keys(self) -> _typing.Sequence[str]: ...
+
+ def type(self) -> int: ...
+
+ def empty(self) -> bool: ...
+
+ def isNone(self) -> bool: ...
+
+ def isSeq(self) -> bool: ...
+
+ def isMap(self) -> bool: ...
+
+ def isInt(self) -> bool: ...
+
+ def isReal(self) -> bool: ...
+
+ def isString(self) -> bool: ...
+
+ def isNamed(self) -> bool: ...
+
+ def name(self) -> str: ...
+
+ def size(self) -> int: ...
+
+ def rawSize(self) -> int: ...
+
+ def real(self) -> float: ...
+
+ def string(self) -> str: ...
+
+ def mat(self) -> cv2.typing.MatLike: ...
+
+
+class RotatedRect:
+ """Typing stub for cv2.RotatedRect: a value type described by center, size and angle, constructible from those fields or from three corner points."""
+ center: cv2.typing.Point2f
+ size: cv2.typing.Size2f
+ angle: float
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, center: cv2.typing.Point2f, size: cv2.typing.Size2f, angle: float) -> None: ...
+ @_typing.overload
+ def __init__(self, point1: cv2.typing.Point2f, point2: cv2.typing.Point2f, point3: cv2.typing.Point2f) -> None: ...
+
+ def points(self) -> _typing.Sequence[cv2.typing.Point2f]: ...
+
+ def boundingRect(self) -> cv2.typing.Rect: ...
+
+ def boundingRect2f(self) -> cv2.typing.Rect2f: ...
+
+
+class KeyPoint:
+ """Typing stub for cv2.KeyPoint: a feature point (pt/size/angle/response/octave/class_id) with static convert() overloads between KeyPoint and Point2f sequences, and overlap()."""
+ pt: cv2.typing.Point2f
+ size: float
+ angle: float
+ response: float
+ octave: int
+ class_id: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, x: float, y: float, size: float, angle: float = ..., response: float = ..., octave: int = ..., class_id: int = ...) -> None: ...
+
+ @staticmethod
+ @_typing.overload
+ def convert(keypoints: _typing.Sequence[KeyPoint], keypointIndexes: _typing.Sequence[int] = ...) -> _typing.Sequence[cv2.typing.Point2f]: ...
+ @staticmethod
+ @_typing.overload
+ def convert(points2f: _typing.Sequence[cv2.typing.Point2f], size: float = ..., response: float = ..., octave: int = ..., class_id: int = ...) -> _typing.Sequence[KeyPoint]: ...
+
+ @staticmethod
+ def overlap(kp1: KeyPoint, kp2: KeyPoint) -> float: ...
+
+
+class DMatch:
+ """Typing stub for cv2.DMatch: a descriptor-match record (queryIdx/trainIdx/imgIdx/distance) with constructors taking the index fields plus distance."""
+ queryIdx: int
+ trainIdx: int
+ imgIdx: int
+ distance: float
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, _queryIdx: int, _trainIdx: int, _distance: float) -> None: ...
+ @_typing.overload
+ def __init__(self, _queryIdx: int, _trainIdx: int, _imgIdx: int, _distance: float) -> None: ...
+
+
+class TickMeter:
+ """Typing stub for cv2.TickMeter: start/stop timing with elapsed-time accessors in ticks, micro/milli/seconds, plus counter, FPS and average-time queries."""
+ # Functions
+ def __init__(self) -> None: ...
+
+ def start(self) -> None: ...
+
+ def stop(self) -> None: ...
+
+ def getTimeTicks(self) -> int: ...
+
+ def getTimeMicro(self) -> float: ...
+
+ def getTimeMilli(self) -> float: ...
+
+ def getTimeSec(self) -> float: ...
+
+ def getCounter(self) -> int: ...
+
+ def getFPS(self) -> float: ...
+
+ def getAvgTimeSec(self) -> float: ...
+
+ def getAvgTimeMilli(self) -> float: ...
+
+ def reset(self) -> None: ...
+
+
+class UMat:
+ """Typing stub for cv2.UMat: Mat-style constructor overloads (rows/cols/type, Size/type, scalar fill, sub-views by range/rect), static queue()/context() pointer accessors, and get() converting to a MatLike."""
+ offset: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self, usageFlags: UMatUsageFlags = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, rows: int, cols: int, type: int, usageFlags: UMatUsageFlags = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, type: int, usageFlags: UMatUsageFlags = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, rows: int, cols: int, type: int, s: cv2.typing.Scalar, usageFlags: UMatUsageFlags = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, type: int, s: cv2.typing.Scalar, usageFlags: UMatUsageFlags = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, m: UMat) -> None: ...
+ @_typing.overload
+ def __init__(self, m: UMat, rowRange: cv2.typing.Range, colRange: cv2.typing.Range = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, m: UMat, roi: cv2.typing.Rect) -> None: ...
+ @_typing.overload
+ def __init__(self, m: UMat, ranges: _typing.Sequence[cv2.typing.Range]) -> None: ...
+
+ @staticmethod
+ def queue() -> cv2.typing.IntPointer: ...
+
+ @staticmethod
+ def context() -> cv2.typing.IntPointer: ...
+
+ def get(self) -> cv2.typing.MatLike: ...
+
+ def isContinuous(self) -> bool: ...
+
+ def isSubmatrix(self) -> bool: ...
+
+ def handle(self, accessFlags: AccessFlag) -> cv2.typing.IntPointer: ...
+
+
+class GeneralizedHough(Algorithm):
+ """Typing stub for cv2.GeneralizedHough: setTemplate/detect overloads in MatLike and UMat flavors (image-based or edges+dx+dy), plus Canny-threshold, min-dist, dp and buffer-size accessors."""
+ # Functions
+ @_typing.overload
+ def setTemplate(self, templ: cv2.typing.MatLike, templCenter: cv2.typing.Point = ...) -> None: ...
+ @_typing.overload
+ def setTemplate(self, templ: UMat, templCenter: cv2.typing.Point = ...) -> None: ...
+ @_typing.overload
+ def setTemplate(self, edges: cv2.typing.MatLike, dx: cv2.typing.MatLike, dy: cv2.typing.MatLike, templCenter: cv2.typing.Point = ...) -> None: ...
+ @_typing.overload
+ def setTemplate(self, edges: UMat, dx: UMat, dy: UMat, templCenter: cv2.typing.Point = ...) -> None: ...
+
+ @_typing.overload
+ def detect(self, image: cv2.typing.MatLike, positions: cv2.typing.MatLike | None = ..., votes: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def detect(self, image: UMat, positions: UMat | None = ..., votes: UMat | None = ...) -> tuple[UMat, UMat]: ...
+ @_typing.overload
+ def detect(self, edges: cv2.typing.MatLike, dx: cv2.typing.MatLike, dy: cv2.typing.MatLike, positions: cv2.typing.MatLike | None = ..., votes: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def detect(self, edges: UMat, dx: UMat, dy: UMat, positions: UMat | None = ..., votes: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+ def setCannyLowThresh(self, cannyLowThresh: int) -> None: ...
+
+ def getCannyLowThresh(self) -> int: ...
+
+ def setCannyHighThresh(self, cannyHighThresh: int) -> None: ...
+
+ def getCannyHighThresh(self) -> int: ...
+
+ def setMinDist(self, minDist: float) -> None: ...
+
+ def getMinDist(self) -> float: ...
+
+ def setDp(self, dp: float) -> None: ...
+
+ def getDp(self) -> float: ...
+
+ def setMaxBufferSize(self, maxBufferSize: int) -> None: ...
+
+ def getMaxBufferSize(self) -> int: ...
+
+
+class GeneralizedHoughBallard(GeneralizedHough):
+ """Typing stub for cv2.GeneralizedHoughBallard: adds levels and votes-threshold accessors to GeneralizedHough."""
+ # Functions
+ def setLevels(self, levels: int) -> None: ...
+
+ def getLevels(self) -> int: ...
+
+ def setVotesThreshold(self, votesThreshold: int) -> None: ...
+
+ def getVotesThreshold(self) -> int: ...
+
+
+class GeneralizedHoughGuil(GeneralizedHough):
+ """Typing stub for cv2.GeneralizedHoughGuil: adds xi/levels plus angle-, scale- and position-related getter/setter pairs to GeneralizedHough."""
+ # Functions
+ def setXi(self, xi: float) -> None: ...
+
+ def getXi(self) -> float: ...
+
+ def setLevels(self, levels: int) -> None: ...
+
+ def getLevels(self) -> int: ...
+
+ def setAngleEpsilon(self, angleEpsilon: float) -> None: ...
+
+ def getAngleEpsilon(self) -> float: ...
+
+ def setMinAngle(self, minAngle: float) -> None: ...
+
+ def getMinAngle(self) -> float: ...
+
+ def setMaxAngle(self, maxAngle: float) -> None: ...
+
+ def getMaxAngle(self) -> float: ...
+
+ def setAngleStep(self, angleStep: float) -> None: ...
+
+ def getAngleStep(self) -> float: ...
+
+ def setAngleThresh(self, angleThresh: int) -> None: ...
+
+ def getAngleThresh(self) -> int: ...
+
+ def setMinScale(self, minScale: float) -> None: ...
+
+ def getMinScale(self) -> float: ...
+
+ def setMaxScale(self, maxScale: float) -> None: ...
+
+ def getMaxScale(self) -> float: ...
+
+ def setScaleStep(self, scaleStep: float) -> None: ...
+
+ def getScaleStep(self) -> float: ...
+
+ def setScaleThresh(self, scaleThresh: int) -> None: ...
+
+ def getScaleThresh(self) -> int: ...
+
+ def setPosThresh(self, posThresh: int) -> None: ...
+
+ def getPosThresh(self) -> int: ...
+
+
+class CLAHE(Algorithm):
+ """Typing stub for cv2.CLAHE: apply() in MatLike and UMat flavors, clip-limit and tiles-grid-size accessors, and collectGarbage()."""
+ # Functions
+ @_typing.overload
+ def apply(self, src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+ def setClipLimit(self, clipLimit: float) -> None: ...
+
+ def getClipLimit(self) -> float: ...
+
+ def setTilesGridSize(self, tileGridSize: cv2.typing.Size) -> None: ...
+
+ def getTilesGridSize(self) -> cv2.typing.Size: ...
+
+ def collectGarbage(self) -> None: ...
+
+
+class Subdiv2D:
+ """Typing stub for cv2.Subdiv2D: point insertion and location, edge/triangle/facet list extraction, and edge-navigation helpers (nextEdge/rotateEdge/symEdge/edgeOrg/edgeDst)."""
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, rect: cv2.typing.Rect) -> None: ...
+
+ def initDelaunay(self, rect: cv2.typing.Rect) -> None: ...
+
+ @_typing.overload
+ def insert(self, pt: cv2.typing.Point2f) -> int: ...
+ @_typing.overload
+ def insert(self, ptvec: _typing.Sequence[cv2.typing.Point2f]) -> None: ...
+
+ def locate(self, pt: cv2.typing.Point2f) -> tuple[int, int, int]: ...
+
+ def findNearest(self, pt: cv2.typing.Point2f) -> tuple[int, cv2.typing.Point2f]: ...
+
+ def getEdgeList(self) -> _typing.Sequence[cv2.typing.Vec4f]: ...
+
+ def getLeadingEdgeList(self) -> _typing.Sequence[int]: ...
+
+ def getTriangleList(self) -> _typing.Sequence[cv2.typing.Vec6f]: ...
+
+ def getVoronoiFacetList(self, idx: _typing.Sequence[int]) -> tuple[_typing.Sequence[_typing.Sequence[cv2.typing.Point2f]], _typing.Sequence[cv2.typing.Point2f]]: ...
+
+ def getVertex(self, vertex: int) -> tuple[cv2.typing.Point2f, int]: ...
+
+ def getEdge(self, edge: int, nextEdgeType: int) -> int: ...
+
+ def nextEdge(self, edge: int) -> int: ...
+
+ def rotateEdge(self, edge: int, rotate: int) -> int: ...
+
+ def symEdge(self, edge: int) -> int: ...
+
+ def edgeOrg(self, edge: int) -> tuple[int, cv2.typing.Point2f]: ...
+
+ def edgeDst(self, edge: int) -> tuple[int, cv2.typing.Point2f]: ...
+
+
+class LineSegmentDetector(Algorithm):
+ """Typing stub for cv2.LineSegmentDetector: detect/drawSegments/compareSegments, each in MatLike and UMat flavors."""
+ # Functions
+ @_typing.overload
+ def detect(self, image: cv2.typing.MatLike, lines: cv2.typing.MatLike | None = ..., width: cv2.typing.MatLike | None = ..., prec: cv2.typing.MatLike | None = ..., nfa: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def detect(self, image: UMat, lines: UMat | None = ..., width: UMat | None = ..., prec: UMat | None = ..., nfa: UMat | None = ...) -> tuple[UMat, UMat, UMat, UMat]: ...
+
+ @_typing.overload
+ def drawSegments(self, image: cv2.typing.MatLike, lines: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def drawSegments(self, image: UMat, lines: UMat) -> UMat: ...
+
+ @_typing.overload
+ def compareSegments(self, size: cv2.typing.Size, lines1: cv2.typing.MatLike, lines2: cv2.typing.MatLike, image: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def compareSegments(self, size: cv2.typing.Size, lines1: UMat, lines2: UMat, image: UMat | None = ...) -> tuple[int, UMat]: ...
+
+
+class Tonemap(Algorithm):
+ """Typing stub for cv2.Tonemap: process() in MatLike and UMat flavors plus gamma accessors; base of the Tonemap* subclasses below."""
+ # Functions
+ @_typing.overload
+ def process(self, src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def process(self, src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+ def getGamma(self) -> float: ...
+
+ def setGamma(self, gamma: float) -> None: ...
+
+
+class TonemapDrago(Tonemap):
+ """Typing stub for cv2.TonemapDrago: adds saturation and bias accessors to Tonemap."""
+ # Functions
+ def getSaturation(self) -> float: ...
+
+ def setSaturation(self, saturation: float) -> None: ...
+
+ def getBias(self) -> float: ...
+
+ def setBias(self, bias: float) -> None: ...
+
+
+class TonemapReinhard(Tonemap):
+ """Typing stub for cv2.TonemapReinhard: adds intensity, light-adaptation and color-adaptation accessors to Tonemap."""
+ # Functions
+ def getIntensity(self) -> float: ...
+
+ def setIntensity(self, intensity: float) -> None: ...
+
+ def getLightAdaptation(self) -> float: ...
+
+ def setLightAdaptation(self, light_adapt: float) -> None: ...
+
+ def getColorAdaptation(self) -> float: ...
+
+ def setColorAdaptation(self, color_adapt: float) -> None: ...
+
+
+class TonemapMantiuk(Tonemap):
+ """Typing stub for cv2.TonemapMantiuk: adds scale and saturation accessors to Tonemap."""
+ # Functions
+ def getScale(self) -> float: ...
+
+ def setScale(self, scale: float) -> None: ...
+
+ def getSaturation(self) -> float: ...
+
+ def setSaturation(self, saturation: float) -> None: ...
+
+
+class AlignExposures(Algorithm):
+ """Typing stub for cv2.AlignExposures: process(src, dst, times, response) overloads filling dst in place (both overloads return None)."""
+ # Functions
+ @_typing.overload
+ def process(self, src: _typing.Sequence[cv2.typing.MatLike], dst: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike) -> None: ...
+ @_typing.overload
+ # NOTE(review): in the UMat overload, dst remains Sequence[MatLike] while src/times/response are UMat — this mirrors the generated stub; confirm against the cv2 stub generator before changing.
+ def process(self, src: _typing.Sequence[UMat], dst: _typing.Sequence[cv2.typing.MatLike], times: UMat, response: UMat) -> None: ...
+
+
+class AlignMTB(AlignExposures):
+ """Typing stub for cv2.AlignMTB: extends AlignExposures.process with a times/response-free variant, plus calculateShift/shiftMat/computeBitmaps and max-bits/exclude-range/cut accessors."""
+ # Functions
+ @_typing.overload
+ def process(self, src: _typing.Sequence[cv2.typing.MatLike], dst: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike) -> None: ...
+ @_typing.overload
+ # NOTE(review): dst is Sequence[MatLike] even in the UMat overloads — mirrors the generated stub; verify against the cv2 stub generator before changing.
+ def process(self, src: _typing.Sequence[UMat], dst: _typing.Sequence[cv2.typing.MatLike], times: UMat, response: UMat) -> None: ...
+ @_typing.overload
+ def process(self, src: _typing.Sequence[cv2.typing.MatLike], dst: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+ @_typing.overload
+ def process(self, src: _typing.Sequence[UMat], dst: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+ @_typing.overload
+ def calculateShift(self, img0: cv2.typing.MatLike, img1: cv2.typing.MatLike) -> cv2.typing.Point: ...
+ @_typing.overload
+ def calculateShift(self, img0: UMat, img1: UMat) -> cv2.typing.Point: ...
+
+ @_typing.overload
+ def shiftMat(self, src: cv2.typing.MatLike, shift: cv2.typing.Point, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def shiftMat(self, src: UMat, shift: cv2.typing.Point, dst: UMat | None = ...) -> UMat: ...
+
+ @_typing.overload
+ def computeBitmaps(self, img: cv2.typing.MatLike, tb: cv2.typing.MatLike | None = ..., eb: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def computeBitmaps(self, img: UMat, tb: UMat | None = ..., eb: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+ def getMaxBits(self) -> int: ...
+
+ def setMaxBits(self, max_bits: int) -> None: ...
+
+ def getExcludeRange(self) -> int: ...
+
+ def setExcludeRange(self, exclude_range: int) -> None: ...
+
+ def getCut(self) -> bool: ...
+
+ def setCut(self, value: bool) -> None: ...
+
+
+class CalibrateCRF(Algorithm):
+ """Typing stub for cv2.CalibrateCRF: process(src, times[, dst]) overloads in MatLike and UMat flavors; base of the Calibrate* subclasses below."""
+ # Functions
+ @_typing.overload
+ def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def process(self, src: _typing.Sequence[UMat], times: UMat, dst: UMat | None = ...) -> UMat: ...
+
+
+class CalibrateDebevec(CalibrateCRF):
+    # Stub: getter/setter pairs for the lambda, samples, and random
+    # parameters; `process` is inherited from CalibrateCRF.
+    # Functions
+    def getLambda(self) -> float: ...
+
+    def setLambda(self, lambda_: float) -> None: ...
+
+    def getSamples(self) -> int: ...
+
+    def setSamples(self, samples: int) -> None: ...
+
+    def getRandom(self) -> bool: ...
+
+    def setRandom(self, random: bool) -> None: ...
+
+
+class CalibrateRobertson(CalibrateCRF):
+    # Stub: accessors for max-iteration count and threshold, plus a
+    # read-only radiance matrix; `process` is inherited from CalibrateCRF.
+    # Functions
+    def getMaxIter(self) -> int: ...
+
+    def setMaxIter(self, max_iter: int) -> None: ...
+
+    def getThreshold(self) -> float: ...
+
+    def setThreshold(self, threshold: float) -> None: ...
+
+    def getRadiance(self) -> cv2.typing.MatLike: ...
+
+
+class MergeExposures(Algorithm):
+    # Stub: base `process` interface for the Merge* algorithms — merges an
+    # image sequence given `times` and `response` into a single output.
+    # Functions
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, response: UMat, dst: UMat | None = ...) -> UMat: ...
+
+
+class MergeDebevec(MergeExposures):
+    # Stub: `process` with and without an explicit `response` matrix,
+    # in Mat-based and UMat-based variants.
+    # Functions
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, response: UMat, dst: UMat | None = ...) -> UMat: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, dst: UMat | None = ...) -> UMat: ...
+
+
+class MergeMertens(MergeExposures):
+    # Stub: `process` overloads with (times, response) or with images only,
+    # plus accessors for the contrast/saturation/exposure weights.
+    # Functions
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, response: UMat, dst: UMat | None = ...) -> UMat: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], dst: UMat | None = ...) -> UMat: ...
+
+    def getContrastWeight(self) -> float: ...
+
+    # NOTE(review): "contrast_weiht" (sic) — presumably mirrors the generated
+    # binding's argument name; verify against upstream before renaming, since
+    # a stub rename would break keyword-argument type checking.
+    def setContrastWeight(self, contrast_weiht: float) -> None: ...
+
+    def getSaturationWeight(self) -> float: ...
+
+    def setSaturationWeight(self, saturation_weight: float) -> None: ...
+
+    def getExposureWeight(self) -> float: ...
+
+    def setExposureWeight(self, exposure_weight: float) -> None: ...
+
+
+class MergeRobertson(MergeExposures):
+    # Stub: `process` with and without an explicit `response` matrix,
+    # in Mat-based and UMat-based variants.
+    # Functions
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, response: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, response: UMat, dst: UMat | None = ...) -> UMat: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[cv2.typing.MatLike], times: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def process(self, src: _typing.Sequence[UMat], times: UMat, dst: UMat | None = ...) -> UMat: ...
+
+
+class Feature2D:
+    # Stub: base interface for keypoint detectors / descriptor extractors.
+    # detect/compute/detectAndCompute each have Mat and UMat overloads, and
+    # detect/compute additionally accept whole image sequences (returning a
+    # per-image sequence of results).  read/write accept either a file name
+    # or a FileStorage/FileNode.
+    # Functions
+    @_typing.overload
+    def detect(self, image: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> _typing.Sequence[KeyPoint]: ...
+    @_typing.overload
+    def detect(self, image: UMat, mask: UMat | None = ...) -> _typing.Sequence[KeyPoint]: ...
+    @_typing.overload
+    def detect(self, images: _typing.Sequence[cv2.typing.MatLike], masks: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[_typing.Sequence[KeyPoint]]: ...
+    @_typing.overload
+    def detect(self, images: _typing.Sequence[UMat], masks: _typing.Sequence[UMat] | None = ...) -> _typing.Sequence[_typing.Sequence[KeyPoint]]: ...
+
+    @_typing.overload
+    def compute(self, image: cv2.typing.MatLike, keypoints: _typing.Sequence[KeyPoint], descriptors: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[KeyPoint], cv2.typing.MatLike]: ...
+    @_typing.overload
+    def compute(self, image: UMat, keypoints: _typing.Sequence[KeyPoint], descriptors: UMat | None = ...) -> tuple[_typing.Sequence[KeyPoint], UMat]: ...
+    @_typing.overload
+    def compute(self, images: _typing.Sequence[cv2.typing.MatLike], keypoints: _typing.Sequence[_typing.Sequence[KeyPoint]], descriptors: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[_typing.Sequence[_typing.Sequence[KeyPoint]], _typing.Sequence[cv2.typing.MatLike]]: ...
+    @_typing.overload
+    def compute(self, images: _typing.Sequence[UMat], keypoints: _typing.Sequence[_typing.Sequence[KeyPoint]], descriptors: _typing.Sequence[UMat] | None = ...) -> tuple[_typing.Sequence[_typing.Sequence[KeyPoint]], _typing.Sequence[UMat]]: ...
+
+    @_typing.overload
+    def detectAndCompute(self, image: cv2.typing.MatLike, mask: cv2.typing.MatLike, descriptors: cv2.typing.MatLike | None = ..., useProvidedKeypoints: bool = ...) -> tuple[_typing.Sequence[KeyPoint], cv2.typing.MatLike]: ...
+    @_typing.overload
+    def detectAndCompute(self, image: UMat, mask: UMat, descriptors: UMat | None = ..., useProvidedKeypoints: bool = ...) -> tuple[_typing.Sequence[KeyPoint], UMat]: ...
+
+    def descriptorSize(self) -> int: ...
+
+    def descriptorType(self) -> int: ...
+
+    def defaultNorm(self) -> int: ...
+
+    @_typing.overload
+    def write(self, fileName: str) -> None: ...
+    @_typing.overload
+    def write(self, fs: FileStorage, name: str) -> None: ...
+
+    @_typing.overload
+    def read(self, fileName: str) -> None: ...
+    @_typing.overload
+    def read(self, arg1: FileNode) -> None: ...
+
+    def empty(self) -> bool: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class AffineFeature(Feature2D):
+    # Stub: wraps a backend Feature2D with tilt/roll view parameters.
+    # Functions
+    @classmethod
+    def create(cls, backend: Feature2D, maxTilt: int = ..., minTilt: int = ..., tiltStep: float = ..., rotateStepBase: float = ...) -> AffineFeature: ...
+
+    def setViewParams(self, tilts: _typing.Sequence[float], rolls: _typing.Sequence[float]) -> None: ...
+
+    # NOTE(review): getViewParams takes the sequences as arguments and
+    # returns None — presumably they are filled in place by the binding.
+    def getViewParams(self, tilts: _typing.Sequence[float], rolls: _typing.Sequence[float]) -> None: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class SIFT(Feature2D):
+    # Stub: factory overloads (the second adds an explicit descriptorType)
+    # plus getter/setter pairs for every tuning parameter.
+    # Functions
+    @classmethod
+    @_typing.overload
+    def create(cls, nfeatures: int = ..., nOctaveLayers: int = ..., contrastThreshold: float = ..., edgeThreshold: float = ..., sigma: float = ..., enable_precise_upscale: bool = ...) -> SIFT: ...
+    @classmethod
+    @_typing.overload
+    def create(cls, nfeatures: int, nOctaveLayers: int, contrastThreshold: float, edgeThreshold: float, sigma: float, descriptorType: int, enable_precise_upscale: bool = ...) -> SIFT: ...
+
+    def getDefaultName(self) -> str: ...
+
+    def setNFeatures(self, maxFeatures: int) -> None: ...
+
+    def getNFeatures(self) -> int: ...
+
+    def setNOctaveLayers(self, nOctaveLayers: int) -> None: ...
+
+    def getNOctaveLayers(self) -> int: ...
+
+    def setContrastThreshold(self, contrastThreshold: float) -> None: ...
+
+    def getContrastThreshold(self) -> float: ...
+
+    def setEdgeThreshold(self, edgeThreshold: float) -> None: ...
+
+    def getEdgeThreshold(self) -> float: ...
+
+    def setSigma(self, sigma: float) -> None: ...
+
+    def getSigma(self) -> float: ...
+
+
+class BRISK(Feature2D):
+    # Stub: three factory overloads — defaults only, custom sampling
+    # pattern (radius/number lists), or both thresholds and a pattern.
+    # Functions
+    @classmethod
+    @_typing.overload
+    def create(cls, thresh: int = ..., octaves: int = ..., patternScale: float = ...) -> BRISK: ...
+    @classmethod
+    @_typing.overload
+    def create(cls, radiusList: _typing.Sequence[float], numberList: _typing.Sequence[int], dMax: float = ..., dMin: float = ..., indexChange: _typing.Sequence[int] = ...) -> BRISK: ...
+    @classmethod
+    @_typing.overload
+    def create(cls, thresh: int, octaves: int, radiusList: _typing.Sequence[float], numberList: _typing.Sequence[int], dMax: float = ..., dMin: float = ..., indexChange: _typing.Sequence[int] = ...) -> BRISK: ...
+
+    def getDefaultName(self) -> str: ...
+
+    def setThreshold(self, threshold: int) -> None: ...
+
+    def getThreshold(self) -> int: ...
+
+    def setOctaves(self, octaves: int) -> None: ...
+
+    def getOctaves(self) -> int: ...
+
+    def setPatternScale(self, patternScale: float) -> None: ...
+
+    def getPatternScale(self) -> float: ...
+
+
+class ORB(Feature2D):
+    # Stub: single factory plus getter/setter pairs for each parameter;
+    # scoreType uses the ORB_ScoreType enum declared elsewhere in the stub.
+    # Functions
+    @classmethod
+    def create(cls, nfeatures: int = ..., scaleFactor: float = ..., nlevels: int = ..., edgeThreshold: int = ..., firstLevel: int = ..., WTA_K: int = ..., scoreType: ORB_ScoreType = ..., patchSize: int = ..., fastThreshold: int = ...) -> ORB: ...
+
+    def setMaxFeatures(self, maxFeatures: int) -> None: ...
+
+    def getMaxFeatures(self) -> int: ...
+
+    def setScaleFactor(self, scaleFactor: float) -> None: ...
+
+    def getScaleFactor(self) -> float: ...
+
+    def setNLevels(self, nlevels: int) -> None: ...
+
+    def getNLevels(self) -> int: ...
+
+    def setEdgeThreshold(self, edgeThreshold: int) -> None: ...
+
+    def getEdgeThreshold(self) -> int: ...
+
+    def setFirstLevel(self, firstLevel: int) -> None: ...
+
+    def getFirstLevel(self) -> int: ...
+
+    def setWTA_K(self, wta_k: int) -> None: ...
+
+    def getWTA_K(self) -> int: ...
+
+    def setScoreType(self, scoreType: ORB_ScoreType) -> None: ...
+
+    def getScoreType(self) -> ORB_ScoreType: ...
+
+    def setPatchSize(self, patchSize: int) -> None: ...
+
+    def getPatchSize(self) -> int: ...
+
+    def setFastThreshold(self, fastThreshold: int) -> None: ...
+
+    def getFastThreshold(self) -> int: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class MSER(Feature2D):
+    # Stub: factory, a detectRegions overload pair returning
+    # (point-contours, bounding rects), and parameter accessors.
+    # Functions
+    @classmethod
+    def create(cls, delta: int = ..., min_area: int = ..., max_area: int = ..., max_variation: float = ..., min_diversity: float = ..., max_evolution: int = ..., area_threshold: float = ..., min_margin: float = ..., edge_blur_size: int = ...) -> MSER: ...
+
+    @_typing.overload
+    def detectRegions(self, image: cv2.typing.MatLike) -> tuple[_typing.Sequence[_typing.Sequence[cv2.typing.Point]], _typing.Sequence[cv2.typing.Rect]]: ...
+    @_typing.overload
+    def detectRegions(self, image: UMat) -> tuple[_typing.Sequence[_typing.Sequence[cv2.typing.Point]], _typing.Sequence[cv2.typing.Rect]]: ...
+
+    def setDelta(self, delta: int) -> None: ...
+
+    def getDelta(self) -> int: ...
+
+    def setMinArea(self, minArea: int) -> None: ...
+
+    def getMinArea(self) -> int: ...
+
+    def setMaxArea(self, maxArea: int) -> None: ...
+
+    def getMaxArea(self) -> int: ...
+
+    def setMaxVariation(self, maxVariation: float) -> None: ...
+
+    def getMaxVariation(self) -> float: ...
+
+    def setMinDiversity(self, minDiversity: float) -> None: ...
+
+    def getMinDiversity(self) -> float: ...
+
+    def setMaxEvolution(self, maxEvolution: int) -> None: ...
+
+    def getMaxEvolution(self) -> int: ...
+
+    def setAreaThreshold(self, areaThreshold: float) -> None: ...
+
+    def getAreaThreshold(self) -> float: ...
+
+    def setMinMargin(self, min_margin: float) -> None: ...
+
+    def getMinMargin(self) -> float: ...
+
+    def setEdgeBlurSize(self, edge_blur_size: int) -> None: ...
+
+    def getEdgeBlurSize(self) -> int: ...
+
+    def setPass2Only(self, f: bool) -> None: ...
+
+    def getPass2Only(self) -> bool: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class FastFeatureDetector(Feature2D):
+    # Stub: factory plus threshold / nonmaxSuppression / detector-type
+    # accessors; `type` uses the FastFeatureDetector_DetectorType enum.
+    # Functions
+    @classmethod
+    def create(cls, threshold: int = ..., nonmaxSuppression: bool = ..., type: FastFeatureDetector_DetectorType = ...) -> FastFeatureDetector: ...
+
+    def setThreshold(self, threshold: int) -> None: ...
+
+    def getThreshold(self) -> int: ...
+
+    def setNonmaxSuppression(self, f: bool) -> None: ...
+
+    def getNonmaxSuppression(self) -> bool: ...
+
+    def setType(self, type: FastFeatureDetector_DetectorType) -> None: ...
+
+    def getType(self) -> FastFeatureDetector_DetectorType: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class AgastFeatureDetector(Feature2D):
+    # Stub: mirrors FastFeatureDetector's interface with the
+    # AgastFeatureDetector_DetectorType enum instead.
+    # Functions
+    @classmethod
+    def create(cls, threshold: int = ..., nonmaxSuppression: bool = ..., type: AgastFeatureDetector_DetectorType = ...) -> AgastFeatureDetector: ...
+
+    def setThreshold(self, threshold: int) -> None: ...
+
+    def getThreshold(self) -> int: ...
+
+    def setNonmaxSuppression(self, f: bool) -> None: ...
+
+    def getNonmaxSuppression(self) -> bool: ...
+
+    def setType(self, type: AgastFeatureDetector_DetectorType) -> None: ...
+
+    def getType(self) -> AgastFeatureDetector_DetectorType: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class GFTTDetector(Feature2D):
+    # Stub: two factory overloads (the second adds an explicit gradient
+    # size) plus parameter accessors.
+    # Functions
+    @classmethod
+    @_typing.overload
+    def create(cls, maxCorners: int = ..., qualityLevel: float = ..., minDistance: float = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> GFTTDetector: ...
+    # NOTE(review): "gradiantSize" (sic) — presumably mirrors the generated
+    # binding's argument name; verify against upstream before renaming.
+    @classmethod
+    @_typing.overload
+    def create(cls, maxCorners: int, qualityLevel: float, minDistance: float, blockSize: int, gradiantSize: int, useHarrisDetector: bool = ..., k: float = ...) -> GFTTDetector: ...
+
+    def setMaxFeatures(self, maxFeatures: int) -> None: ...
+
+    def getMaxFeatures(self) -> int: ...
+
+    def setQualityLevel(self, qlevel: float) -> None: ...
+
+    def getQualityLevel(self) -> float: ...
+
+    def setMinDistance(self, minDistance: float) -> None: ...
+
+    def getMinDistance(self) -> float: ...
+
+    def setBlockSize(self, blockSize: int) -> None: ...
+
+    def getBlockSize(self) -> int: ...
+
+    def setGradientSize(self, gradientSize_: int) -> None: ...
+
+    def getGradientSize(self) -> int: ...
+
+    def setHarrisDetector(self, val: bool) -> None: ...
+
+    def getHarrisDetector(self) -> bool: ...
+
+    def setK(self, k: float) -> None: ...
+
+    def getK(self) -> float: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class SimpleBlobDetector(Feature2D):
+    # Stub: nested Params value-object (thresholds and per-criterion
+    # filter toggles) plus a factory that accepts it.
+    # Classes
+    class Params:
+        # Threshold sweep and blob-filtering fields; each filterBy* flag
+        # gates the min/max pair that follows it.
+        thresholdStep: float
+        minThreshold: float
+        maxThreshold: float
+        minRepeatability: int
+        minDistBetweenBlobs: float
+        filterByColor: bool
+        blobColor: int
+        filterByArea: bool
+        minArea: float
+        maxArea: float
+        filterByCircularity: bool
+        minCircularity: float
+        maxCircularity: float
+        filterByInertia: bool
+        minInertiaRatio: float
+        maxInertiaRatio: float
+        filterByConvexity: bool
+        minConvexity: float
+        maxConvexity: float
+        collectContours: bool
+
+        # Functions
+        def __init__(self) -> None: ...
+
+
+
+    # Functions
+    @classmethod
+    def create(cls, parameters: SimpleBlobDetector.Params = ...) -> SimpleBlobDetector: ...
+
+    def setParams(self, params: SimpleBlobDetector.Params) -> None: ...
+
+    def getParams(self) -> SimpleBlobDetector.Params: ...
+
+    def getDefaultName(self) -> str: ...
+
+    def getBlobContours(self) -> _typing.Sequence[_typing.Sequence[cv2.typing.Point]]: ...
+
+
+class KAZE(Feature2D):
+    # Stub: factory plus accessors; `diffusivity` uses the
+    # KAZE_DiffusivityType enum declared elsewhere in the stub.
+    # Functions
+    @classmethod
+    def create(cls, extended: bool = ..., upright: bool = ..., threshold: float = ..., nOctaves: int = ..., nOctaveLayers: int = ..., diffusivity: KAZE_DiffusivityType = ...) -> KAZE: ...
+
+    def setExtended(self, extended: bool) -> None: ...
+
+    def getExtended(self) -> bool: ...
+
+    def setUpright(self, upright: bool) -> None: ...
+
+    def getUpright(self) -> bool: ...
+
+    def setThreshold(self, threshold: float) -> None: ...
+
+    def getThreshold(self) -> float: ...
+
+    def setNOctaves(self, octaves: int) -> None: ...
+
+    def getNOctaves(self) -> int: ...
+
+    def setNOctaveLayers(self, octaveLayers: int) -> None: ...
+
+    def getNOctaveLayers(self) -> int: ...
+
+    def setDiffusivity(self, diff: KAZE_DiffusivityType) -> None: ...
+
+    def getDiffusivity(self) -> KAZE_DiffusivityType: ...
+
+    def getDefaultName(self) -> str: ...
+
+
+class AKAZE(Feature2D):
+    # Stub: like KAZE but with descriptor type/size/channels and a
+    # max_points cap; shares the KAZE_DiffusivityType enum.
+    # Functions
+    @classmethod
+    def create(cls, descriptor_type: AKAZE_DescriptorType = ..., descriptor_size: int = ..., descriptor_channels: int = ..., threshold: float = ..., nOctaves: int = ..., nOctaveLayers: int = ..., diffusivity: KAZE_DiffusivityType = ..., max_points: int = ...) -> AKAZE: ...
+
+    def setDescriptorType(self, dtype: AKAZE_DescriptorType) -> None: ...
+
+    def getDescriptorType(self) -> AKAZE_DescriptorType: ...
+
+    def setDescriptorSize(self, dsize: int) -> None: ...
+
+    def getDescriptorSize(self) -> int: ...
+
+    def setDescriptorChannels(self, dch: int) -> None: ...
+
+    def getDescriptorChannels(self) -> int: ...
+
+    def setThreshold(self, threshold: float) -> None: ...
+
+    def getThreshold(self) -> float: ...
+
+    def setNOctaves(self, octaves: int) -> None: ...
+
+    def getNOctaves(self) -> int: ...
+
+    def setNOctaveLayers(self, octaveLayers: int) -> None: ...
+
+    def getNOctaveLayers(self) -> int: ...
+
+    def setDiffusivity(self, diff: KAZE_DiffusivityType) -> None: ...
+
+    def getDiffusivity(self) -> KAZE_DiffusivityType: ...
+
+    def getDefaultName(self) -> str: ...
+
+    def setMaxPoints(self, max_points: int) -> None: ...
+
+    def getMaxPoints(self) -> int: ...
+
+
+class DescriptorMatcher(Algorithm):
+    # Stub: base matcher interface.  match/knnMatch/radiusMatch each have
+    # four overloads: (query, train, mask) against an explicit train set,
+    # or (query, masks) against descriptors previously given to add() —
+    # in Mat-based and UMat-based variants.  create() accepts either a
+    # string name or a DescriptorMatcher_MatcherType enum value.
+    # Functions
+    @_typing.overload
+    def add(self, descriptors: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+    @_typing.overload
+    def add(self, descriptors: _typing.Sequence[UMat]) -> None: ...
+
+    def getTrainDescriptors(self) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+    def clear(self) -> None: ...
+
+    def empty(self) -> bool: ...
+
+    def isMaskSupported(self) -> bool: ...
+
+    def train(self) -> None: ...
+
+    @_typing.overload
+    def match(self, queryDescriptors: cv2.typing.MatLike, trainDescriptors: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> _typing.Sequence[DMatch]: ...
+    @_typing.overload
+    def match(self, queryDescriptors: UMat, trainDescriptors: UMat, mask: UMat | None = ...) -> _typing.Sequence[DMatch]: ...
+    @_typing.overload
+    def match(self, queryDescriptors: cv2.typing.MatLike, masks: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[DMatch]: ...
+    @_typing.overload
+    def match(self, queryDescriptors: UMat, masks: _typing.Sequence[UMat] | None = ...) -> _typing.Sequence[DMatch]: ...
+
+    @_typing.overload
+    def knnMatch(self, queryDescriptors: cv2.typing.MatLike, trainDescriptors: cv2.typing.MatLike, k: int, mask: cv2.typing.MatLike | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def knnMatch(self, queryDescriptors: UMat, trainDescriptors: UMat, k: int, mask: UMat | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def knnMatch(self, queryDescriptors: cv2.typing.MatLike, k: int, masks: _typing.Sequence[cv2.typing.MatLike] | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def knnMatch(self, queryDescriptors: UMat, k: int, masks: _typing.Sequence[UMat] | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+
+    @_typing.overload
+    def radiusMatch(self, queryDescriptors: cv2.typing.MatLike, trainDescriptors: cv2.typing.MatLike, maxDistance: float, mask: cv2.typing.MatLike | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def radiusMatch(self, queryDescriptors: UMat, trainDescriptors: UMat, maxDistance: float, mask: UMat | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def radiusMatch(self, queryDescriptors: cv2.typing.MatLike, maxDistance: float, masks: _typing.Sequence[cv2.typing.MatLike] | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+    @_typing.overload
+    def radiusMatch(self, queryDescriptors: UMat, maxDistance: float, masks: _typing.Sequence[UMat] | None = ..., compactResult: bool = ...) -> _typing.Sequence[_typing.Sequence[DMatch]]: ...
+
+    @_typing.overload
+    def write(self, fileName: str) -> None: ...
+    @_typing.overload
+    def write(self, fs: FileStorage, name: str) -> None: ...
+
+    @_typing.overload
+    def read(self, fileName: str) -> None: ...
+    @_typing.overload
+    def read(self, arg1: FileNode) -> None: ...
+
+    def clone(self, emptyTrainData: bool = ...) -> DescriptorMatcher: ...
+
+    @classmethod
+    @_typing.overload
+    def create(cls, descriptorMatcherType: str) -> DescriptorMatcher: ...
+    @classmethod
+    @_typing.overload
+    def create(cls, matcherType: DescriptorMatcher_MatcherType) -> DescriptorMatcher: ...
+
+
+class BFMatcher(DescriptorMatcher):
+    # Stub: brute-force matcher; constructor and factory share the same
+    # (normType, crossCheck) parameters.
+    # Functions
+    def __init__(self, normType: int = ..., crossCheck: bool = ...) -> None: ...
+
+    @classmethod
+    def create(cls, normType: int = ..., crossCheck: bool = ...) -> BFMatcher: ...
+
+
+class FlannBasedMatcher(DescriptorMatcher):
+    # Stub: FLANN-backed matcher configured via IndexParams/SearchParams
+    # dict-like aliases from cv2.typing; create() takes no arguments.
+    # Functions
+    def __init__(self, indexParams: cv2.typing.IndexParams = ..., searchParams: cv2.typing.SearchParams = ...) -> None: ...
+
+    @classmethod
+    def create(cls) -> FlannBasedMatcher: ...
+
+
+class BOWTrainer:
+    # Stub: bag-of-words vocabulary trainer base — accumulate descriptors
+    # with add(), then cluster() either the stored set (no-arg overload)
+    # or an explicitly supplied descriptor matrix.
+    # Functions
+    def add(self, descriptors: cv2.typing.MatLike) -> None: ...
+
+    def getDescriptors(self) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+    def descriptorsCount(self) -> int: ...
+
+    def clear(self) -> None: ...
+
+    @_typing.overload
+    def cluster(self) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def cluster(self, descriptors: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+
+
+class BOWKMeansTrainer(BOWTrainer):
+    # Stub: k-means-based trainer; re-declares the cluster() overload pair
+    # with the same signatures as the base class.
+    # Functions
+    def __init__(self, clusterCount: int, termcrit: cv2.typing.TermCriteria = ..., attempts: int = ..., flags: int = ...) -> None: ...
+
+    @_typing.overload
+    def cluster(self) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def cluster(self, descriptors: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+
+
+class BOWImgDescriptorExtractor:
+    # Stub: combines a Feature2D extractor and a DescriptorMatcher with a
+    # vocabulary matrix to compute a per-image BOW descriptor.
+    # Functions
+    def __init__(self, dextractor: Feature2D, dmatcher: DescriptorMatcher) -> None: ...
+
+    def setVocabulary(self, vocabulary: cv2.typing.MatLike) -> None: ...
+
+    def getVocabulary(self) -> cv2.typing.MatLike: ...
+
+    def compute(self, image: cv2.typing.MatLike, keypoints: _typing.Sequence[KeyPoint], imgDescriptor: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+
+    def descriptorSize(self) -> int: ...
+
+    def descriptorType(self) -> int: ...
+
+
+class VideoCapture:
+    # Stub: __init__/open overloads accept either a filename (str) or a
+    # device index (int), optionally with an API preference and an int
+    # parameter list.  read/retrieve return a (success, frame) tuple in
+    # Mat or UMat form; waitAny polls several captures at once.
+    # Functions
+    @_typing.overload
+    def __init__(self) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, apiPreference: int = ...) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, apiPreference: int, params: _typing.Sequence[int]) -> None: ...
+    @_typing.overload
+    def __init__(self, index: int, apiPreference: int = ...) -> None: ...
+    @_typing.overload
+    def __init__(self, index: int, apiPreference: int, params: _typing.Sequence[int]) -> None: ...
+
+    @_typing.overload
+    def open(self, filename: str, apiPreference: int = ...) -> bool: ...
+    @_typing.overload
+    def open(self, filename: str, apiPreference: int, params: _typing.Sequence[int]) -> bool: ...
+    @_typing.overload
+    def open(self, index: int, apiPreference: int = ...) -> bool: ...
+    @_typing.overload
+    def open(self, index: int, apiPreference: int, params: _typing.Sequence[int]) -> bool: ...
+
+    def isOpened(self) -> bool: ...
+
+    def release(self) -> None: ...
+
+    def grab(self) -> bool: ...
+
+    @_typing.overload
+    def retrieve(self, image: cv2.typing.MatLike | None = ..., flag: int = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+    @_typing.overload
+    def retrieve(self, image: UMat | None = ..., flag: int = ...) -> tuple[bool, UMat]: ...
+
+    @_typing.overload
+    def read(self, image: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+    @_typing.overload
+    def read(self, image: UMat | None = ...) -> tuple[bool, UMat]: ...
+
+    def set(self, propId: int, value: float) -> bool: ...
+
+    def get(self, propId: int) -> float: ...
+
+    def getBackendName(self) -> str: ...
+
+    def setExceptionMode(self, enable: bool) -> None: ...
+
+    def getExceptionMode(self) -> bool: ...
+
+    @staticmethod
+    def waitAny(streams: _typing.Sequence[VideoCapture], timeoutNs: int = ...) -> tuple[bool, _typing.Sequence[int]]: ...
+
+
+class VideoWriter:
+    # Stub: __init__/open overload grid — with/without an apiPreference,
+    # and with either an isColor flag or an int parameter list.  write()
+    # accepts Mat or UMat frames; fourcc() packs four chars into an int.
+    # Functions
+    @_typing.overload
+    def __init__(self) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, fourcc: int, fps: float, frameSize: cv2.typing.Size, isColor: bool = ...) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, apiPreference: int, fourcc: int, fps: float, frameSize: cv2.typing.Size, isColor: bool = ...) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, fourcc: int, fps: float, frameSize: cv2.typing.Size, params: _typing.Sequence[int]) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str, apiPreference: int, fourcc: int, fps: float, frameSize: cv2.typing.Size, params: _typing.Sequence[int]) -> None: ...
+
+    @_typing.overload
+    def open(self, filename: str, fourcc: int, fps: float, frameSize: cv2.typing.Size, isColor: bool = ...) -> bool: ...
+    @_typing.overload
+    def open(self, filename: str, apiPreference: int, fourcc: int, fps: float, frameSize: cv2.typing.Size, isColor: bool = ...) -> bool: ...
+    @_typing.overload
+    def open(self, filename: str, fourcc: int, fps: float, frameSize: cv2.typing.Size, params: _typing.Sequence[int]) -> bool: ...
+    @_typing.overload
+    def open(self, filename: str, apiPreference: int, fourcc: int, fps: float, frameSize: cv2.typing.Size, params: _typing.Sequence[int]) -> bool: ...
+
+    def isOpened(self) -> bool: ...
+
+    def release(self) -> None: ...
+
+    @_typing.overload
+    def write(self, image: cv2.typing.MatLike) -> None: ...
+    @_typing.overload
+    def write(self, image: UMat) -> None: ...
+
+    def set(self, propId: int, value: float) -> bool: ...
+
+    def get(self, propId: int) -> float: ...
+
+    @staticmethod
+    def fourcc(c1: str, c2: str, c3: str, c4: str) -> int: ...
+
+    def getBackendName(self) -> str: ...
+
+
+class UsacParams:
+    # Stub: plain parameter object for the USAC robust-estimation API;
+    # enum-typed fields reference enums declared elsewhere in the stub.
+    confidence: float
+    isParallel: bool
+    loIterations: int
+    loMethod: LocalOptimMethod
+    loSampleSize: int
+    maxIterations: int
+    neighborsSearch: NeighborSearchMethod
+    randomGeneratorState: int
+    sampler: SamplingMethod
+    score: ScoreMethod
+    threshold: float
+    final_polisher: PolishingMethod
+    final_polisher_iterations: int
+
+    # Functions
+    def __init__(self) -> None: ...
+
+
+class CirclesGridFinderParameters:
+    # Stub: plain parameter object consumed by the circles-grid
+    # calibration-pattern finder; default-constructed, fields set directly.
+    densityNeighborhoodSize: cv2.typing.Size2f
+    minDensity: float
+    kmeansAttempts: int
+    minDistanceToAddKeypoint: int
+    keypointScale: int
+    minGraphConfidence: float
+    vertexGain: float
+    vertexPenalty: float
+    existingVertexGain: float
+    edgeGain: float
+    edgePenalty: float
+    convexHullFactor: float
+    minRNGEdgeSwitchDist: float
+    squareSize: float
+    maxRectifiedDistance: float
+
+    # Functions
+    def __init__(self) -> None: ...
+
+
+class StereoMatcher(Algorithm):
+    # Stub: base stereo-correspondence interface — compute() maps a
+    # left/right pair to a disparity matrix (Mat or UMat), plus accessors
+    # for the parameters shared by StereoBM and StereoSGBM.
+    # Functions
+    @_typing.overload
+    def compute(self, left: cv2.typing.MatLike, right: cv2.typing.MatLike, disparity: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+    @_typing.overload
+    def compute(self, left: UMat, right: UMat, disparity: UMat | None = ...) -> UMat: ...
+
+    def getMinDisparity(self) -> int: ...
+
+    def setMinDisparity(self, minDisparity: int) -> None: ...
+
+    def getNumDisparities(self) -> int: ...
+
+    def setNumDisparities(self, numDisparities: int) -> None: ...
+
+    def getBlockSize(self) -> int: ...
+
+    def setBlockSize(self, blockSize: int) -> None: ...
+
+    def getSpeckleWindowSize(self) -> int: ...
+
+    def setSpeckleWindowSize(self, speckleWindowSize: int) -> None: ...
+
+    def getSpeckleRange(self) -> int: ...
+
+    def setSpeckleRange(self, speckleRange: int) -> None: ...
+
+    def getDisp12MaxDiff(self) -> int: ...
+
+    def setDisp12MaxDiff(self, disp12MaxDiff: int) -> None: ...
+
+
+class StereoBM(StereoMatcher):
+    # Stub: block-matching stereo — pre-filter, texture/uniqueness and
+    # ROI accessors on top of the StereoMatcher base interface.
+    # Functions
+    def getPreFilterType(self) -> int: ...
+
+    def setPreFilterType(self, preFilterType: int) -> None: ...
+
+    def getPreFilterSize(self) -> int: ...
+
+    def setPreFilterSize(self, preFilterSize: int) -> None: ...
+
+    def getPreFilterCap(self) -> int: ...
+
+    def setPreFilterCap(self, preFilterCap: int) -> None: ...
+
+    def getTextureThreshold(self) -> int: ...
+
+    def setTextureThreshold(self, textureThreshold: int) -> None: ...
+
+    def getUniquenessRatio(self) -> int: ...
+
+    def setUniquenessRatio(self, uniquenessRatio: int) -> None: ...
+
+    def getSmallerBlockSize(self) -> int: ...
+
+    def setSmallerBlockSize(self, blockSize: int) -> None: ...
+
+    def getROI1(self) -> cv2.typing.Rect: ...
+
+    def setROI1(self, roi1: cv2.typing.Rect) -> None: ...
+
+    def getROI2(self) -> cv2.typing.Rect: ...
+
+    def setROI2(self, roi2: cv2.typing.Rect) -> None: ...
+
+    @classmethod
+    def create(cls, numDisparities: int = ..., blockSize: int = ...) -> StereoBM: ...
+
+
+class StereoSGBM(StereoMatcher):
+    # Stub: semi-global block matching — P1/P2 smoothness penalties and a
+    # mode selector on top of the StereoMatcher base interface; create()
+    # exposes the full parameter set with defaults.
+    # Functions
+    def getPreFilterCap(self) -> int: ...
+
+    def setPreFilterCap(self, preFilterCap: int) -> None: ...
+
+    def getUniquenessRatio(self) -> int: ...
+
+    def setUniquenessRatio(self, uniquenessRatio: int) -> None: ...
+
+    def getP1(self) -> int: ...
+
+    def setP1(self, P1: int) -> None: ...
+
+    def getP2(self) -> int: ...
+
+    def setP2(self, P2: int) -> None: ...
+
+    def getMode(self) -> int: ...
+
+    def setMode(self, mode: int) -> None: ...
+
+    @classmethod
+    def create(cls, minDisparity: int = ..., numDisparities: int = ..., blockSize: int = ..., P1: int = ..., P2: int = ..., disp12MaxDiff: int = ..., preFilterCap: int = ..., uniquenessRatio: int = ..., speckleWindowSize: int = ..., speckleRange: int = ..., mode: int = ...) -> StereoSGBM: ...
+
+
+class BaseCascadeClassifier(Algorithm):
+    # Stub: empty marker base for CascadeClassifier; adds no members.
+    ...
+
+class CascadeClassifier:
+    # Stub: cascade object detector.  detectMultiScale returns rects only;
+    # detectMultiScale2 adds neighbor counts; detectMultiScale3 adds
+    # reject levels and weights.  Each has Mat and UMat overloads.
+    # Functions
+    @_typing.overload
+    def __init__(self) -> None: ...
+    @_typing.overload
+    def __init__(self, filename: str) -> None: ...
+
+    def empty(self) -> bool: ...
+
+    def load(self, filename: str) -> bool: ...
+
+    def read(self, node: FileNode) -> bool: ...
+
+    @_typing.overload
+    def detectMultiScale(self, image: cv2.typing.MatLike, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ...) -> _typing.Sequence[cv2.typing.Rect]: ...
+    @_typing.overload
+    def detectMultiScale(self, image: UMat, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ...) -> _typing.Sequence[cv2.typing.Rect]: ...
+
+    @_typing.overload
+    def detectMultiScale2(self, image: cv2.typing.MatLike, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[int]]: ...
+    @_typing.overload
+    def detectMultiScale2(self, image: UMat, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[int]]: ...
+
+    @_typing.overload
+    def detectMultiScale3(self, image: cv2.typing.MatLike, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ..., outputRejectLevels: bool = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[int], _typing.Sequence[float]]: ...
+    @_typing.overload
+    def detectMultiScale3(self, image: UMat, scaleFactor: float = ..., minNeighbors: int = ..., flags: int = ..., minSize: cv2.typing.Size = ..., maxSize: cv2.typing.Size = ..., outputRejectLevels: bool = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[int], _typing.Sequence[float]]: ...
+
+    def isOldFormatCascade(self) -> bool: ...
+
+    def getOriginalWindowSize(self) -> cv2.typing.Size: ...
+
+    def getFeatureType(self) -> int: ...
+
+    @staticmethod
+    def convert(oldcascade: str, newcascade: str) -> bool: ...
+
+
class HOGDescriptor:
    """Stub for cv2.HOGDescriptor.

    Read-only properties mirror the detector's configuration; detection
    methods are overloaded for numpy (MatLike) and UMat images.
    """
    @property
    def winSize(self) -> cv2.typing.Size: ...
    @property
    def blockSize(self) -> cv2.typing.Size: ...
    @property
    def blockStride(self) -> cv2.typing.Size: ...
    @property
    def cellSize(self) -> cv2.typing.Size: ...
    @property
    def nbins(self) -> int: ...
    @property
    def derivAperture(self) -> int: ...
    @property
    def winSigma(self) -> float: ...
    @property
    def histogramNormType(self) -> HOGDescriptor_HistogramNormType: ...
    @property
    def L2HysThreshold(self) -> float: ...
    @property
    def gammaCorrection(self) -> bool: ...
    @property
    def svmDetector(self) -> _typing.Sequence[float]: ...
    @property
    def nlevels(self) -> int: ...
    @property
    def signedGradient(self) -> bool: ...

    # Functions
    @_typing.overload
    def __init__(self) -> None: ...
    @_typing.overload
    def __init__(self, _winSize: cv2.typing.Size, _blockSize: cv2.typing.Size, _blockStride: cv2.typing.Size, _cellSize: cv2.typing.Size, _nbins: int, _derivAperture: int = ..., _winSigma: float = ..., _histogramNormType: HOGDescriptor_HistogramNormType = ..., _L2HysThreshold: float = ..., _gammaCorrection: bool = ..., _nlevels: int = ..., _signedGradient: bool = ...) -> None: ...
    @_typing.overload
    def __init__(self, filename: str) -> None: ...

    def getDescriptorSize(self) -> int: ...

    def checkDetectorSize(self) -> bool: ...

    def getWinSigma(self) -> float: ...

    @_typing.overload
    def setSVMDetector(self, svmdetector: cv2.typing.MatLike) -> None: ...
    @_typing.overload
    def setSVMDetector(self, svmdetector: UMat) -> None: ...

    def load(self, filename: str, objname: str = ...) -> bool: ...

    def save(self, filename: str, objname: str = ...) -> None: ...

    @_typing.overload
    def compute(self, img: cv2.typing.MatLike, winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., locations: _typing.Sequence[cv2.typing.Point] = ...) -> _typing.Sequence[float]: ...
    @_typing.overload
    def compute(self, img: UMat, winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., locations: _typing.Sequence[cv2.typing.Point] = ...) -> _typing.Sequence[float]: ...

    # detect returns (locations, weights); detectMultiScale returns
    # (rectangles, weights).
    @_typing.overload
    def detect(self, img: cv2.typing.MatLike, hitThreshold: float = ..., winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., searchLocations: _typing.Sequence[cv2.typing.Point] = ...) -> tuple[_typing.Sequence[cv2.typing.Point], _typing.Sequence[float]]: ...
    @_typing.overload
    def detect(self, img: UMat, hitThreshold: float = ..., winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., searchLocations: _typing.Sequence[cv2.typing.Point] = ...) -> tuple[_typing.Sequence[cv2.typing.Point], _typing.Sequence[float]]: ...

    @_typing.overload
    def detectMultiScale(self, img: cv2.typing.MatLike, hitThreshold: float = ..., winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., scale: float = ..., groupThreshold: float = ..., useMeanshiftGrouping: bool = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[float]]: ...
    @_typing.overload
    def detectMultiScale(self, img: UMat, hitThreshold: float = ..., winStride: cv2.typing.Size = ..., padding: cv2.typing.Size = ..., scale: float = ..., groupThreshold: float = ..., useMeanshiftGrouping: bool = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[float]]: ...

    @_typing.overload
    def computeGradient(self, img: cv2.typing.MatLike, grad: cv2.typing.MatLike, angleOfs: cv2.typing.MatLike, paddingTL: cv2.typing.Size = ..., paddingBR: cv2.typing.Size = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def computeGradient(self, img: UMat, grad: UMat, angleOfs: UMat, paddingTL: cv2.typing.Size = ..., paddingBR: cv2.typing.Size = ...) -> tuple[UMat, UMat]: ...

    # Built-in pretrained SVM coefficient sets for people detection.
    @staticmethod
    def getDefaultPeopleDetector() -> _typing.Sequence[float]: ...

    @staticmethod
    def getDaimlerPeopleDetector() -> _typing.Sequence[float]: ...
+
+
class QRCodeEncoder:
    """Stub for cv2.QRCodeEncoder: encodes strings into QR-code images."""
    # Classes
    class Params:
        # Encoder configuration (version, error-correction level, mode,
        # structured-append part count).
        version: int
        correction_level: QRCodeEncoder_CorrectionLevel
        mode: QRCodeEncoder_EncodeMode
        structure_number: int

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: QRCodeEncoder.Params = ...) -> QRCodeEncoder: ...

    # Mat/UMat overloads; the optional output argument selects the result type.
    @_typing.overload
    def encode(self, encoded_info: str, qrcode: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def encode(self, encoded_info: str, qrcode: UMat | None = ...) -> UMat: ...

    @_typing.overload
    def encodeStructuredAppend(self, encoded_info: str, qrcodes: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
    @_typing.overload
    def encodeStructuredAppend(self, encoded_info: str, qrcodes: _typing.Sequence[UMat] | None = ...) -> _typing.Sequence[UMat]: ...
+
+
class QRCodeDetector(GraphicalCodeDetector):
    """Stub for cv2.QRCodeDetector; detect/decode come from the base class.

    Forward reference to GraphicalCodeDetector is valid in a stub file.
    """
    # Functions
    def __init__(self) -> None: ...

    # Setters return the detector itself (fluent style per the annotations).
    def setEpsX(self, epsX: float) -> QRCodeDetector: ...

    def setEpsY(self, epsY: float) -> QRCodeDetector: ...

    def setUseAlignmentMarkers(self, useAlignmentMarkers: bool) -> QRCodeDetector: ...

    # Variants for QR codes on curved surfaces; Mat/UMat overloads.
    @_typing.overload
    def decodeCurved(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike, straight_qrcode: cv2.typing.MatLike | None = ...) -> tuple[str, cv2.typing.MatLike]: ...
    @_typing.overload
    def decodeCurved(self, img: UMat, points: UMat, straight_qrcode: UMat | None = ...) -> tuple[str, UMat]: ...

    @_typing.overload
    def detectAndDecodeCurved(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike | None = ..., straight_qrcode: cv2.typing.MatLike | None = ...) -> tuple[str, cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def detectAndDecodeCurved(self, img: UMat, points: UMat | None = ..., straight_qrcode: UMat | None = ...) -> tuple[str, UMat, UMat]: ...
+
+
class GraphicalCodeDetector:
    """Stub for the common base of QR/bar-code detectors.

    Each method is overloaded for numpy (MatLike) and UMat images; the
    *Multi variants handle several codes in one image.
    """
    # Functions
    @_typing.overload
    def detect(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
    @_typing.overload
    def detect(self, img: UMat, points: UMat | None = ...) -> tuple[bool, UMat]: ...

    @_typing.overload
    def decode(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike, straight_code: cv2.typing.MatLike | None = ...) -> tuple[str, cv2.typing.MatLike]: ...
    @_typing.overload
    def decode(self, img: UMat, points: UMat, straight_code: UMat | None = ...) -> tuple[str, UMat]: ...

    @_typing.overload
    def detectAndDecode(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike | None = ..., straight_code: cv2.typing.MatLike | None = ...) -> tuple[str, cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def detectAndDecode(self, img: UMat, points: UMat | None = ..., straight_code: UMat | None = ...) -> tuple[str, UMat, UMat]: ...

    @_typing.overload
    def detectMulti(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
    @_typing.overload
    def detectMulti(self, img: UMat, points: UMat | None = ...) -> tuple[bool, UMat]: ...

    @_typing.overload
    def decodeMulti(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike, straight_code: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[bool, _typing.Sequence[str], _typing.Sequence[cv2.typing.MatLike]]: ...
    @_typing.overload
    def decodeMulti(self, img: UMat, points: UMat, straight_code: _typing.Sequence[UMat] | None = ...) -> tuple[bool, _typing.Sequence[str], _typing.Sequence[UMat]]: ...

    @_typing.overload
    def detectAndDecodeMulti(self, img: cv2.typing.MatLike, points: cv2.typing.MatLike | None = ..., straight_code: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[bool, _typing.Sequence[str], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike]]: ...
    @_typing.overload
    def detectAndDecodeMulti(self, img: UMat, points: UMat | None = ..., straight_code: _typing.Sequence[UMat] | None = ...) -> tuple[bool, _typing.Sequence[str], UMat, _typing.Sequence[UMat]]: ...
+
+
class QRCodeDetectorAruco(GraphicalCodeDetector):
    """Stub for the ArUco-based QR detector; detection API is inherited."""
    # Classes
    class Params:
        # Detector tuning thresholds (all floats; semantics per OpenCV docs).
        minModuleSizeInPyramid: float
        maxRotation: float
        maxModuleSizeMismatch: float
        maxTimingPatternMismatch: float
        maxPenalties: float
        maxColorsMismatch: float
        scaleTimingPatternScore: float

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @_typing.overload
    def __init__(self) -> None: ...
    @_typing.overload
    def __init__(self, params: QRCodeDetectorAruco.Params) -> None: ...

    def getDetectorParameters(self) -> QRCodeDetectorAruco.Params: ...

    # Fluent setter (returns self per the annotation).
    def setDetectorParameters(self, params: QRCodeDetectorAruco.Params) -> QRCodeDetectorAruco: ...

    def getArucoParameters(self) -> cv2.aruco.DetectorParameters: ...

    def setArucoParameters(self, params: cv2.aruco.DetectorParameters) -> None: ...
+
+
class FaceDetectorYN:
    """Stub for cv2.FaceDetectorYN (DNN-based face detector, YuNet)."""
    # Functions
    def setInputSize(self, input_size: cv2.typing.Size) -> None: ...

    def getInputSize(self) -> cv2.typing.Size: ...

    def setScoreThreshold(self, score_threshold: float) -> None: ...

    def getScoreThreshold(self) -> float: ...

    def setNMSThreshold(self, nms_threshold: float) -> None: ...

    def getNMSThreshold(self) -> float: ...

    def setTopK(self, top_k: int) -> None: ...

    def getTopK(self) -> int: ...

    # Returns (status int, faces matrix); Mat/UMat overloads.
    @_typing.overload
    def detect(self, image: cv2.typing.MatLike, faces: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
    @_typing.overload
    def detect(self, image: UMat, faces: UMat | None = ...) -> tuple[int, UMat]: ...

    # Factory: either model files by path, or raw model/config buffers.
    @classmethod
    @_typing.overload
    def create(cls, model: str, config: str, input_size: cv2.typing.Size, score_threshold: float = ..., nms_threshold: float = ..., top_k: int = ..., backend_id: int = ..., target_id: int = ...) -> FaceDetectorYN: ...
    @classmethod
    @_typing.overload
    def create(cls, framework: str, bufferModel: numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]], bufferConfig: numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]], input_size: cv2.typing.Size, score_threshold: float = ..., nms_threshold: float = ..., top_k: int = ..., backend_id: int = ..., target_id: int = ...) -> FaceDetectorYN: ...
+
+
class FaceRecognizerSF:
    """Stub for cv2.FaceRecognizerSF (DNN-based face recognizer, SFace)."""
    # Functions
    @_typing.overload
    def alignCrop(self, src_img: cv2.typing.MatLike, face_box: cv2.typing.MatLike, aligned_img: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def alignCrop(self, src_img: UMat, face_box: UMat, aligned_img: UMat | None = ...) -> UMat: ...

    @_typing.overload
    def feature(self, aligned_img: cv2.typing.MatLike, face_feature: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def feature(self, aligned_img: UMat, face_feature: UMat | None = ...) -> UMat: ...

    # Compares two feature vectors; dis_type selects the distance metric.
    @_typing.overload
    def match(self, face_feature1: cv2.typing.MatLike, face_feature2: cv2.typing.MatLike, dis_type: int = ...) -> float: ...
    @_typing.overload
    def match(self, face_feature1: UMat, face_feature2: UMat, dis_type: int = ...) -> float: ...

    @classmethod
    def create(cls, model: str, config: str, backend_id: int = ..., target_id: int = ...) -> FaceRecognizerSF: ...
+
+
class Stitcher:
    """Stub for cv2.Stitcher (panorama stitching pipeline).

    Operations return a Stitcher_Status code; image arguments are
    overloaded for numpy (MatLike) and UMat.
    """
    # Functions
    @classmethod
    def create(cls, mode: Stitcher_Mode = ...) -> Stitcher: ...

    # Paired getters/setters for pipeline resolutions and thresholds.
    def registrationResol(self) -> float: ...

    def setRegistrationResol(self, resol_mpx: float) -> None: ...

    def seamEstimationResol(self) -> float: ...

    def setSeamEstimationResol(self, resol_mpx: float) -> None: ...

    def compositingResol(self) -> float: ...

    def setCompositingResol(self, resol_mpx: float) -> None: ...

    def panoConfidenceThresh(self) -> float: ...

    def setPanoConfidenceThresh(self, conf_thresh: float) -> None: ...

    def waveCorrection(self) -> bool: ...

    def setWaveCorrection(self, flag: bool) -> None: ...

    def interpolationFlags(self) -> InterpolationFlags: ...

    def setInterpolationFlags(self, interp_flags: InterpolationFlags) -> None: ...

    @_typing.overload
    def estimateTransform(self, images: _typing.Sequence[cv2.typing.MatLike], masks: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> Stitcher_Status: ...
    @_typing.overload
    def estimateTransform(self, images: _typing.Sequence[UMat], masks: _typing.Sequence[UMat] | None = ...) -> Stitcher_Status: ...

    # composePanorama may reuse images from estimateTransform or take new ones.
    @_typing.overload
    def composePanorama(self, pano: cv2.typing.MatLike | None = ...) -> tuple[Stitcher_Status, cv2.typing.MatLike]: ...
    @_typing.overload
    def composePanorama(self, pano: UMat | None = ...) -> tuple[Stitcher_Status, UMat]: ...
    @_typing.overload
    def composePanorama(self, images: _typing.Sequence[cv2.typing.MatLike], pano: cv2.typing.MatLike | None = ...) -> tuple[Stitcher_Status, cv2.typing.MatLike]: ...
    @_typing.overload
    def composePanorama(self, images: _typing.Sequence[UMat], pano: UMat | None = ...) -> tuple[Stitcher_Status, UMat]: ...

    # One-shot convenience: estimateTransform + composePanorama.
    @_typing.overload
    def stitch(self, images: _typing.Sequence[cv2.typing.MatLike], pano: cv2.typing.MatLike | None = ...) -> tuple[Stitcher_Status, cv2.typing.MatLike]: ...
    @_typing.overload
    def stitch(self, images: _typing.Sequence[UMat], pano: UMat | None = ...) -> tuple[Stitcher_Status, UMat]: ...
    @_typing.overload
    def stitch(self, images: _typing.Sequence[cv2.typing.MatLike], masks: _typing.Sequence[cv2.typing.MatLike], pano: cv2.typing.MatLike | None = ...) -> tuple[Stitcher_Status, cv2.typing.MatLike]: ...
    @_typing.overload
    def stitch(self, images: _typing.Sequence[UMat], masks: _typing.Sequence[UMat], pano: UMat | None = ...) -> tuple[Stitcher_Status, UMat]: ...

    def workScale(self) -> float: ...
+
+
class PyRotationWarper:
    """Stub for cv2.PyRotationWarper (rotation-based image warper).

    Constructed by warper-type name plus scale; warp methods are
    overloaded for numpy (MatLike) and UMat matrices K (camera
    intrinsics) and R (rotation).
    """
    # Functions
    @_typing.overload
    def __init__(self, type: str, scale: float) -> None: ...
    @_typing.overload
    def __init__(self) -> None: ...

    @_typing.overload
    def warpPoint(self, pt: cv2.typing.Point2f, K: cv2.typing.MatLike, R: cv2.typing.MatLike) -> cv2.typing.Point2f: ...
    @_typing.overload
    def warpPoint(self, pt: cv2.typing.Point2f, K: UMat, R: UMat) -> cv2.typing.Point2f: ...

    # NOTE(review): the original stub listed each warpPointBackward overload
    # twice (byte-identical duplicates). The duplicates were removed: they add
    # nothing to overload resolution and trigger overlapping-overload
    # diagnostics in type checkers.
    @_typing.overload
    def warpPointBackward(self, pt: cv2.typing.Point2f, K: cv2.typing.MatLike, R: cv2.typing.MatLike) -> cv2.typing.Point2f: ...
    @_typing.overload
    def warpPointBackward(self, pt: cv2.typing.Point2f, K: UMat, R: UMat) -> cv2.typing.Point2f: ...

    @_typing.overload
    def buildMaps(self, src_size: cv2.typing.Size, K: cv2.typing.MatLike, R: cv2.typing.MatLike, xmap: cv2.typing.MatLike | None = ..., ymap: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.Rect, cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def buildMaps(self, src_size: cv2.typing.Size, K: UMat, R: UMat, xmap: UMat | None = ..., ymap: UMat | None = ...) -> tuple[cv2.typing.Rect, UMat, UMat]: ...

    @_typing.overload
    def warp(self, src: cv2.typing.MatLike, K: cv2.typing.MatLike, R: cv2.typing.MatLike, interp_mode: int, border_mode: int, dst: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.Point, cv2.typing.MatLike]: ...
    @_typing.overload
    def warp(self, src: UMat, K: UMat, R: UMat, interp_mode: int, border_mode: int, dst: UMat | None = ...) -> tuple[cv2.typing.Point, UMat]: ...

    @_typing.overload
    def warpBackward(self, src: cv2.typing.MatLike, K: cv2.typing.MatLike, R: cv2.typing.MatLike, interp_mode: int, border_mode: int, dst_size: cv2.typing.Size, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def warpBackward(self, src: UMat, K: UMat, R: UMat, interp_mode: int, border_mode: int, dst_size: cv2.typing.Size, dst: UMat | None = ...) -> UMat: ...

    @_typing.overload
    def warpRoi(self, src_size: cv2.typing.Size, K: cv2.typing.MatLike, R: cv2.typing.MatLike) -> cv2.typing.Rect: ...
    @_typing.overload
    def warpRoi(self, src_size: cv2.typing.Size, K: UMat, R: UMat) -> cv2.typing.Rect: ...

    def getScale(self) -> float: ...

    def setScale(self, arg1: float) -> None: ...
+
+
class WarperCreator:
    # Empty stub: factory base class for warpers (no Python-visible members).
    ...
+
class BackgroundSubtractor(Algorithm):
    """Stub for the background-subtraction base class."""
    # Functions
    # apply returns the foreground mask; Mat/UMat overloads.
    @_typing.overload
    def apply(self, image: cv2.typing.MatLike, fgmask: cv2.typing.MatLike | None = ..., learningRate: float = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def apply(self, image: UMat, fgmask: UMat | None = ..., learningRate: float = ...) -> UMat: ...

    @_typing.overload
    def getBackgroundImage(self, backgroundImage: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def getBackgroundImage(self, backgroundImage: UMat | None = ...) -> UMat: ...
+
+
class BackgroundSubtractorMOG2(BackgroundSubtractor):
    """Stub for the MOG2 Gaussian-mixture background subtractor.

    Paired getters/setters expose the model parameters; apply is
    re-declared here (same shape as the base class).
    """
    # Functions
    def getHistory(self) -> int: ...

    def setHistory(self, history: int) -> None: ...

    def getNMixtures(self) -> int: ...

    def setNMixtures(self, nmixtures: int) -> None: ...

    def getBackgroundRatio(self) -> float: ...

    def setBackgroundRatio(self, ratio: float) -> None: ...

    def getVarThreshold(self) -> float: ...

    def setVarThreshold(self, varThreshold: float) -> None: ...

    def getVarThresholdGen(self) -> float: ...

    def setVarThresholdGen(self, varThresholdGen: float) -> None: ...

    def getVarInit(self) -> float: ...

    def setVarInit(self, varInit: float) -> None: ...

    def getVarMin(self) -> float: ...

    def setVarMin(self, varMin: float) -> None: ...

    def getVarMax(self) -> float: ...

    def setVarMax(self, varMax: float) -> None: ...

    def getComplexityReductionThreshold(self) -> float: ...

    def setComplexityReductionThreshold(self, ct: float) -> None: ...

    def getDetectShadows(self) -> bool: ...

    def setDetectShadows(self, detectShadows: bool) -> None: ...

    def getShadowValue(self) -> int: ...

    def setShadowValue(self, value: int) -> None: ...

    def getShadowThreshold(self) -> float: ...

    def setShadowThreshold(self, threshold: float) -> None: ...

    @_typing.overload
    def apply(self, image: cv2.typing.MatLike, fgmask: cv2.typing.MatLike | None = ..., learningRate: float = ...) -> cv2.typing.MatLike: ...
    @_typing.overload
    def apply(self, image: UMat, fgmask: UMat | None = ..., learningRate: float = ...) -> UMat: ...
+
+
class BackgroundSubtractorKNN(BackgroundSubtractor):
    """Stub for the KNN background subtractor (parameter accessors only)."""
    # Functions
    def getHistory(self) -> int: ...

    def setHistory(self, history: int) -> None: ...

    def getNSamples(self) -> int: ...

    def setNSamples(self, _nN: int) -> None: ...

    def getDist2Threshold(self) -> float: ...

    def setDist2Threshold(self, _dist2Threshold: float) -> None: ...

    def getkNNSamples(self) -> int: ...

    def setkNNSamples(self, _nkNN: int) -> None: ...

    def getDetectShadows(self) -> bool: ...

    def setDetectShadows(self, detectShadows: bool) -> None: ...

    def getShadowValue(self) -> int: ...

    def setShadowValue(self, value: int) -> None: ...

    def getShadowThreshold(self) -> float: ...

    def setShadowThreshold(self, threshold: float) -> None: ...
+
+
class KalmanFilter:
    """Stub for cv2.KalmanFilter (standard linear Kalman filter).

    The matrix attributes are the usual filter state: pre/post state
    estimates, transition/control/measurement matrices, noise
    covariances, Kalman gain and error covariances.
    """
    statePre: cv2.typing.MatLike
    statePost: cv2.typing.MatLike
    transitionMatrix: cv2.typing.MatLike
    controlMatrix: cv2.typing.MatLike
    measurementMatrix: cv2.typing.MatLike
    processNoiseCov: cv2.typing.MatLike
    measurementNoiseCov: cv2.typing.MatLike
    errorCovPre: cv2.typing.MatLike
    gain: cv2.typing.MatLike
    errorCovPost: cv2.typing.MatLike

    # Functions
    @_typing.overload
    def __init__(self) -> None: ...
    @_typing.overload
    def __init__(self, dynamParams: int, measureParams: int, controlParams: int = ..., type: int = ...) -> None: ...

    # predict advances the state; correct folds in a new measurement.
    def predict(self, control: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...

    def correct(self, measurement: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+
+
class DenseOpticalFlow(Algorithm):
    """Stub for the dense optical-flow base class."""
    # Functions
    # calc computes flow from I0 to I1; Mat/UMat overloads.
    @_typing.overload
    def calc(self, I0: cv2.typing.MatLike, I1: cv2.typing.MatLike, flow: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
    @_typing.overload
    def calc(self, I0: UMat, I1: UMat, flow: UMat) -> UMat: ...

    def collectGarbage(self) -> None: ...
+
+
class SparseOpticalFlow(Algorithm):
    """Stub for the sparse optical-flow base class (point tracking)."""
    # Functions
    @_typing.overload
    def calc(self, prevImg: cv2.typing.MatLike, nextImg: cv2.typing.MatLike, prevPts: cv2.typing.MatLike, nextPts: cv2.typing.MatLike, status: cv2.typing.MatLike | None = ..., err: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def calc(self, prevImg: UMat, nextImg: UMat, prevPts: UMat, nextPts: UMat, status: UMat | None = ..., err: UMat | None = ...) -> tuple[UMat, UMat, UMat]: ...
+
+
class FarnebackOpticalFlow(DenseOpticalFlow):
    """Stub for the Farneback dense optical-flow algorithm."""
    # Functions
    def getNumLevels(self) -> int: ...

    def setNumLevels(self, numLevels: int) -> None: ...

    def getPyrScale(self) -> float: ...

    def setPyrScale(self, pyrScale: float) -> None: ...

    def getFastPyramids(self) -> bool: ...

    def setFastPyramids(self, fastPyramids: bool) -> None: ...

    def getWinSize(self) -> int: ...

    def setWinSize(self, winSize: int) -> None: ...

    def getNumIters(self) -> int: ...

    def setNumIters(self, numIters: int) -> None: ...

    def getPolyN(self) -> int: ...

    def setPolyN(self, polyN: int) -> None: ...

    def getPolySigma(self) -> float: ...

    def setPolySigma(self, polySigma: float) -> None: ...

    def getFlags(self) -> int: ...

    def setFlags(self, flags: int) -> None: ...

    @classmethod
    def create(cls, numLevels: int = ..., pyrScale: float = ..., fastPyramids: bool = ..., winSize: int = ..., numIters: int = ..., polyN: int = ..., polySigma: float = ..., flags: int = ...) -> FarnebackOpticalFlow: ...
+
+
class VariationalRefinement(DenseOpticalFlow):
    """Stub for variational optical-flow refinement."""
    # Functions
    # calcUV refines horizontal/vertical flow components separately.
    @_typing.overload
    def calcUV(self, I0: cv2.typing.MatLike, I1: cv2.typing.MatLike, flow_u: cv2.typing.MatLike, flow_v: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
    @_typing.overload
    def calcUV(self, I0: UMat, I1: UMat, flow_u: UMat, flow_v: UMat) -> tuple[UMat, UMat]: ...

    def getFixedPointIterations(self) -> int: ...

    def setFixedPointIterations(self, val: int) -> None: ...

    def getSorIterations(self) -> int: ...

    def setSorIterations(self, val: int) -> None: ...

    def getOmega(self) -> float: ...

    def setOmega(self, val: float) -> None: ...

    def getAlpha(self) -> float: ...

    def setAlpha(self, val: float) -> None: ...

    def getDelta(self) -> float: ...

    def setDelta(self, val: float) -> None: ...

    def getGamma(self) -> float: ...

    def setGamma(self, val: float) -> None: ...

    def getEpsilon(self) -> float: ...

    def setEpsilon(self, val: float) -> None: ...

    @classmethod
    def create(cls) -> VariationalRefinement: ...
+
+
class DISOpticalFlow(DenseOpticalFlow):
    """Stub for DIS (Dense Inverse Search) optical flow."""
    # Functions
    def getFinestScale(self) -> int: ...

    def setFinestScale(self, val: int) -> None: ...

    def getPatchSize(self) -> int: ...

    def setPatchSize(self, val: int) -> None: ...

    def getPatchStride(self) -> int: ...

    def setPatchStride(self, val: int) -> None: ...

    def getGradientDescentIterations(self) -> int: ...

    def setGradientDescentIterations(self, val: int) -> None: ...

    def getVariationalRefinementIterations(self) -> int: ...

    def setVariationalRefinementIterations(self, val: int) -> None: ...

    def getVariationalRefinementAlpha(self) -> float: ...

    def setVariationalRefinementAlpha(self, val: float) -> None: ...

    def getVariationalRefinementDelta(self) -> float: ...

    def setVariationalRefinementDelta(self, val: float) -> None: ...

    def getVariationalRefinementGamma(self) -> float: ...

    def setVariationalRefinementGamma(self, val: float) -> None: ...

    def getVariationalRefinementEpsilon(self) -> float: ...

    def setVariationalRefinementEpsilon(self, val: float) -> None: ...

    def getUseMeanNormalization(self) -> bool: ...

    def setUseMeanNormalization(self, val: bool) -> None: ...

    def getUseSpatialPropagation(self) -> bool: ...

    def setUseSpatialPropagation(self, val: bool) -> None: ...

    # preset selects a speed/quality trade-off (see DISOpticalFlow presets).
    @classmethod
    def create(cls, preset: int = ...) -> DISOpticalFlow: ...
+
+
class SparsePyrLKOpticalFlow(SparseOpticalFlow):
    """Stub for the pyramidal Lucas-Kanade sparse optical-flow tracker."""
    # Functions
    def getWinSize(self) -> cv2.typing.Size: ...

    def setWinSize(self, winSize: cv2.typing.Size) -> None: ...

    def getMaxLevel(self) -> int: ...

    def setMaxLevel(self, maxLevel: int) -> None: ...

    def getTermCriteria(self) -> cv2.typing.TermCriteria: ...

    def setTermCriteria(self, crit: cv2.typing.TermCriteria) -> None: ...

    def getFlags(self) -> int: ...

    def setFlags(self, flags: int) -> None: ...

    def getMinEigThreshold(self) -> float: ...

    def setMinEigThreshold(self, minEigThreshold: float) -> None: ...

    @classmethod
    def create(cls, winSize: cv2.typing.Size = ..., maxLevel: int = ..., crit: cv2.typing.TermCriteria = ..., flags: int = ..., minEigThreshold: float = ...) -> SparsePyrLKOpticalFlow: ...
+
+
class Tracker:
    """Stub for the single-object tracker base class."""
    # Functions
    # init seeds the tracker with a bounding box; Mat/UMat overloads.
    @_typing.overload
    def init(self, image: cv2.typing.MatLike, boundingBox: cv2.typing.Rect) -> None: ...
    @_typing.overload
    def init(self, image: UMat, boundingBox: cv2.typing.Rect) -> None: ...

    # update returns (found, new bounding box).
    @_typing.overload
    def update(self, image: cv2.typing.MatLike) -> tuple[bool, cv2.typing.Rect]: ...
    @_typing.overload
    def update(self, image: UMat) -> tuple[bool, cv2.typing.Rect]: ...
+
+
class TrackerMIL(Tracker):
    """Stub for the MIL (Multiple Instance Learning) tracker."""
    # Classes
    class Params:
        # Sampler/feature configuration for the MIL boosting classifier.
        samplerInitInRadius: float
        samplerInitMaxNegNum: int
        samplerSearchWinSize: float
        samplerTrackInRadius: float
        samplerTrackMaxPosNum: int
        samplerTrackMaxNegNum: int
        featureSetNumFeatures: int

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: TrackerMIL.Params = ...) -> TrackerMIL: ...
+
+
class TrackerGOTURN(Tracker):
    """Stub for the GOTURN deep-learning tracker."""
    # Classes
    class Params:
        # Paths to the Caffe prototxt and weights files.
        modelTxt: str
        modelBin: str

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: TrackerGOTURN.Params = ...) -> TrackerGOTURN: ...
+
+
class TrackerDaSiamRPN(Tracker):
    """Stub for the DaSiamRPN tracker (DNN-based)."""
    # Classes
    class Params:
        # Model file paths plus DNN backend/target ids.
        model: str
        kernel_cls1: str
        kernel_r1: str
        backend: int
        target: int

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: TrackerDaSiamRPN.Params = ...) -> TrackerDaSiamRPN: ...

    def getTrackingScore(self) -> float: ...
+
+
class TrackerNano(Tracker):
    """Stub for the NanoTrack tracker (DNN-based)."""
    # Classes
    class Params:
        # Backbone/neck-head model paths plus DNN backend/target ids.
        backbone: str
        neckhead: str
        backend: int
        target: int

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: TrackerNano.Params = ...) -> TrackerNano: ...

    def getTrackingScore(self) -> float: ...
+
+
class TrackerVit(Tracker):
    """Stub for the ViT-based tracker (DNN-based)."""
    # Classes
    class Params:
        # Model path, DNN backend/target ids and input normalization scalars.
        net: str
        backend: int
        target: int
        meanvalue: cv2.typing.Scalar
        stdvalue: cv2.typing.Scalar

        # Functions
        def __init__(self) -> None: ...



    # Functions
    @classmethod
    def create(cls, parameters: TrackerVit.Params = ...) -> TrackerVit: ...

    def getTrackingScore(self) -> float: ...
+
+
class GArrayDesc:
    # Empty stub: G-API array metadata descriptor (no Python-visible members).
    ...
+
class GComputation:
    """Stub for cv2.GComputation (a G-API computation graph)."""
    # Functions
    # Constructors: generic proto in/out args, or fixed GMat/GScalar shapes.
    @_typing.overload
    def __init__(self, ins: cv2.typing.GProtoInputArgs, outs: cv2.typing.GProtoOutputArgs) -> None: ...
    @_typing.overload
    def __init__(self, in_: GMat, out: GMat) -> None: ...
    @_typing.overload
    def __init__(self, in_: GMat, out: GScalar) -> None: ...
    @_typing.overload
    def __init__(self, in1: GMat, in2: GMat, out: GMat) -> None: ...

    def apply(self, callback: cv2.typing.ExtractArgsCallback, args: _typing.Sequence[GCompileArg] = ...) -> _typing.Sequence[cv2.typing.GRunArg]: ...

    # Compiles the graph for streaming execution; metas may be given
    # explicitly, omitted, or supplied via a callback.
    @_typing.overload
    def compileStreaming(self, in_metas: _typing.Sequence[cv2.typing.GMetaArg], args: _typing.Sequence[GCompileArg] = ...) -> GStreamingCompiled: ...
    @_typing.overload
    def compileStreaming(self, args: _typing.Sequence[GCompileArg] = ...) -> GStreamingCompiled: ...
    @_typing.overload
    def compileStreaming(self, callback: cv2.typing.ExtractMetaCallback, args: _typing.Sequence[GCompileArg] = ...) -> GStreamingCompiled: ...
+
+
class GFrame:
    """Stub for cv2.GFrame (G-API media-frame placeholder)."""
    # Functions
    def __init__(self) -> None: ...
+
+
class GKernelPackage:
    """Stub for cv2.GKernelPackage (a set of G-API kernels)."""
    # Functions
    def size(self) -> int: ...
+
+
class GMat:
    """Stub for cv2.GMat (G-API matrix placeholder)."""
    # Functions
    @_typing.overload
    def __init__(self) -> None: ...
    @_typing.overload
    def __init__(self, m: cv2.typing.MatLike) -> None: ...
+
+
class GMatDesc:
    """Stub for cv2.GMatDesc (metadata describing a GMat: depth,
    channels, size, planarity, optional nd-dims).

    The with*/as* methods return new descriptors derived from this one.
    """
    @property
    def depth(self) -> int: ...
    @property
    def chan(self) -> int: ...
    @property
    def size(self) -> cv2.typing.Size: ...
    @property
    def planar(self) -> bool: ...
    @property
    def dims(self) -> _typing.Sequence[int]: ...

    # Functions
    # NOTE(review): the original stub declared the (d, dd) overload twice,
    # byte-identically; the duplicate was removed (redundant overloads are
    # flagged as overlapping by type checkers and change nothing).
    @_typing.overload
    def __init__(self, d: int, c: int, s: cv2.typing.Size, p: bool = ...) -> None: ...
    @_typing.overload
    def __init__(self, d: int, dd: _typing.Sequence[int]) -> None: ...
    @_typing.overload
    def __init__(self) -> None: ...

    @_typing.overload
    def withSizeDelta(self, delta: cv2.typing.Size) -> GMatDesc: ...
    @_typing.overload
    def withSizeDelta(self, dx: int, dy: int) -> GMatDesc: ...

    def withSize(self, sz: cv2.typing.Size) -> GMatDesc: ...

    def withDepth(self, ddepth: int) -> GMatDesc: ...

    def withType(self, ddepth: int, dchan: int) -> GMatDesc: ...

    @_typing.overload
    def asPlanar(self) -> GMatDesc: ...
    @_typing.overload
    def asPlanar(self, planes: int) -> GMatDesc: ...

    def asInterleaved(self) -> GMatDesc: ...
+
+
class GOpaqueDesc:
    # Empty stub: G-API opaque-value metadata descriptor.
    ...
+
class GScalar:
    """Stub for cv2.GScalar (G-API scalar placeholder)."""
    # Functions
    @_typing.overload
    def __init__(self) -> None: ...
    @_typing.overload
    def __init__(self, s: cv2.typing.Scalar) -> None: ...
+
+
class GScalarDesc:
    # Empty stub: G-API scalar metadata descriptor.
    ...
+
class GStreamingCompiled:
    """Stub for a compiled streaming G-API pipeline.

    Typical lifecycle: setSource -> start -> pull loop -> stop.
    """
    # Functions
    def __init__(self) -> None: ...

    def setSource(self, callback: cv2.typing.ExtractArgsCallback) -> None: ...

    def start(self) -> None: ...

    # pull returns (has_data, outputs); outputs may be optional run-args.
    def pull(self) -> tuple[bool, _typing.Sequence[cv2.typing.GRunArg] | _typing.Sequence[cv2.typing.GOptRunArg]]: ...

    def stop(self) -> None: ...

    def running(self) -> bool: ...
+
+
class GOpaqueT:
    """Stub for a typed G-API opaque value; the type tag is fixed at
    construction and reported by type()."""
    # Functions
    def __init__(self, type: cv2.gapi.ArgType) -> None: ...

    def type(self) -> cv2.gapi.ArgType: ...
+
+
class GArrayT:
    """Stub for a typed G-API array; the element type tag is fixed at
    construction and reported by type()."""
    # Functions
    def __init__(self, type: cv2.gapi.ArgType) -> None: ...

    def type(self) -> cv2.gapi.ArgType: ...
+
+
class GCompileArg:
    """Stub for a G-API compile argument wrapper; one overload per
    accepted argument kind."""
    # Functions
    @_typing.overload
    def __init__(self, arg: GKernelPackage) -> None: ...
    @_typing.overload
    def __init__(self, arg: cv2.gapi.GNetPackage) -> None: ...
    @_typing.overload
    def __init__(self, arg: cv2.gapi.streaming.queue_capacity) -> None: ...
    @_typing.overload
    def __init__(self, arg: cv2.gapi.ot.ObjectTrackerParams) -> None: ...
+
+
class GInferInputs:
    """Stub for the named-input map of a G-API inference call."""
    # Functions
    def __init__(self) -> None: ...

    # Fluent setter (returns self per the annotation); accepts GMat or GFrame.
    @_typing.overload
    def setInput(self, name: str, value: GMat) -> GInferInputs: ...
    @_typing.overload
    def setInput(self, name: str, value: GFrame) -> GInferInputs: ...
+
+
class GInferListInputs:
    """Stub for the named-input map of a list-based G-API inference call."""
    # Functions
    def __init__(self) -> None: ...

    # NOTE(review): the original stub declared two *identical* @overloads for
    # setInput (both taking GArrayT). They were collapsed into one plain
    # definition: equivalent for type checkers, and a lone @overload would
    # violate PEP 484's two-overload minimum. Fluent setter (returns self).
    def setInput(self, name: str, value: GArrayT) -> GInferListInputs: ...
+
+
class GInferOutputs:
    """Stub for the named-output map of a G-API inference call."""
    # Functions
    def __init__(self) -> None: ...

    # Looks up an output by layer name.
    def at(self, name: str) -> GMat: ...
+
+
class GInferListOutputs:
    """Stub for the named-output map of a list-based G-API inference call."""
    # Functions
    def __init__(self) -> None: ...

    # Looks up an output by layer name.
    def at(self, name: str) -> GArrayT: ...
+
+
# Lowercase name is intentional: this mirrors the runtime cv2.error exception.
class error(Exception):
    # OpenCV error attributes: numeric status code, error text, and the
    # C++ source location (file/function/line) plus the full message.
    code: int
    err: str
    file: str
    func: str
    line: int
    msg: str
+
+
+# Functions
# Mat/UMat overloads. Returns the tracked object's rotated rectangle and
# the updated search window.
@_typing.overload
def CamShift(probImage: cv2.typing.MatLike, window: cv2.typing.Rect, criteria: cv2.typing.TermCriteria) -> tuple[cv2.typing.RotatedRect, cv2.typing.Rect]: ...
@_typing.overload
def CamShift(probImage: UMat, window: cv2.typing.Rect, criteria: cv2.typing.TermCriteria) -> tuple[cv2.typing.RotatedRect, cv2.typing.Rect]: ...
+
# Canny edge detector. Two signature families: from a grayscale image, or
# from precomputed dx/dy derivatives; each with Mat/UMat variants.
@_typing.overload
def Canny(image: cv2.typing.MatLike, threshold1: float, threshold2: float, edges: cv2.typing.MatLike | None = ..., apertureSize: int = ..., L2gradient: bool = ...) -> cv2.typing.MatLike: ...
@_typing.overload
def Canny(image: UMat, threshold1: float, threshold2: float, edges: UMat | None = ..., apertureSize: int = ..., L2gradient: bool = ...) -> UMat: ...
@_typing.overload
def Canny(dx: cv2.typing.MatLike, dy: cv2.typing.MatLike, threshold1: float, threshold2: float, edges: cv2.typing.MatLike | None = ..., L2gradient: bool = ...) -> cv2.typing.MatLike: ...
@_typing.overload
def Canny(dx: UMat, dy: UMat, threshold1: float, threshold2: float, edges: UMat | None = ..., L2gradient: bool = ...) -> UMat: ...
+
+# Overload pair (MatLike / UMat signatures); returns (emd, lowerBound, flow).
+@_typing.overload
+def EMD(signature1: cv2.typing.MatLike, signature2: cv2.typing.MatLike, distType: int, cost: cv2.typing.MatLike | None = ..., lowerBound: float | None = ..., flow: cv2.typing.MatLike | None = ...) -> tuple[float, float, cv2.typing.MatLike]: ...
+@_typing.overload
+def EMD(signature1: UMat, signature2: UMat, distType: int, cost: UMat | None = ..., lowerBound: float | None = ..., flow: UMat | None = ...) -> tuple[float, float, UMat]: ...
+
+# Overload pair (MatLike / UMat); the returned image type follows the input.
+@_typing.overload
+def GaussianBlur(src: cv2.typing.MatLike, ksize: cv2.typing.Size, sigmaX: float, dst: cv2.typing.MatLike | None = ..., sigmaY: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def GaussianBlur(src: UMat, ksize: cv2.typing.Size, sigmaX: float, dst: UMat | None = ..., sigmaY: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def HoughCircles(image: cv2.typing.MatLike, method: int, dp: float, minDist: float, circles: cv2.typing.MatLike | None = ..., param1: float = ..., param2: float = ..., minRadius: int = ..., maxRadius: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HoughCircles(image: UMat, method: int, dp: float, minDist: float, circles: UMat | None = ..., param1: float = ..., param2: float = ..., minRadius: int = ..., maxRadius: int = ...) -> UMat: ...
+
+@_typing.overload
+def HoughLines(image: cv2.typing.MatLike, rho: float, theta: float, threshold: int, lines: cv2.typing.MatLike | None = ..., srn: float = ..., stn: float = ..., min_theta: float = ..., max_theta: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HoughLines(image: UMat, rho: float, theta: float, threshold: int, lines: UMat | None = ..., srn: float = ..., stn: float = ..., min_theta: float = ..., max_theta: float = ...) -> UMat: ...
+
+@_typing.overload
+def HoughLinesP(image: cv2.typing.MatLike, rho: float, theta: float, threshold: int, lines: cv2.typing.MatLike | None = ..., minLineLength: float = ..., maxLineGap: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HoughLinesP(image: UMat, rho: float, theta: float, threshold: int, lines: UMat | None = ..., minLineLength: float = ..., maxLineGap: float = ...) -> UMat: ...
+
+@_typing.overload
+def HoughLinesPointSet(point: cv2.typing.MatLike, lines_max: int, threshold: int, min_rho: float, max_rho: float, rho_step: float, min_theta: float, max_theta: float, theta_step: float, lines: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HoughLinesPointSet(point: UMat, lines_max: int, threshold: int, min_rho: float, max_rho: float, rho_step: float, min_theta: float, max_theta: float, theta_step: float, lines: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def HoughLinesWithAccumulator(image: cv2.typing.MatLike, rho: float, theta: float, threshold: int, lines: cv2.typing.MatLike | None = ..., srn: float = ..., stn: float = ..., min_theta: float = ..., max_theta: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HoughLinesWithAccumulator(image: UMat, rho: float, theta: float, threshold: int, lines: UMat | None = ..., srn: float = ..., stn: float = ..., min_theta: float = ..., max_theta: float = ...) -> UMat: ...
+
+@_typing.overload
+def HuMoments(m: cv2.typing.Moments, hu: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def HuMoments(m: cv2.typing.Moments, hu: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def LUT(src: cv2.typing.MatLike, lut: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def LUT(src: UMat, lut: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def Laplacian(src: cv2.typing.MatLike, ddepth: int, dst: cv2.typing.MatLike | None = ..., ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def Laplacian(src: UMat, ddepth: int, dst: UMat | None = ..., ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def Mahalanobis(v1: cv2.typing.MatLike, v2: cv2.typing.MatLike, icovar: cv2.typing.MatLike) -> float: ...
+@_typing.overload
+def Mahalanobis(v1: UMat, v2: UMat, icovar: UMat) -> float: ...
+
+@_typing.overload
+def PCABackProject(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, eigenvectors: cv2.typing.MatLike, result: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def PCABackProject(data: UMat, mean: UMat, eigenvectors: UMat, result: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def PCACompute(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, eigenvectors: cv2.typing.MatLike | None = ..., maxComponents: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def PCACompute(data: UMat, mean: UMat, eigenvectors: UMat | None = ..., maxComponents: int = ...) -> tuple[UMat, UMat]: ...
+@_typing.overload
+def PCACompute(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, retainedVariance: float, eigenvectors: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def PCACompute(data: UMat, mean: UMat, retainedVariance: float, eigenvectors: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def PCACompute2(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, eigenvectors: cv2.typing.MatLike | None = ..., eigenvalues: cv2.typing.MatLike | None = ..., maxComponents: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def PCACompute2(data: UMat, mean: UMat, eigenvectors: UMat | None = ..., eigenvalues: UMat | None = ..., maxComponents: int = ...) -> tuple[UMat, UMat, UMat]: ...
+@_typing.overload
+def PCACompute2(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, retainedVariance: float, eigenvectors: cv2.typing.MatLike | None = ..., eigenvalues: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def PCACompute2(data: UMat, mean: UMat, retainedVariance: float, eigenvectors: UMat | None = ..., eigenvalues: UMat | None = ...) -> tuple[UMat, UMat, UMat]: ...
+
+@_typing.overload
+def PCAProject(data: cv2.typing.MatLike, mean: cv2.typing.MatLike, eigenvectors: cv2.typing.MatLike, result: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def PCAProject(data: UMat, mean: UMat, eigenvectors: UMat, result: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def PSNR(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, R: float = ...) -> float: ...
+@_typing.overload
+def PSNR(src1: UMat, src2: UMat, R: float = ...) -> float: ...
+
+@_typing.overload
+def RQDecomp3x3(src: cv2.typing.MatLike, mtxR: cv2.typing.MatLike | None = ..., mtxQ: cv2.typing.MatLike | None = ..., Qx: cv2.typing.MatLike | None = ..., Qy: cv2.typing.MatLike | None = ..., Qz: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.Vec3d, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def RQDecomp3x3(src: UMat, mtxR: UMat | None = ..., mtxQ: UMat | None = ..., Qx: UMat | None = ..., Qy: UMat | None = ..., Qz: UMat | None = ...) -> tuple[cv2.typing.Vec3d, UMat, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def Rodrigues(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., jacobian: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def Rodrigues(src: UMat, dst: UMat | None = ..., jacobian: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def SVBackSubst(w: cv2.typing.MatLike, u: cv2.typing.MatLike, vt: cv2.typing.MatLike, rhs: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def SVBackSubst(w: UMat, u: UMat, vt: UMat, rhs: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def SVDecomp(src: cv2.typing.MatLike, w: cv2.typing.MatLike | None = ..., u: cv2.typing.MatLike | None = ..., vt: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def SVDecomp(src: UMat, w: UMat | None = ..., u: UMat | None = ..., vt: UMat | None = ..., flags: int = ...) -> tuple[UMat, UMat, UMat]: ...
+
+@_typing.overload
+def Scharr(src: cv2.typing.MatLike, ddepth: int, dx: int, dy: int, dst: cv2.typing.MatLike | None = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def Scharr(src: UMat, ddepth: int, dx: int, dy: int, dst: UMat | None = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def Sobel(src: cv2.typing.MatLike, ddepth: int, dx: int, dy: int, dst: cv2.typing.MatLike | None = ..., ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def Sobel(src: UMat, ddepth: int, dx: int, dy: int, dst: UMat | None = ..., ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def absdiff(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def absdiff(src1: UMat, src2: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def accumulate(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def accumulate(src: UMat, dst: UMat, mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def accumulateProduct(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def accumulateProduct(src1: UMat, src2: UMat, dst: UMat, mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def accumulateSquare(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def accumulateSquare(src: UMat, dst: UMat, mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def accumulateWeighted(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, alpha: float, mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def accumulateWeighted(src: UMat, dst: UMat, alpha: float, mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def adaptiveThreshold(src: cv2.typing.MatLike, maxValue: float, adaptiveMethod: int, thresholdType: int, blockSize: int, C: float, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def adaptiveThreshold(src: UMat, maxValue: float, adaptiveMethod: int, thresholdType: int, blockSize: int, C: float, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def add(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def add(src1: UMat, src2: UMat, dst: UMat | None = ..., mask: UMat | None = ..., dtype: int = ...) -> UMat: ...
+
+def addText(img: cv2.typing.MatLike, text: str, org: cv2.typing.Point, nameFont: str, pointSize: int = ..., color: cv2.typing.Scalar = ..., weight: int = ..., style: int = ..., spacing: int = ...) -> None: ...
+
+@_typing.overload
+def addWeighted(src1: cv2.typing.MatLike, alpha: float, src2: cv2.typing.MatLike, beta: float, gamma: float, dst: cv2.typing.MatLike | None = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def addWeighted(src1: UMat, alpha: float, src2: UMat, beta: float, gamma: float, dst: UMat | None = ..., dtype: int = ...) -> UMat: ...
+
+@_typing.overload
+def applyColorMap(src: cv2.typing.MatLike, colormap: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def applyColorMap(src: UMat, colormap: int, dst: UMat | None = ...) -> UMat: ...
+@_typing.overload
+def applyColorMap(src: cv2.typing.MatLike, userColor: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def applyColorMap(src: UMat, userColor: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def approxPolyDP(curve: cv2.typing.MatLike, epsilon: float, closed: bool, approxCurve: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def approxPolyDP(curve: UMat, epsilon: float, closed: bool, approxCurve: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def arcLength(curve: cv2.typing.MatLike, closed: bool) -> float: ...
+@_typing.overload
+def arcLength(curve: UMat, closed: bool) -> float: ...
+
+@_typing.overload
+def arrowedLine(img: cv2.typing.MatLike, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., line_type: int = ..., shift: int = ..., tipLength: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def arrowedLine(img: UMat, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., line_type: int = ..., shift: int = ..., tipLength: float = ...) -> UMat: ...
+
+@_typing.overload
+def batchDistance(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dtype: int, dist: cv2.typing.MatLike | None = ..., nidx: cv2.typing.MatLike | None = ..., normType: int = ..., K: int = ..., mask: cv2.typing.MatLike | None = ..., update: int = ..., crosscheck: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def batchDistance(src1: UMat, src2: UMat, dtype: int, dist: UMat | None = ..., nidx: UMat | None = ..., normType: int = ..., K: int = ..., mask: UMat | None = ..., update: int = ..., crosscheck: bool = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def bilateralFilter(src: cv2.typing.MatLike, d: int, sigmaColor: float, sigmaSpace: float, dst: cv2.typing.MatLike | None = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def bilateralFilter(src: UMat, d: int, sigmaColor: float, sigmaSpace: float, dst: UMat | None = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def bitwise_and(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def bitwise_and(src1: UMat, src2: UMat, dst: UMat | None = ..., mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def bitwise_not(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def bitwise_not(src: UMat, dst: UMat | None = ..., mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def bitwise_or(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def bitwise_or(src1: UMat, src2: UMat, dst: UMat | None = ..., mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def bitwise_xor(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def bitwise_xor(src1: UMat, src2: UMat, dst: UMat | None = ..., mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def blendLinear(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, weights1: cv2.typing.MatLike, weights2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def blendLinear(src1: UMat, src2: UMat, weights1: UMat, weights2: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def blur(src: cv2.typing.MatLike, ksize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def blur(src: UMat, ksize: cv2.typing.Size, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., borderType: int = ...) -> UMat: ...
+
+def borderInterpolate(p: int, len: int, borderType: int) -> int: ...
+
+@_typing.overload
+def boundingRect(array: cv2.typing.MatLike) -> cv2.typing.Rect: ...
+@_typing.overload
+def boundingRect(array: UMat) -> cv2.typing.Rect: ...
+
+@_typing.overload
+def boxFilter(src: cv2.typing.MatLike, ddepth: int, ksize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def boxFilter(src: UMat, ddepth: int, ksize: cv2.typing.Size, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def boxPoints(box: cv2.typing.RotatedRect, points: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def boxPoints(box: cv2.typing.RotatedRect, points: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def broadcast(src: cv2.typing.MatLike, shape: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def broadcast(src: UMat, shape: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def buildOpticalFlowPyramid(img: cv2.typing.MatLike, winSize: cv2.typing.Size, maxLevel: int, pyramid: _typing.Sequence[cv2.typing.MatLike] | None = ..., withDerivatives: bool = ..., pyrBorder: int = ..., derivBorder: int = ..., tryReuseInputImage: bool = ...) -> tuple[int, _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def buildOpticalFlowPyramid(img: UMat, winSize: cv2.typing.Size, maxLevel: int, pyramid: _typing.Sequence[UMat] | None = ..., withDerivatives: bool = ..., pyrBorder: int = ..., derivBorder: int = ..., tryReuseInputImage: bool = ...) -> tuple[int, _typing.Sequence[UMat]]: ...
+
+@_typing.overload
+def calcBackProject(images: _typing.Sequence[cv2.typing.MatLike], channels: _typing.Sequence[int], hist: cv2.typing.MatLike, ranges: _typing.Sequence[float], scale: float, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def calcBackProject(images: _typing.Sequence[UMat], channels: _typing.Sequence[int], hist: UMat, ranges: _typing.Sequence[float], scale: float, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def calcCovarMatrix(samples: cv2.typing.MatLike, mean: cv2.typing.MatLike, flags: int, covar: cv2.typing.MatLike | None = ..., ctype: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calcCovarMatrix(samples: UMat, mean: UMat, flags: int, covar: UMat | None = ..., ctype: int = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def calcHist(images: _typing.Sequence[cv2.typing.MatLike], channels: _typing.Sequence[int], mask: cv2.typing.MatLike | None, histSize: _typing.Sequence[int], ranges: _typing.Sequence[float], hist: cv2.typing.MatLike | None = ..., accumulate: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def calcHist(images: _typing.Sequence[UMat], channels: _typing.Sequence[int], mask: UMat | None, histSize: _typing.Sequence[int], ranges: _typing.Sequence[float], hist: UMat | None = ..., accumulate: bool = ...) -> UMat: ...
+
+@_typing.overload
+def calcOpticalFlowFarneback(prev: cv2.typing.MatLike, next: cv2.typing.MatLike, flow: cv2.typing.MatLike, pyr_scale: float, levels: int, winsize: int, iterations: int, poly_n: int, poly_sigma: float, flags: int) -> cv2.typing.MatLike: ...
+@_typing.overload
+def calcOpticalFlowFarneback(prev: UMat, next: UMat, flow: UMat, pyr_scale: float, levels: int, winsize: int, iterations: int, poly_n: int, poly_sigma: float, flags: int) -> UMat: ...
+
+@_typing.overload
+def calcOpticalFlowPyrLK(prevImg: cv2.typing.MatLike, nextImg: cv2.typing.MatLike, prevPts: cv2.typing.MatLike, nextPts: cv2.typing.MatLike, status: cv2.typing.MatLike | None = ..., err: cv2.typing.MatLike | None = ..., winSize: cv2.typing.Size = ..., maxLevel: int = ..., criteria: cv2.typing.TermCriteria = ..., flags: int = ..., minEigThreshold: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calcOpticalFlowPyrLK(prevImg: UMat, nextImg: UMat, prevPts: UMat, nextPts: UMat, status: UMat | None = ..., err: UMat | None = ..., winSize: cv2.typing.Size = ..., maxLevel: int = ..., criteria: cv2.typing.TermCriteria = ..., flags: int = ..., minEigThreshold: float = ...) -> tuple[UMat, UMat, UMat]: ...
+
+@_typing.overload
+def calibrateCamera(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def calibrateCamera(objectPoints: _typing.Sequence[UMat], imagePoints: _typing.Sequence[UMat], imageSize: cv2.typing.Size, cameraMatrix: UMat, distCoeffs: UMat, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, _typing.Sequence[UMat], _typing.Sequence[UMat]]: ...
+
+@_typing.overload
+def calibrateCameraExtended(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., stdDeviationsIntrinsics: cv2.typing.MatLike | None = ..., stdDeviationsExtrinsics: cv2.typing.MatLike | None = ..., perViewErrors: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calibrateCameraExtended(objectPoints: _typing.Sequence[UMat], imagePoints: _typing.Sequence[UMat], imageSize: cv2.typing.Size, cameraMatrix: UMat, distCoeffs: UMat, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., stdDeviationsIntrinsics: UMat | None = ..., stdDeviationsExtrinsics: UMat | None = ..., perViewErrors: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, _typing.Sequence[UMat], _typing.Sequence[UMat], UMat, UMat, UMat]: ...
+
+@_typing.overload
+def calibrateCameraRO(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, iFixedPoint: int, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., newObjPoints: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+@_typing.overload
+def calibrateCameraRO(objectPoints: _typing.Sequence[UMat], imagePoints: _typing.Sequence[UMat], imageSize: cv2.typing.Size, iFixedPoint: int, cameraMatrix: UMat, distCoeffs: UMat, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., newObjPoints: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, _typing.Sequence[UMat], _typing.Sequence[UMat], UMat]: ...
+
+@_typing.overload
+def calibrateCameraROExtended(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, iFixedPoint: int, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., newObjPoints: cv2.typing.MatLike | None = ..., stdDeviationsIntrinsics: cv2.typing.MatLike | None = ..., stdDeviationsExtrinsics: cv2.typing.MatLike | None = ..., stdDeviationsObjPoints: cv2.typing.MatLike | None = ..., perViewErrors: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calibrateCameraROExtended(objectPoints: _typing.Sequence[UMat], imagePoints: _typing.Sequence[UMat], imageSize: cv2.typing.Size, iFixedPoint: int, cameraMatrix: UMat, distCoeffs: UMat, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., newObjPoints: UMat | None = ..., stdDeviationsIntrinsics: UMat | None = ..., stdDeviationsExtrinsics: UMat | None = ..., stdDeviationsObjPoints: UMat | None = ..., perViewErrors: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, _typing.Sequence[UMat], _typing.Sequence[UMat], UMat, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def calibrateHandEye(R_gripper2base: _typing.Sequence[cv2.typing.MatLike], t_gripper2base: _typing.Sequence[cv2.typing.MatLike], R_target2cam: _typing.Sequence[cv2.typing.MatLike], t_target2cam: _typing.Sequence[cv2.typing.MatLike], R_cam2gripper: cv2.typing.MatLike | None = ..., t_cam2gripper: cv2.typing.MatLike | None = ..., method: HandEyeCalibrationMethod = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calibrateHandEye(R_gripper2base: _typing.Sequence[UMat], t_gripper2base: _typing.Sequence[UMat], R_target2cam: _typing.Sequence[UMat], t_target2cam: _typing.Sequence[UMat], R_cam2gripper: UMat | None = ..., t_cam2gripper: UMat | None = ..., method: HandEyeCalibrationMethod = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def calibrateRobotWorldHandEye(R_world2cam: _typing.Sequence[cv2.typing.MatLike], t_world2cam: _typing.Sequence[cv2.typing.MatLike], R_base2gripper: _typing.Sequence[cv2.typing.MatLike], t_base2gripper: _typing.Sequence[cv2.typing.MatLike], R_base2world: cv2.typing.MatLike | None = ..., t_base2world: cv2.typing.MatLike | None = ..., R_gripper2cam: cv2.typing.MatLike | None = ..., t_gripper2cam: cv2.typing.MatLike | None = ..., method: RobotWorldHandEyeCalibrationMethod = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def calibrateRobotWorldHandEye(R_world2cam: _typing.Sequence[UMat], t_world2cam: _typing.Sequence[UMat], R_base2gripper: _typing.Sequence[UMat], t_base2gripper: _typing.Sequence[UMat], R_base2world: UMat | None = ..., t_base2world: UMat | None = ..., R_gripper2cam: UMat | None = ..., t_gripper2cam: UMat | None = ..., method: RobotWorldHandEyeCalibrationMethod = ...) -> tuple[UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def calibrationMatrixValues(cameraMatrix: cv2.typing.MatLike, imageSize: cv2.typing.Size, apertureWidth: float, apertureHeight: float) -> tuple[float, float, float, cv2.typing.Point2d, float]: ...
+@_typing.overload
+def calibrationMatrixValues(cameraMatrix: UMat, imageSize: cv2.typing.Size, apertureWidth: float, apertureHeight: float) -> tuple[float, float, float, cv2.typing.Point2d, float]: ...
+
+@_typing.overload
+def cartToPolar(x: cv2.typing.MatLike, y: cv2.typing.MatLike, magnitude: cv2.typing.MatLike | None = ..., angle: cv2.typing.MatLike | None = ..., angleInDegrees: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def cartToPolar(x: UMat, y: UMat, magnitude: UMat | None = ..., angle: UMat | None = ..., angleInDegrees: bool = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def checkChessboard(img: cv2.typing.MatLike, size: cv2.typing.Size) -> bool: ...
+@_typing.overload
+def checkChessboard(img: UMat, size: cv2.typing.Size) -> bool: ...
+
+def checkHardwareSupport(feature: int) -> bool: ...
+
+@_typing.overload
+def checkRange(a: cv2.typing.MatLike, quiet: bool = ..., minVal: float = ..., maxVal: float = ...) -> tuple[bool, cv2.typing.Point]: ...
+@_typing.overload
+def checkRange(a: UMat, quiet: bool = ..., minVal: float = ..., maxVal: float = ...) -> tuple[bool, cv2.typing.Point]: ...
+
+@_typing.overload
+def circle(img: cv2.typing.MatLike, center: cv2.typing.Point, radius: int, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def circle(img: UMat, center: cv2.typing.Point, radius: int, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+
+def clipLine(imgRect: cv2.typing.Rect, pt1: cv2.typing.Point, pt2: cv2.typing.Point) -> tuple[bool, cv2.typing.Point, cv2.typing.Point]: ...
+
+@_typing.overload
+def colorChange(src: cv2.typing.MatLike, mask: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., red_mul: float = ..., green_mul: float = ..., blue_mul: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def colorChange(src: UMat, mask: UMat, dst: UMat | None = ..., red_mul: float = ..., green_mul: float = ..., blue_mul: float = ...) -> UMat: ...
+
+@_typing.overload
+def compare(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, cmpop: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def compare(src1: UMat, src2: UMat, cmpop: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def compareHist(H1: cv2.typing.MatLike, H2: cv2.typing.MatLike, method: int) -> float: ...
+@_typing.overload
+def compareHist(H1: UMat, H2: UMat, method: int) -> float: ...
+
+@_typing.overload
+def completeSymm(m: cv2.typing.MatLike, lowerToUpper: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def completeSymm(m: UMat, lowerToUpper: bool = ...) -> UMat: ...
+
+@_typing.overload
+def composeRT(rvec1: cv2.typing.MatLike, tvec1: cv2.typing.MatLike, rvec2: cv2.typing.MatLike, tvec2: cv2.typing.MatLike, rvec3: cv2.typing.MatLike | None = ..., tvec3: cv2.typing.MatLike | None = ..., dr3dr1: cv2.typing.MatLike | None = ..., dr3dt1: cv2.typing.MatLike | None = ..., dr3dr2: cv2.typing.MatLike | None = ..., dr3dt2: cv2.typing.MatLike | None = ..., dt3dr1: cv2.typing.MatLike | None = ..., dt3dt1: cv2.typing.MatLike | None = ..., dt3dr2: cv2.typing.MatLike | None = ..., dt3dt2: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def composeRT(rvec1: UMat, tvec1: UMat, rvec2: UMat, tvec2: UMat, rvec3: UMat | None = ..., tvec3: UMat | None = ..., dr3dr1: UMat | None = ..., dr3dt1: UMat | None = ..., dr3dr2: UMat | None = ..., dr3dt2: UMat | None = ..., dt3dr1: UMat | None = ..., dt3dt1: UMat | None = ..., dt3dr2: UMat | None = ..., dt3dt2: UMat | None = ...) -> tuple[UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def computeCorrespondEpilines(points: cv2.typing.MatLike, whichImage: int, F: cv2.typing.MatLike, lines: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def computeCorrespondEpilines(points: UMat, whichImage: int, F: UMat, lines: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def computeECC(templateImage: cv2.typing.MatLike, inputImage: cv2.typing.MatLike, inputMask: cv2.typing.MatLike | None = ...) -> float: ...
+@_typing.overload
+def computeECC(templateImage: UMat, inputImage: UMat, inputMask: UMat | None = ...) -> float: ...
+
+@_typing.overload
+def connectedComponents(image: cv2.typing.MatLike, labels: cv2.typing.MatLike | None = ..., connectivity: int = ..., ltype: int = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def connectedComponents(image: UMat, labels: UMat | None = ..., connectivity: int = ..., ltype: int = ...) -> tuple[int, UMat]: ...
+
+@_typing.overload
+def connectedComponentsWithAlgorithm(image: cv2.typing.MatLike, connectivity: int, ltype: int, ccltype: int, labels: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def connectedComponentsWithAlgorithm(image: UMat, connectivity: int, ltype: int, ccltype: int, labels: UMat | None = ...) -> tuple[int, UMat]: ...
+
+@_typing.overload
+def connectedComponentsWithStats(image: cv2.typing.MatLike, labels: cv2.typing.MatLike | None = ..., stats: cv2.typing.MatLike | None = ..., centroids: cv2.typing.MatLike | None = ..., connectivity: int = ..., ltype: int = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def connectedComponentsWithStats(image: UMat, labels: UMat | None = ..., stats: UMat | None = ..., centroids: UMat | None = ..., connectivity: int = ..., ltype: int = ...) -> tuple[int, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def connectedComponentsWithStatsWithAlgorithm(image: cv2.typing.MatLike, connectivity: int, ltype: int, ccltype: int, labels: cv2.typing.MatLike | None = ..., stats: cv2.typing.MatLike | None = ..., centroids: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def connectedComponentsWithStatsWithAlgorithm(image: UMat, connectivity: int, ltype: int, ccltype: int, labels: UMat | None = ..., stats: UMat | None = ..., centroids: UMat | None = ...) -> tuple[int, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def contourArea(contour: cv2.typing.MatLike, oriented: bool = ...) -> float: ...
+@_typing.overload
+def contourArea(contour: UMat, oriented: bool = ...) -> float: ...
+
+@_typing.overload
+def convertFp16(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convertFp16(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def convertMaps(map1: cv2.typing.MatLike, map2: cv2.typing.MatLike, dstmap1type: int, dstmap1: cv2.typing.MatLike | None = ..., dstmap2: cv2.typing.MatLike | None = ..., nninterpolation: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def convertMaps(map1: UMat, map2: UMat, dstmap1type: int, dstmap1: UMat | None = ..., dstmap2: UMat | None = ..., nninterpolation: bool = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def convertPointsFromHomogeneous(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convertPointsFromHomogeneous(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def convertPointsToHomogeneous(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convertPointsToHomogeneous(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def convertScaleAbs(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., alpha: float = ..., beta: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convertScaleAbs(src: UMat, dst: UMat | None = ..., alpha: float = ..., beta: float = ...) -> UMat: ...
+
+@_typing.overload
+def convexHull(points: cv2.typing.MatLike, hull: cv2.typing.MatLike | None = ..., clockwise: bool = ..., returnPoints: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convexHull(points: UMat, hull: UMat | None = ..., clockwise: bool = ..., returnPoints: bool = ...) -> UMat: ...
+
+@_typing.overload
+def convexityDefects(contour: cv2.typing.MatLike, convexhull: cv2.typing.MatLike, convexityDefects: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def convexityDefects(contour: UMat, convexhull: UMat, convexityDefects: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def copyMakeBorder(src: cv2.typing.MatLike, top: int, bottom: int, left: int, right: int, borderType: int, dst: cv2.typing.MatLike | None = ..., value: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def copyMakeBorder(src: UMat, top: int, bottom: int, left: int, right: int, borderType: int, dst: UMat | None = ..., value: cv2.typing.Scalar = ...) -> UMat: ...
+
+@_typing.overload
+def copyTo(src: cv2.typing.MatLike, mask: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def copyTo(src: UMat, mask: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def cornerEigenValsAndVecs(src: cv2.typing.MatLike, blockSize: int, ksize: int, dst: cv2.typing.MatLike | None = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cornerEigenValsAndVecs(src: UMat, blockSize: int, ksize: int, dst: UMat | None = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def cornerHarris(src: cv2.typing.MatLike, blockSize: int, ksize: int, k: float, dst: cv2.typing.MatLike | None = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cornerHarris(src: UMat, blockSize: int, ksize: int, k: float, dst: UMat | None = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def cornerMinEigenVal(src: cv2.typing.MatLike, blockSize: int, dst: cv2.typing.MatLike | None = ..., ksize: int = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cornerMinEigenVal(src: UMat, blockSize: int, dst: UMat | None = ..., ksize: int = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def cornerSubPix(image: cv2.typing.MatLike, corners: cv2.typing.MatLike, winSize: cv2.typing.Size, zeroZone: cv2.typing.Size, criteria: cv2.typing.TermCriteria) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cornerSubPix(image: UMat, corners: UMat, winSize: cv2.typing.Size, zeroZone: cv2.typing.Size, criteria: cv2.typing.TermCriteria) -> UMat: ...
+
+@_typing.overload
+def correctMatches(F: cv2.typing.MatLike, points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, newPoints1: cv2.typing.MatLike | None = ..., newPoints2: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def correctMatches(F: UMat, points1: UMat, points2: UMat, newPoints1: UMat | None = ..., newPoints2: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def countNonZero(src: cv2.typing.MatLike) -> int: ...
+@_typing.overload
+def countNonZero(src: UMat) -> int: ...
+
+def createAlignMTB(max_bits: int = ..., exclude_range: int = ..., cut: bool = ...) -> AlignMTB: ...
+
+def createBackgroundSubtractorKNN(history: int = ..., dist2Threshold: float = ..., detectShadows: bool = ...) -> BackgroundSubtractorKNN: ...
+
+def createBackgroundSubtractorMOG2(history: int = ..., varThreshold: float = ..., detectShadows: bool = ...) -> BackgroundSubtractorMOG2: ...
+
+def createCLAHE(clipLimit: float = ..., tileGridSize: cv2.typing.Size = ...) -> CLAHE: ...
+
+def createCalibrateDebevec(samples: int = ..., lambda_: float = ..., random: bool = ...) -> CalibrateDebevec: ...
+
+def createCalibrateRobertson(max_iter: int = ..., threshold: float = ...) -> CalibrateRobertson: ...
+
+def createGeneralizedHoughBallard() -> GeneralizedHoughBallard: ...
+
+def createGeneralizedHoughGuil() -> GeneralizedHoughGuil: ...
+
+@_typing.overload
+def createHanningWindow(winSize: cv2.typing.Size, type: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def createHanningWindow(winSize: cv2.typing.Size, type: int, dst: UMat | None = ...) -> UMat: ...
+
+def createLineSegmentDetector(refine: int = ..., scale: float = ..., sigma_scale: float = ..., quant: float = ..., ang_th: float = ..., log_eps: float = ..., density_th: float = ..., n_bins: int = ...) -> LineSegmentDetector: ...
+
+def createMergeDebevec() -> MergeDebevec: ...
+
+def createMergeMertens(contrast_weight: float = ..., saturation_weight: float = ..., exposure_weight: float = ...) -> MergeMertens: ...
+
+def createMergeRobertson() -> MergeRobertson: ...
+
+def createTonemap(gamma: float = ...) -> Tonemap: ...
+
+def createTonemapDrago(gamma: float = ..., saturation: float = ..., bias: float = ...) -> TonemapDrago: ...
+
+def createTonemapMantiuk(gamma: float = ..., scale: float = ..., saturation: float = ...) -> TonemapMantiuk: ...
+
+def createTonemapReinhard(gamma: float = ..., intensity: float = ..., light_adapt: float = ..., color_adapt: float = ...) -> TonemapReinhard: ...
+
+def cubeRoot(val: float) -> float: ...
+
+def currentUIFramework() -> str: ...
+
+@_typing.overload
+def cvtColor(src: cv2.typing.MatLike, code: int, dst: cv2.typing.MatLike | None = ..., dstCn: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cvtColor(src: UMat, code: int, dst: UMat | None = ..., dstCn: int = ...) -> UMat: ...
+
+@_typing.overload
+def cvtColorTwoPlane(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, code: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def cvtColorTwoPlane(src1: UMat, src2: UMat, code: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def dct(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def dct(src: UMat, dst: UMat | None = ..., flags: int = ...) -> UMat: ...
+
+@_typing.overload
+def decolor(src: cv2.typing.MatLike, grayscale: cv2.typing.MatLike | None = ..., color_boost: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def decolor(src: UMat, grayscale: UMat | None = ..., color_boost: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def decomposeEssentialMat(E: cv2.typing.MatLike, R1: cv2.typing.MatLike | None = ..., R2: cv2.typing.MatLike | None = ..., t: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def decomposeEssentialMat(E: UMat, R1: UMat | None = ..., R2: UMat | None = ..., t: UMat | None = ...) -> tuple[UMat, UMat, UMat]: ...
+
+@_typing.overload
+def decomposeHomographyMat(H: cv2.typing.MatLike, K: cv2.typing.MatLike, rotations: _typing.Sequence[cv2.typing.MatLike] | None = ..., translations: _typing.Sequence[cv2.typing.MatLike] | None = ..., normals: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[int, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def decomposeHomographyMat(H: UMat, K: UMat, rotations: _typing.Sequence[UMat] | None = ..., translations: _typing.Sequence[UMat] | None = ..., normals: _typing.Sequence[UMat] | None = ...) -> tuple[int, _typing.Sequence[UMat], _typing.Sequence[UMat], _typing.Sequence[UMat]]: ...
+
+@_typing.overload
+def decomposeProjectionMatrix(projMatrix: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike | None = ..., rotMatrix: cv2.typing.MatLike | None = ..., transVect: cv2.typing.MatLike | None = ..., rotMatrixX: cv2.typing.MatLike | None = ..., rotMatrixY: cv2.typing.MatLike | None = ..., rotMatrixZ: cv2.typing.MatLike | None = ..., eulerAngles: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def decomposeProjectionMatrix(projMatrix: UMat, cameraMatrix: UMat | None = ..., rotMatrix: UMat | None = ..., transVect: UMat | None = ..., rotMatrixX: UMat | None = ..., rotMatrixY: UMat | None = ..., rotMatrixZ: UMat | None = ..., eulerAngles: UMat | None = ...) -> tuple[UMat, UMat, UMat, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def demosaicing(src: cv2.typing.MatLike, code: int, dst: cv2.typing.MatLike | None = ..., dstCn: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def demosaicing(src: UMat, code: int, dst: UMat | None = ..., dstCn: int = ...) -> UMat: ...
+
+def denoise_TVL1(observations: _typing.Sequence[cv2.typing.MatLike], result: cv2.typing.MatLike, lambda_: float = ..., niters: int = ...) -> None: ...
+
+def destroyAllWindows() -> None: ...
+
+def destroyWindow(winname: str) -> None: ...
+
+@_typing.overload
+def detailEnhance(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., sigma_s: float = ..., sigma_r: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def detailEnhance(src: UMat, dst: UMat | None = ..., sigma_s: float = ..., sigma_r: float = ...) -> UMat: ...
+
+@_typing.overload
+def determinant(mtx: cv2.typing.MatLike) -> float: ...
+@_typing.overload
+def determinant(mtx: UMat) -> float: ...
+
+@_typing.overload
+def dft(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ..., nonzeroRows: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def dft(src: UMat, dst: UMat | None = ..., flags: int = ..., nonzeroRows: int = ...) -> UMat: ...
+
+@_typing.overload
+def dilate(src: cv2.typing.MatLike, kernel: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def dilate(src: UMat, kernel: UMat, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+def displayOverlay(winname: str, text: str, delayms: int = ...) -> None: ...
+
+def displayStatusBar(winname: str, text: str, delayms: int = ...) -> None: ...
+
+@_typing.overload
+def distanceTransform(src: cv2.typing.MatLike, distanceType: int, maskSize: int, dst: cv2.typing.MatLike | None = ..., dstType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def distanceTransform(src: UMat, distanceType: int, maskSize: int, dst: UMat | None = ..., dstType: int = ...) -> UMat: ...
+
+@_typing.overload
+def distanceTransformWithLabels(src: cv2.typing.MatLike, distanceType: int, maskSize: int, dst: cv2.typing.MatLike | None = ..., labels: cv2.typing.MatLike | None = ..., labelType: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def distanceTransformWithLabels(src: UMat, distanceType: int, maskSize: int, dst: UMat | None = ..., labels: UMat | None = ..., labelType: int = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def divSpectrums(a: cv2.typing.MatLike, b: cv2.typing.MatLike, flags: int, c: cv2.typing.MatLike | None = ..., conjB: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def divSpectrums(a: UMat, b: UMat, flags: int, c: UMat | None = ..., conjB: bool = ...) -> UMat: ...
+
+@_typing.overload
+def divide(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., scale: float = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def divide(src1: UMat, src2: UMat, dst: UMat | None = ..., scale: float = ..., dtype: int = ...) -> UMat: ...
+@_typing.overload
+def divide(scale: float, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def divide(scale: float, src2: UMat, dst: UMat | None = ..., dtype: int = ...) -> UMat: ...
+
+@_typing.overload
+def drawChessboardCorners(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, corners: cv2.typing.MatLike, patternWasFound: bool) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawChessboardCorners(image: UMat, patternSize: cv2.typing.Size, corners: UMat, patternWasFound: bool) -> UMat: ...
+
+@_typing.overload
+def drawContours(image: cv2.typing.MatLike, contours: _typing.Sequence[cv2.typing.MatLike], contourIdx: int, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., hierarchy: cv2.typing.MatLike | None = ..., maxLevel: int = ..., offset: cv2.typing.Point = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawContours(image: UMat, contours: _typing.Sequence[UMat], contourIdx: int, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., hierarchy: UMat | None = ..., maxLevel: int = ..., offset: cv2.typing.Point = ...) -> UMat: ...
+
+@_typing.overload
+def drawFrameAxes(image: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike, tvec: cv2.typing.MatLike, length: float, thickness: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawFrameAxes(image: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat, tvec: UMat, length: float, thickness: int = ...) -> UMat: ...
+
+@_typing.overload
+def drawKeypoints(image: cv2.typing.MatLike, keypoints: _typing.Sequence[KeyPoint], outImage: cv2.typing.MatLike, color: cv2.typing.Scalar = ..., flags: DrawMatchesFlags = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawKeypoints(image: UMat, keypoints: _typing.Sequence[KeyPoint], outImage: UMat, color: cv2.typing.Scalar = ..., flags: DrawMatchesFlags = ...) -> UMat: ...
+
+@_typing.overload
+def drawMarker(img: cv2.typing.MatLike, position: cv2.typing.Point, color: cv2.typing.Scalar, markerType: int = ..., markerSize: int = ..., thickness: int = ..., line_type: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawMarker(img: UMat, position: cv2.typing.Point, color: cv2.typing.Scalar, markerType: int = ..., markerSize: int = ..., thickness: int = ..., line_type: int = ...) -> UMat: ...
+
+@_typing.overload
+def drawMatches(img1: cv2.typing.MatLike, keypoints1: _typing.Sequence[KeyPoint], img2: cv2.typing.MatLike, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[DMatch], outImg: cv2.typing.MatLike, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[str] = ..., flags: DrawMatchesFlags = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawMatches(img1: UMat, keypoints1: _typing.Sequence[KeyPoint], img2: UMat, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[DMatch], outImg: UMat, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[str] = ..., flags: DrawMatchesFlags = ...) -> UMat: ...
+@_typing.overload
+def drawMatches(img1: cv2.typing.MatLike, keypoints1: _typing.Sequence[KeyPoint], img2: cv2.typing.MatLike, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[DMatch], outImg: cv2.typing.MatLike, matchesThickness: int, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[str] = ..., flags: DrawMatchesFlags = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawMatches(img1: UMat, keypoints1: _typing.Sequence[KeyPoint], img2: UMat, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[DMatch], outImg: UMat, matchesThickness: int, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[str] = ..., flags: DrawMatchesFlags = ...) -> UMat: ...
+
+@_typing.overload
+def drawMatchesKnn(img1: cv2.typing.MatLike, keypoints1: _typing.Sequence[KeyPoint], img2: cv2.typing.MatLike, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[_typing.Sequence[DMatch]], outImg: cv2.typing.MatLike, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[_typing.Sequence[str]] = ..., flags: DrawMatchesFlags = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawMatchesKnn(img1: UMat, keypoints1: _typing.Sequence[KeyPoint], img2: UMat, keypoints2: _typing.Sequence[KeyPoint], matches1to2: _typing.Sequence[_typing.Sequence[DMatch]], outImg: UMat, matchColor: cv2.typing.Scalar = ..., singlePointColor: cv2.typing.Scalar = ..., matchesMask: _typing.Sequence[_typing.Sequence[str]] = ..., flags: DrawMatchesFlags = ...) -> UMat: ...
+
+@_typing.overload
+def edgePreservingFilter(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ..., sigma_s: float = ..., sigma_r: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def edgePreservingFilter(src: UMat, dst: UMat | None = ..., flags: int = ..., sigma_s: float = ..., sigma_r: float = ...) -> UMat: ...
+
+@_typing.overload
+def eigen(src: cv2.typing.MatLike, eigenvalues: cv2.typing.MatLike | None = ..., eigenvectors: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def eigen(src: UMat, eigenvalues: UMat | None = ..., eigenvectors: UMat | None = ...) -> tuple[bool, UMat, UMat]: ...
+
+@_typing.overload
+def eigenNonSymmetric(src: cv2.typing.MatLike, eigenvalues: cv2.typing.MatLike | None = ..., eigenvectors: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def eigenNonSymmetric(src: UMat, eigenvalues: UMat | None = ..., eigenvectors: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def ellipse(img: cv2.typing.MatLike, center: cv2.typing.Point, axes: cv2.typing.Size, angle: float, startAngle: float, endAngle: float, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def ellipse(img: UMat, center: cv2.typing.Point, axes: cv2.typing.Size, angle: float, startAngle: float, endAngle: float, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+@_typing.overload
+def ellipse(img: cv2.typing.MatLike, box: cv2.typing.RotatedRect, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def ellipse(img: UMat, box: cv2.typing.RotatedRect, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ...) -> UMat: ...
+
+def ellipse2Poly(center: cv2.typing.Point, axes: cv2.typing.Size, angle: int, arcStart: int, arcEnd: int, delta: int) -> _typing.Sequence[cv2.typing.Point]: ...
+
+def empty_array_desc() -> GArrayDesc: ...
+
+def empty_gopaque_desc() -> GOpaqueDesc: ...
+
+def empty_scalar_desc() -> GScalarDesc: ...
+
+@_typing.overload
+def equalizeHist(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def equalizeHist(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def erode(src: cv2.typing.MatLike, kernel: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def erode(src: UMat, kernel: UMat, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+@_typing.overload
+def estimateAffine2D(from_: cv2.typing.MatLike, to: cv2.typing.MatLike, inliers: cv2.typing.MatLike | None = ..., method: int = ..., ransacReprojThreshold: float = ..., maxIters: int = ..., confidence: float = ..., refineIters: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateAffine2D(from_: UMat, to: UMat, inliers: UMat | None = ..., method: int = ..., ransacReprojThreshold: float = ..., maxIters: int = ..., confidence: float = ..., refineIters: int = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def estimateAffine2D(pts1: cv2.typing.MatLike, pts2: cv2.typing.MatLike, params: UsacParams, inliers: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateAffine2D(pts1: UMat, pts2: UMat, params: UsacParams, inliers: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+
+@_typing.overload
+def estimateAffine3D(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, out: cv2.typing.MatLike | None = ..., inliers: cv2.typing.MatLike | None = ..., ransacThreshold: float = ..., confidence: float = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateAffine3D(src: UMat, dst: UMat, out: UMat | None = ..., inliers: UMat | None = ..., ransacThreshold: float = ..., confidence: float = ...) -> tuple[int, UMat, UMat]: ...
+@_typing.overload
+def estimateAffine3D(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, force_rotation: bool = ...) -> tuple[cv2.typing.MatLike, float]: ...
+@_typing.overload
+def estimateAffine3D(src: UMat, dst: UMat, force_rotation: bool = ...) -> tuple[cv2.typing.MatLike, float]: ...
+
+@_typing.overload
+def estimateAffinePartial2D(from_: cv2.typing.MatLike, to: cv2.typing.MatLike, inliers: cv2.typing.MatLike | None = ..., method: int = ..., ransacReprojThreshold: float = ..., maxIters: int = ..., confidence: float = ..., refineIters: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateAffinePartial2D(from_: UMat, to: UMat, inliers: UMat | None = ..., method: int = ..., ransacReprojThreshold: float = ..., maxIters: int = ..., confidence: float = ..., refineIters: int = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+
+@_typing.overload
+def estimateChessboardSharpness(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, corners: cv2.typing.MatLike, rise_distance: float = ..., vertical: bool = ..., sharpness: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.Scalar, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateChessboardSharpness(image: UMat, patternSize: cv2.typing.Size, corners: UMat, rise_distance: float = ..., vertical: bool = ..., sharpness: UMat | None = ...) -> tuple[cv2.typing.Scalar, UMat]: ...
+
+@_typing.overload
+def estimateTranslation3D(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, out: cv2.typing.MatLike | None = ..., inliers: cv2.typing.MatLike | None = ..., ransacThreshold: float = ..., confidence: float = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def estimateTranslation3D(src: UMat, dst: UMat, out: UMat | None = ..., inliers: UMat | None = ..., ransacThreshold: float = ..., confidence: float = ...) -> tuple[int, UMat, UMat]: ...
+
+@_typing.overload
+def exp(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def exp(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def extractChannel(src: cv2.typing.MatLike, coi: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def extractChannel(src: UMat, coi: int, dst: UMat | None = ...) -> UMat: ...
+
+def fastAtan2(y: float, x: float) -> float: ...
+
+@_typing.overload
+def fastNlMeansDenoising(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., h: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoising(src: UMat, dst: UMat | None = ..., h: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> UMat: ...
+@_typing.overload
+def fastNlMeansDenoising(src: cv2.typing.MatLike, h: _typing.Sequence[float], dst: cv2.typing.MatLike | None = ..., templateWindowSize: int = ..., searchWindowSize: int = ..., normType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoising(src: UMat, h: _typing.Sequence[float], dst: UMat | None = ..., templateWindowSize: int = ..., searchWindowSize: int = ..., normType: int = ...) -> UMat: ...
+
+@_typing.overload
+def fastNlMeansDenoisingColored(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., h: float = ..., hColor: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoisingColored(src: UMat, dst: UMat | None = ..., h: float = ..., hColor: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> UMat: ...
+
+@_typing.overload
+def fastNlMeansDenoisingColoredMulti(srcImgs: _typing.Sequence[cv2.typing.MatLike], imgToDenoiseIndex: int, temporalWindowSize: int, dst: cv2.typing.MatLike | None = ..., h: float = ..., hColor: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoisingColoredMulti(srcImgs: _typing.Sequence[UMat], imgToDenoiseIndex: int, temporalWindowSize: int, dst: UMat | None = ..., h: float = ..., hColor: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> UMat: ...
+
+@_typing.overload
+def fastNlMeansDenoisingMulti(srcImgs: _typing.Sequence[cv2.typing.MatLike], imgToDenoiseIndex: int, temporalWindowSize: int, dst: cv2.typing.MatLike | None = ..., h: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoisingMulti(srcImgs: _typing.Sequence[UMat], imgToDenoiseIndex: int, temporalWindowSize: int, dst: UMat | None = ..., h: float = ..., templateWindowSize: int = ..., searchWindowSize: int = ...) -> UMat: ...
+@_typing.overload
+def fastNlMeansDenoisingMulti(srcImgs: _typing.Sequence[cv2.typing.MatLike], imgToDenoiseIndex: int, temporalWindowSize: int, h: _typing.Sequence[float], dst: cv2.typing.MatLike | None = ..., templateWindowSize: int = ..., searchWindowSize: int = ..., normType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fastNlMeansDenoisingMulti(srcImgs: _typing.Sequence[UMat], imgToDenoiseIndex: int, temporalWindowSize: int, h: _typing.Sequence[float], dst: UMat | None = ..., templateWindowSize: int = ..., searchWindowSize: int = ..., normType: int = ...) -> UMat: ...
+
+# --- Polygon fill, 2D filtering, and speckle/corner utility stubs ---
+# NOTE(review): auto-generated cv2 type-stub overloads; each function is
+# declared twice (cv2.typing.MatLike variant, then UMat variant). Signatures
+# must mirror the compiled bindings exactly, so code is left byte-identical.
+@_typing.overload
+def fillConvexPoly(img: cv2.typing.MatLike, points: cv2.typing.MatLike, color: cv2.typing.Scalar, lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fillConvexPoly(img: UMat, points: UMat, color: cv2.typing.Scalar, lineType: int = ..., shift: int = ...) -> UMat: ...
+
+@_typing.overload
+def fillPoly(img: cv2.typing.MatLike, pts: _typing.Sequence[cv2.typing.MatLike], color: cv2.typing.Scalar, lineType: int = ..., shift: int = ..., offset: cv2.typing.Point = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fillPoly(img: UMat, pts: _typing.Sequence[UMat], color: cv2.typing.Scalar, lineType: int = ..., shift: int = ..., offset: cv2.typing.Point = ...) -> UMat: ...
+
+@_typing.overload
+def filter2D(src: cv2.typing.MatLike, ddepth: int, kernel: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., delta: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def filter2D(src: UMat, ddepth: int, kernel: UMat, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., delta: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def filterHomographyDecompByVisibleRefpoints(rotations: _typing.Sequence[cv2.typing.MatLike], normals: _typing.Sequence[cv2.typing.MatLike], beforePoints: cv2.typing.MatLike, afterPoints: cv2.typing.MatLike, possibleSolutions: cv2.typing.MatLike | None = ..., pointsMask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def filterHomographyDecompByVisibleRefpoints(rotations: _typing.Sequence[UMat], normals: _typing.Sequence[UMat], beforePoints: UMat, afterPoints: UMat, possibleSolutions: UMat | None = ..., pointsMask: UMat | None = ...) -> UMat: ...
+
+# filterSpeckles mutates `img` in place; the stub reflects this by returning
+# the (img, buf) pair as a tuple.
+@_typing.overload
+def filterSpeckles(img: cv2.typing.MatLike, newVal: float, maxSpeckleSize: int, maxDiff: float, buf: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def filterSpeckles(img: UMat, newVal: float, maxSpeckleSize: int, maxDiff: float, buf: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def find4QuadCornerSubpix(img: cv2.typing.MatLike, corners: cv2.typing.MatLike, region_size: cv2.typing.Size) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def find4QuadCornerSubpix(img: UMat, corners: UMat, region_size: cv2.typing.Size) -> tuple[bool, UMat]: ...
+
+# --- Chessboard / circles-grid pattern detection and contour extraction ---
+# Detection stubs return a (found: bool, corners) tuple; contour stubs return
+# (contours, hierarchy). MatLike and UMat overload pairs as elsewhere.
+@_typing.overload
+def findChessboardCorners(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, corners: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def findChessboardCorners(image: UMat, patternSize: cv2.typing.Size, corners: UMat | None = ..., flags: int = ...) -> tuple[bool, UMat]: ...
+
+@_typing.overload
+def findChessboardCornersSB(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, corners: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def findChessboardCornersSB(image: UMat, patternSize: cv2.typing.Size, corners: UMat | None = ..., flags: int = ...) -> tuple[bool, UMat]: ...
+
+@_typing.overload
+def findChessboardCornersSBWithMeta(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, flags: int, corners: cv2.typing.MatLike | None = ..., meta: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findChessboardCornersSBWithMeta(image: UMat, patternSize: cv2.typing.Size, flags: int, corners: UMat | None = ..., meta: UMat | None = ...) -> tuple[bool, UMat, UMat]: ...
+
+# findCirclesGrid has two C++ signatures: one taking an explicit blobDetector
+# + CirclesGridFinderParameters, one with defaults — hence four overloads.
+@_typing.overload
+def findCirclesGrid(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, flags: int, blobDetector: cv2.typing.FeatureDetector, parameters: CirclesGridFinderParameters, centers: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def findCirclesGrid(image: UMat, patternSize: cv2.typing.Size, flags: int, blobDetector: cv2.typing.FeatureDetector, parameters: CirclesGridFinderParameters, centers: UMat | None = ...) -> tuple[bool, UMat]: ...
+@_typing.overload
+def findCirclesGrid(image: cv2.typing.MatLike, patternSize: cv2.typing.Size, centers: cv2.typing.MatLike | None = ..., flags: int = ..., blobDetector: cv2.typing.FeatureDetector = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def findCirclesGrid(image: UMat, patternSize: cv2.typing.Size, centers: UMat | None = ..., flags: int = ..., blobDetector: cv2.typing.FeatureDetector = ...) -> tuple[bool, UMat]: ...
+
+@_typing.overload
+def findContours(image: cv2.typing.MatLike, mode: int, method: int, contours: _typing.Sequence[cv2.typing.MatLike] | None = ..., hierarchy: cv2.typing.MatLike | None = ..., offset: cv2.typing.Point = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+@_typing.overload
+def findContours(image: UMat, mode: int, method: int, contours: _typing.Sequence[UMat] | None = ..., hierarchy: UMat | None = ..., offset: cv2.typing.Point = ...) -> tuple[_typing.Sequence[UMat], UMat]: ...
+
+# findContoursLinkRuns: with-hierarchy and without-hierarchy variants, each in
+# MatLike and UMat flavors (four overloads total).
+@_typing.overload
+def findContoursLinkRuns(image: cv2.typing.MatLike, contours: _typing.Sequence[cv2.typing.MatLike] | None = ..., hierarchy: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+@_typing.overload
+def findContoursLinkRuns(image: UMat, contours: _typing.Sequence[UMat] | None = ..., hierarchy: UMat | None = ...) -> tuple[_typing.Sequence[UMat], UMat]: ...
+@_typing.overload
+def findContoursLinkRuns(image: cv2.typing.MatLike, contours: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+@_typing.overload
+def findContoursLinkRuns(image: UMat, contours: _typing.Sequence[UMat] | None = ...) -> _typing.Sequence[UMat]: ...
+
+# --- Epipolar-geometry estimators (essential / fundamental / homography) ---
+# NOTE(review): in the UMat overloads below, the first tuple element (the
+# estimated matrix) is declared cv2.typing.MatLike rather than UMat while the
+# mask is UMat. This matches the stub generator's output — confirm against the
+# bindings before "fixing".
+@_typing.overload
+def findEssentialMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, method: int = ..., prob: float = ..., threshold: float = ..., maxIters: int = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findEssentialMat(points1: UMat, points2: UMat, cameraMatrix: UMat, method: int = ..., prob: float = ..., threshold: float = ..., maxIters: int = ..., mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findEssentialMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, focal: float = ..., pp: cv2.typing.Point2d = ..., method: int = ..., prob: float = ..., threshold: float = ..., maxIters: int = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findEssentialMat(points1: UMat, points2: UMat, focal: float = ..., pp: cv2.typing.Point2d = ..., method: int = ..., prob: float = ..., threshold: float = ..., maxIters: int = ..., mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findEssentialMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, method: int = ..., prob: float = ..., threshold: float = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findEssentialMat(points1: UMat, points2: UMat, cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, method: int = ..., prob: float = ..., threshold: float = ..., mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findEssentialMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, dist_coeff1: cv2.typing.MatLike, dist_coeff2: cv2.typing.MatLike, params: UsacParams, mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findEssentialMat(points1: UMat, points2: UMat, cameraMatrix1: UMat, cameraMatrix2: UMat, dist_coeff1: UMat, dist_coeff2: UMat, params: UsacParams, mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+
+@_typing.overload
+def findFundamentalMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, method: int, ransacReprojThreshold: float, confidence: float, maxIters: int, mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findFundamentalMat(points1: UMat, points2: UMat, method: int, ransacReprojThreshold: float, confidence: float, maxIters: int, mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findFundamentalMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, method: int = ..., ransacReprojThreshold: float = ..., confidence: float = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findFundamentalMat(points1: UMat, points2: UMat, method: int = ..., ransacReprojThreshold: float = ..., confidence: float = ..., mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findFundamentalMat(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, params: UsacParams, mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findFundamentalMat(points1: UMat, points2: UMat, params: UsacParams, mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+
+@_typing.overload
+def findHomography(srcPoints: cv2.typing.MatLike, dstPoints: cv2.typing.MatLike, method: int = ..., ransacReprojThreshold: float = ..., mask: cv2.typing.MatLike | None = ..., maxIters: int = ..., confidence: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findHomography(srcPoints: UMat, dstPoints: UMat, method: int = ..., ransacReprojThreshold: float = ..., mask: UMat | None = ..., maxIters: int = ..., confidence: float = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+@_typing.overload
+def findHomography(srcPoints: cv2.typing.MatLike, dstPoints: cv2.typing.MatLike, params: UsacParams, mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def findHomography(srcPoints: UMat, dstPoints: UMat, params: UsacParams, mask: UMat | None = ...) -> tuple[cv2.typing.MatLike, UMat]: ...
+
+# --- Non-zero lookup, ECC alignment, ellipse/line fitting, flips ---
+@_typing.overload
+def findNonZero(src: cv2.typing.MatLike, idx: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def findNonZero(src: UMat, idx: UMat | None = ...) -> UMat: ...
+
+# findTransformECC: full form with mandatory inputMask/gaussFiltSize, plus a
+# defaulted legacy form; returns (correlation coefficient, warpMatrix).
+@_typing.overload
+def findTransformECC(templateImage: cv2.typing.MatLike, inputImage: cv2.typing.MatLike, warpMatrix: cv2.typing.MatLike, motionType: int, criteria: cv2.typing.TermCriteria, inputMask: cv2.typing.MatLike, gaussFiltSize: int) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def findTransformECC(templateImage: UMat, inputImage: UMat, warpMatrix: UMat, motionType: int, criteria: cv2.typing.TermCriteria, inputMask: UMat, gaussFiltSize: int) -> tuple[float, UMat]: ...
+@_typing.overload
+def findTransformECC(templateImage: cv2.typing.MatLike, inputImage: cv2.typing.MatLike, warpMatrix: cv2.typing.MatLike, motionType: int = ..., criteria: cv2.typing.TermCriteria = ..., inputMask: cv2.typing.MatLike | None = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def findTransformECC(templateImage: UMat, inputImage: UMat, warpMatrix: UMat, motionType: int = ..., criteria: cv2.typing.TermCriteria = ..., inputMask: UMat | None = ...) -> tuple[float, UMat]: ...
+
+# Ellipse fitters all return a plain RotatedRect regardless of input flavor.
+@_typing.overload
+def fitEllipse(points: cv2.typing.MatLike) -> cv2.typing.RotatedRect: ...
+@_typing.overload
+def fitEllipse(points: UMat) -> cv2.typing.RotatedRect: ...
+
+@_typing.overload
+def fitEllipseAMS(points: cv2.typing.MatLike) -> cv2.typing.RotatedRect: ...
+@_typing.overload
+def fitEllipseAMS(points: UMat) -> cv2.typing.RotatedRect: ...
+
+@_typing.overload
+def fitEllipseDirect(points: cv2.typing.MatLike) -> cv2.typing.RotatedRect: ...
+@_typing.overload
+def fitEllipseDirect(points: UMat) -> cv2.typing.RotatedRect: ...
+
+@_typing.overload
+def fitLine(points: cv2.typing.MatLike, distType: int, param: float, reps: float, aeps: float, line: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def fitLine(points: UMat, distType: int, param: float, reps: float, aeps: float, line: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def flip(src: cv2.typing.MatLike, flipCode: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def flip(src: UMat, flipCode: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def flipND(src: cv2.typing.MatLike, axis: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def flipND(src: UMat, axis: int, dst: UMat | None = ...) -> UMat: ...
+
+# --- Flood fill, generalized matrix multiply, affine transform estimation ---
+# floodFill returns (area, image, mask, rect); getAffineTransform always
+# yields a MatLike even for UMat inputs (per the generated stub).
+@_typing.overload
+def floodFill(image: cv2.typing.MatLike, mask: cv2.typing.MatLike, seedPoint: cv2.typing.Point, newVal: cv2.typing.Scalar, loDiff: cv2.typing.Scalar = ..., upDiff: cv2.typing.Scalar = ..., flags: int = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.Rect]: ...
+@_typing.overload
+def floodFill(image: UMat, mask: UMat, seedPoint: cv2.typing.Point, newVal: cv2.typing.Scalar, loDiff: cv2.typing.Scalar = ..., upDiff: cv2.typing.Scalar = ..., flags: int = ...) -> tuple[int, UMat, UMat, cv2.typing.Rect]: ...
+
+@_typing.overload
+def gemm(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, alpha: float, src3: cv2.typing.MatLike, beta: float, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def gemm(src1: UMat, src2: UMat, alpha: float, src3: UMat, beta: float, dst: UMat | None = ..., flags: int = ...) -> UMat: ...
+
+@_typing.overload
+def getAffineTransform(src: cv2.typing.MatLike, dst: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def getAffineTransform(src: UMat, dst: UMat) -> cv2.typing.MatLike: ...
+
+# --- Simple (non-overloaded) introspection and kernel-builder stubs ---
+def getBuildInformation() -> str: ...
+
+def getCPUFeaturesLine() -> str: ...
+
+def getCPUTickCount() -> int: ...
+
+@_typing.overload
+def getDefaultNewCameraMatrix(cameraMatrix: cv2.typing.MatLike, imgsize: cv2.typing.Size = ..., centerPrincipalPoint: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def getDefaultNewCameraMatrix(cameraMatrix: UMat, imgsize: cv2.typing.Size = ..., centerPrincipalPoint: bool = ...) -> cv2.typing.MatLike: ...
+
+# getDerivKernels returns the (kx, ky) separable kernel pair.
+@_typing.overload
+def getDerivKernels(dx: int, dy: int, ksize: int, kx: cv2.typing.MatLike | None = ..., ky: cv2.typing.MatLike | None = ..., normalize: bool = ..., ktype: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def getDerivKernels(dx: int, dy: int, ksize: int, kx: UMat | None = ..., ky: UMat | None = ..., normalize: bool = ..., ktype: int = ...) -> tuple[UMat, UMat]: ...
+
+def getFontScaleFromHeight(fontFace: int, pixelHeight: int, thickness: int = ...) -> float: ...
+
+def getGaborKernel(ksize: cv2.typing.Size, sigma: float, theta: float, lambd: float, gamma: float, psi: float = ..., ktype: int = ...) -> cv2.typing.MatLike: ...
+
+def getGaussianKernel(ksize: int, sigma: float, ktype: int = ...) -> cv2.typing.MatLike: ...
+
+def getHardwareFeatureName(feature: int) -> str: ...
+
+def getLogLevel() -> int: ...
+
+def getNumThreads() -> int: ...
+
+def getNumberOfCPUs() -> int: ...
+
+def getOptimalDFTSize(vecsize: int) -> int: ...
+
+# --- Camera-matrix, transform, text, timing, and window query stubs ---
+@_typing.overload
+def getOptimalNewCameraMatrix(cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, imageSize: cv2.typing.Size, alpha: float, newImgSize: cv2.typing.Size = ..., centerPrincipalPoint: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.Rect]: ...
+@_typing.overload
+def getOptimalNewCameraMatrix(cameraMatrix: UMat, distCoeffs: UMat, imageSize: cv2.typing.Size, alpha: float, newImgSize: cv2.typing.Size = ..., centerPrincipalPoint: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.Rect]: ...
+
+@_typing.overload
+def getPerspectiveTransform(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, solveMethod: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def getPerspectiveTransform(src: UMat, dst: UMat, solveMethod: int = ...) -> cv2.typing.MatLike: ...
+
+@_typing.overload
+def getRectSubPix(image: cv2.typing.MatLike, patchSize: cv2.typing.Size, center: cv2.typing.Point2f, patch: cv2.typing.MatLike | None = ..., patchType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def getRectSubPix(image: UMat, patchSize: cv2.typing.Size, center: cv2.typing.Point2f, patch: UMat | None = ..., patchType: int = ...) -> UMat: ...
+
+def getRotationMatrix2D(center: cv2.typing.Point2f, angle: float, scale: float) -> cv2.typing.MatLike: ...
+
+def getStructuringElement(shape: int, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ...) -> cv2.typing.MatLike: ...
+
+# getTextSize returns (size, baseLine).
+def getTextSize(text: str, fontFace: int, fontScale: float, thickness: int) -> tuple[cv2.typing.Size, int]: ...
+
+def getThreadNum() -> int: ...
+
+def getTickCount() -> int: ...
+
+def getTickFrequency() -> float: ...
+
+def getTrackbarPos(trackbarname: str, winname: str) -> int: ...
+
+def getValidDisparityROI(roi1: cv2.typing.Rect, roi2: cv2.typing.Rect, minDisparity: int, numberOfDisparities: int, blockSize: int) -> cv2.typing.Rect: ...
+
+def getVersionMajor() -> int: ...
+
+def getVersionMinor() -> int: ...
+
+def getVersionRevision() -> int: ...
+
+def getVersionString() -> str: ...
+
+def getWindowImageRect(winname: str) -> cv2.typing.Rect: ...
+
+def getWindowProperty(winname: str, prop_id: int) -> float: ...
+
+# --- Corner detection, GrabCut segmentation, misc predicates, hconcat ---
+# goodFeaturesToTrack: defaulted form plus the gradientSize form where mask/
+# blockSize become positional — hence four overloads.
+@_typing.overload
+def goodFeaturesToTrack(image: cv2.typing.MatLike, maxCorners: int, qualityLevel: float, minDistance: float, corners: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def goodFeaturesToTrack(image: UMat, maxCorners: int, qualityLevel: float, minDistance: float, corners: UMat | None = ..., mask: UMat | None = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> UMat: ...
+@_typing.overload
+def goodFeaturesToTrack(image: cv2.typing.MatLike, maxCorners: int, qualityLevel: float, minDistance: float, mask: cv2.typing.MatLike, blockSize: int, gradientSize: int, corners: cv2.typing.MatLike | None = ..., useHarrisDetector: bool = ..., k: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def goodFeaturesToTrack(image: UMat, maxCorners: int, qualityLevel: float, minDistance: float, mask: UMat, blockSize: int, gradientSize: int, corners: UMat | None = ..., useHarrisDetector: bool = ..., k: float = ...) -> UMat: ...
+
+@_typing.overload
+def goodFeaturesToTrackWithQuality(image: cv2.typing.MatLike, maxCorners: int, qualityLevel: float, minDistance: float, mask: cv2.typing.MatLike, corners: cv2.typing.MatLike | None = ..., cornersQuality: cv2.typing.MatLike | None = ..., blockSize: int = ..., gradientSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def goodFeaturesToTrackWithQuality(image: UMat, maxCorners: int, qualityLevel: float, minDistance: float, mask: UMat, corners: UMat | None = ..., cornersQuality: UMat | None = ..., blockSize: int = ..., gradientSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def grabCut(img: cv2.typing.MatLike, mask: cv2.typing.MatLike, rect: cv2.typing.Rect, bgdModel: cv2.typing.MatLike, fgdModel: cv2.typing.MatLike, iterCount: int, mode: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def grabCut(img: UMat, mask: UMat, rect: cv2.typing.Rect, bgdModel: UMat, fgdModel: UMat, iterCount: int, mode: int = ...) -> tuple[UMat, UMat, UMat]: ...
+
+def groupRectangles(rectList: _typing.Sequence[cv2.typing.Rect], groupThreshold: int, eps: float = ...) -> tuple[_typing.Sequence[cv2.typing.Rect], _typing.Sequence[int]]: ...
+
+@_typing.overload
+def hasNonZero(src: cv2.typing.MatLike) -> bool: ...
+@_typing.overload
+def hasNonZero(src: UMat) -> bool: ...
+
+def haveImageReader(filename: str) -> bool: ...
+
+def haveImageWriter(filename: str) -> bool: ...
+
+def haveOpenVX() -> bool: ...
+
+@_typing.overload
+def hconcat(src: _typing.Sequence[cv2.typing.MatLike], dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def hconcat(src: _typing.Sequence[UMat], dst: UMat | None = ...) -> UMat: ...
+
+# --- Inverse transforms, seamless-clone helper, and image decode/read ---
+@_typing.overload
+def idct(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def idct(src: UMat, dst: UMat | None = ..., flags: int = ...) -> UMat: ...
+
+@_typing.overload
+def idft(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ..., nonzeroRows: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def idft(src: UMat, dst: UMat | None = ..., flags: int = ..., nonzeroRows: int = ...) -> UMat: ...
+
+@_typing.overload
+def illuminationChange(src: cv2.typing.MatLike, mask: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., alpha: float = ..., beta: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def illuminationChange(src: UMat, mask: UMat, dst: UMat | None = ..., alpha: float = ..., beta: float = ...) -> UMat: ...
+
+def imcount(filename: str, flags: int = ...) -> int: ...
+
+# Decode/encode/read stubs: decoded pages are always MatLike even when the
+# encoded buffer is a UMat; imencode returns (ok, uint8 ndarray buffer).
+@_typing.overload
+def imdecode(buf: cv2.typing.MatLike, flags: int) -> cv2.typing.MatLike: ...
+@_typing.overload
+def imdecode(buf: UMat, flags: int) -> cv2.typing.MatLike: ...
+
+@_typing.overload
+def imdecodemulti(buf: cv2.typing.MatLike, flags: int, mats: _typing.Sequence[cv2.typing.MatLike] | None = ..., range: cv2.typing.Range = ...) -> tuple[bool, _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def imdecodemulti(buf: UMat, flags: int, mats: _typing.Sequence[cv2.typing.MatLike] | None = ..., range: cv2.typing.Range = ...) -> tuple[bool, _typing.Sequence[cv2.typing.MatLike]]: ...
+
+@_typing.overload
+def imencode(ext: str, img: cv2.typing.MatLike, params: _typing.Sequence[int] = ...) -> tuple[bool, numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]]]: ...
+@_typing.overload
+def imencode(ext: str, img: UMat, params: _typing.Sequence[int] = ...) -> tuple[bool, numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]]]: ...
+
+@_typing.overload
+def imread(filename: str, flags: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def imread(filename: str, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def imread(filename: str, dst: UMat | None = ..., flags: int = ...) -> UMat: ...
+
+@_typing.overload
+def imreadmulti(filename: str, mats: _typing.Sequence[cv2.typing.MatLike] | None = ..., flags: int = ...) -> tuple[bool, _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def imreadmulti(filename: str, start: int, count: int, mats: _typing.Sequence[cv2.typing.MatLike] | None = ..., flags: int = ...) -> tuple[bool, _typing.Sequence[cv2.typing.MatLike]]: ...
+
+# --- Display, image write, range test, rectification-map init, inpainting ---
+# imshow additionally accepts cv2.cuda.GpuMat (three overloads).
+@_typing.overload
+def imshow(winname: str, mat: cv2.typing.MatLike) -> None: ...
+@_typing.overload
+def imshow(winname: str, mat: cv2.cuda.GpuMat) -> None: ...
+@_typing.overload
+def imshow(winname: str, mat: UMat) -> None: ...
+
+@_typing.overload
+def imwrite(filename: str, img: cv2.typing.MatLike, params: _typing.Sequence[int] = ...) -> bool: ...
+@_typing.overload
+def imwrite(filename: str, img: UMat, params: _typing.Sequence[int] = ...) -> bool: ...
+
+@_typing.overload
+def imwritemulti(filename: str, img: _typing.Sequence[cv2.typing.MatLike], params: _typing.Sequence[int] = ...) -> bool: ...
+@_typing.overload
+def imwritemulti(filename: str, img: _typing.Sequence[UMat], params: _typing.Sequence[int] = ...) -> bool: ...
+
+@_typing.overload
+def inRange(src: cv2.typing.MatLike, lowerb: cv2.typing.MatLike, upperb: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def inRange(src: UMat, lowerb: UMat, upperb: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def initCameraMatrix2D(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, aspectRatio: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def initCameraMatrix2D(objectPoints: _typing.Sequence[UMat], imagePoints: _typing.Sequence[UMat], imageSize: cv2.typing.Size, aspectRatio: float = ...) -> cv2.typing.MatLike: ...
+
+# Both rectification-map initializers return the (map1, map2) pair.
+@_typing.overload
+def initInverseRectificationMap(cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, R: cv2.typing.MatLike, newCameraMatrix: cv2.typing.MatLike, size: cv2.typing.Size, m1type: int, map1: cv2.typing.MatLike | None = ..., map2: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def initInverseRectificationMap(cameraMatrix: UMat, distCoeffs: UMat, R: UMat, newCameraMatrix: UMat, size: cv2.typing.Size, m1type: int, map1: UMat | None = ..., map2: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def initUndistortRectifyMap(cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, R: cv2.typing.MatLike, newCameraMatrix: cv2.typing.MatLike, size: cv2.typing.Size, m1type: int, map1: cv2.typing.MatLike | None = ..., map2: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def initUndistortRectifyMap(cameraMatrix: UMat, distCoeffs: UMat, R: UMat, newCameraMatrix: UMat, size: cv2.typing.Size, m1type: int, map1: UMat | None = ..., map2: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def inpaint(src: cv2.typing.MatLike, inpaintMask: cv2.typing.MatLike, inpaintRadius: float, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def inpaint(src: UMat, inpaintMask: UMat, inpaintRadius: float, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+# --- Channel insertion, integral images, convex intersection, inversion,
+# --- convexity test, and k-means clustering stubs ---
+@_typing.overload
+def insertChannel(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, coi: int) -> cv2.typing.MatLike: ...
+@_typing.overload
+def insertChannel(src: UMat, dst: UMat, coi: int) -> UMat: ...
+
+# integral / integral2 / integral3 expose the 1-, 2-, and 3-output C++
+# variants (sum; sum+sqsum; sum+sqsum+tilted) as separate Python names.
+@_typing.overload
+def integral(src: cv2.typing.MatLike, sum: cv2.typing.MatLike | None = ..., sdepth: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def integral(src: UMat, sum: UMat | None = ..., sdepth: int = ...) -> UMat: ...
+
+@_typing.overload
+def integral2(src: cv2.typing.MatLike, sum: cv2.typing.MatLike | None = ..., sqsum: cv2.typing.MatLike | None = ..., sdepth: int = ..., sqdepth: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def integral2(src: UMat, sum: UMat | None = ..., sqsum: UMat | None = ..., sdepth: int = ..., sqdepth: int = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def integral3(src: cv2.typing.MatLike, sum: cv2.typing.MatLike | None = ..., sqsum: cv2.typing.MatLike | None = ..., tilted: cv2.typing.MatLike | None = ..., sdepth: int = ..., sqdepth: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def integral3(src: UMat, sum: UMat | None = ..., sqsum: UMat | None = ..., tilted: UMat | None = ..., sdepth: int = ..., sqdepth: int = ...) -> tuple[UMat, UMat, UMat]: ...
+
+@_typing.overload
+def intersectConvexConvex(p1: cv2.typing.MatLike, p2: cv2.typing.MatLike, p12: cv2.typing.MatLike | None = ..., handleNested: bool = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def intersectConvexConvex(p1: UMat, p2: UMat, p12: UMat | None = ..., handleNested: bool = ...) -> tuple[float, UMat]: ...
+
+@_typing.overload
+def invert(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def invert(src: UMat, dst: UMat | None = ..., flags: int = ...) -> tuple[float, UMat]: ...
+
+@_typing.overload
+def invertAffineTransform(M: cv2.typing.MatLike, iM: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def invertAffineTransform(M: UMat, iM: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def isContourConvex(contour: cv2.typing.MatLike) -> bool: ...
+@_typing.overload
+def isContourConvex(contour: UMat) -> bool: ...
+
+# kmeans returns (compactness, bestLabels, centers).
+@_typing.overload
+def kmeans(data: cv2.typing.MatLike, K: int, bestLabels: cv2.typing.MatLike, criteria: cv2.typing.TermCriteria, attempts: int, flags: int, centers: cv2.typing.MatLike | None = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def kmeans(data: UMat, K: int, bestLabels: UMat, criteria: cv2.typing.TermCriteria, attempts: int, flags: int, centers: UMat | None = ...) -> tuple[float, UMat, UMat]: ...
+
+# --- Drawing, polar remapping, element-wise math, and matching stubs ---
+@_typing.overload
+def line(img: cv2.typing.MatLike, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def line(img: UMat, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+
+@_typing.overload
+def linearPolar(src: cv2.typing.MatLike, center: cv2.typing.Point2f, maxRadius: float, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def linearPolar(src: UMat, center: cv2.typing.Point2f, maxRadius: float, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def log(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def log(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def logPolar(src: cv2.typing.MatLike, center: cv2.typing.Point2f, M: float, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def logPolar(src: UMat, center: cv2.typing.Point2f, M: float, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+# The third parameter deliberately shares the function's name ("magnitude"),
+# mirroring the C++ output-argument name.
+@_typing.overload
+def magnitude(x: cv2.typing.MatLike, y: cv2.typing.MatLike, magnitude: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def magnitude(x: UMat, y: UMat, magnitude: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def matMulDeriv(A: cv2.typing.MatLike, B: cv2.typing.MatLike, dABdA: cv2.typing.MatLike | None = ..., dABdB: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def matMulDeriv(A: UMat, B: UMat, dABdA: UMat | None = ..., dABdB: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def matchShapes(contour1: cv2.typing.MatLike, contour2: cv2.typing.MatLike, method: int, parameter: float) -> float: ...
+@_typing.overload
+def matchShapes(contour1: UMat, contour2: UMat, method: int, parameter: float) -> float: ...
+
+@_typing.overload
+def matchTemplate(image: cv2.typing.MatLike, templ: cv2.typing.MatLike, method: int, result: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def matchTemplate(image: UMat, templ: UMat, method: int, result: UMat | None = ..., mask: UMat | None = ...) -> UMat: ...
+
+# cv2.max / cv2.mean intentionally shadow the builtins' names at module level.
+@_typing.overload
+def max(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def max(src1: UMat, src2: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def mean(src: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> cv2.typing.Scalar: ...
+@_typing.overload
+def mean(src: UMat, mask: UMat | None = ...) -> cv2.typing.Scalar: ...
+
+# --- Tracking, statistics, blur, channel merge, and min-geometry stubs ---
+@_typing.overload
+def meanShift(probImage: cv2.typing.MatLike, window: cv2.typing.Rect, criteria: cv2.typing.TermCriteria) -> tuple[int, cv2.typing.Rect]: ...
+@_typing.overload
+def meanShift(probImage: UMat, window: cv2.typing.Rect, criteria: cv2.typing.TermCriteria) -> tuple[int, cv2.typing.Rect]: ...
+
+@_typing.overload
+def meanStdDev(src: cv2.typing.MatLike, mean: cv2.typing.MatLike | None = ..., stddev: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def meanStdDev(src: UMat, mean: UMat | None = ..., stddev: UMat | None = ..., mask: UMat | None = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def medianBlur(src: cv2.typing.MatLike, ksize: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def medianBlur(src: UMat, ksize: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def merge(mv: _typing.Sequence[cv2.typing.MatLike], dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def merge(mv: _typing.Sequence[UMat], dst: UMat | None = ...) -> UMat: ...
+
+# cv2.min intentionally shadows the builtin name at module level.
+@_typing.overload
+def min(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def min(src1: UMat, src2: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def minAreaRect(points: cv2.typing.MatLike) -> cv2.typing.RotatedRect: ...
+@_typing.overload
+def minAreaRect(points: UMat) -> cv2.typing.RotatedRect: ...
+
+# minEnclosingCircle returns (center, radius).
+@_typing.overload
+def minEnclosingCircle(points: cv2.typing.MatLike) -> tuple[cv2.typing.Point2f, float]: ...
+@_typing.overload
+def minEnclosingCircle(points: UMat) -> tuple[cv2.typing.Point2f, float]: ...
+
+@_typing.overload
+def minEnclosingTriangle(points: cv2.typing.MatLike, triangle: cv2.typing.MatLike | None = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def minEnclosingTriangle(points: UMat, triangle: UMat | None = ...) -> tuple[float, UMat]: ...
+
+# minMaxLoc returns (minVal, maxVal, minLoc, maxLoc).
+@_typing.overload
+def minMaxLoc(src: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> tuple[float, float, cv2.typing.Point, cv2.typing.Point]: ...
+@_typing.overload
+def minMaxLoc(src: UMat, mask: UMat | None = ...) -> tuple[float, float, cv2.typing.Point, cv2.typing.Point]: ...
+
+@_typing.overload
+def mixChannels(src: _typing.Sequence[cv2.typing.MatLike], dst: _typing.Sequence[cv2.typing.MatLike], fromTo: _typing.Sequence[int]) -> _typing.Sequence[cv2.typing.MatLike]: ...
+@_typing.overload
+def mixChannels(src: _typing.Sequence[UMat], dst: _typing.Sequence[UMat], fromTo: _typing.Sequence[int]) -> _typing.Sequence[UMat]: ...
+
+@_typing.overload
+def moments(array: cv2.typing.MatLike, binaryImage: bool = ...) -> cv2.typing.Moments: ...
+@_typing.overload
+def moments(array: UMat, binaryImage: bool = ...) -> cv2.typing.Moments: ...
+
+@_typing.overload
+def morphologyEx(src: cv2.typing.MatLike, op: int, kernel: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def morphologyEx(src: UMat, op: int, kernel: UMat, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+# Window-management and array-arithmetic stubs. Array functions carry the
+# usual MatLike/UMat overload pair; GUI functions take only plain scalars.
+def moveWindow(winname: str, x: int, y: int) -> None: ...
+
+@_typing.overload
+def mulSpectrums(a: cv2.typing.MatLike, b: cv2.typing.MatLike, flags: int, c: cv2.typing.MatLike | None = ..., conjB: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def mulSpectrums(a: UMat, b: UMat, flags: int, c: UMat | None = ..., conjB: bool = ...) -> UMat: ...
+
+@_typing.overload
+def mulTransposed(src: cv2.typing.MatLike, aTa: bool, dst: cv2.typing.MatLike | None = ..., delta: cv2.typing.MatLike | None = ..., scale: float = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def mulTransposed(src: UMat, aTa: bool, dst: UMat | None = ..., delta: UMat | None = ..., scale: float = ..., dtype: int = ...) -> UMat: ...
+
+@_typing.overload
+def multiply(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., scale: float = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def multiply(src1: UMat, src2: UMat, dst: UMat | None = ..., scale: float = ..., dtype: int = ...) -> UMat: ...
+
+def namedWindow(winname: str, flags: int = ...) -> None: ...
+
+# norm has two arities: a single-array form and a two-array form; each comes
+# in MatLike and UMat variants (four overloads total).
+@_typing.overload
+def norm(src1: cv2.typing.MatLike, normType: int = ..., mask: cv2.typing.MatLike | None = ...) -> float: ...
+@_typing.overload
+def norm(src1: UMat, normType: int = ..., mask: UMat | None = ...) -> float: ...
+@_typing.overload
+def norm(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, normType: int = ..., mask: cv2.typing.MatLike | None = ...) -> float: ...
+@_typing.overload
+def norm(src1: UMat, src2: UMat, normType: int = ..., mask: UMat | None = ...) -> float: ...
+
+# Note: unlike most functions here, normalize's dst parameter is required.
+@_typing.overload
+def normalize(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, alpha: float = ..., beta: float = ..., norm_type: int = ..., dtype: int = ..., mask: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def normalize(src: UMat, dst: UMat, alpha: float = ..., beta: float = ..., norm_type: int = ..., dtype: int = ..., mask: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def patchNaNs(a: cv2.typing.MatLike, val: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def patchNaNs(a: UMat, val: float = ...) -> UMat: ...
+
+# Signature-only stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def pencilSketch(src: cv2.typing.MatLike, dst1: cv2.typing.MatLike | None = ..., dst2: cv2.typing.MatLike | None = ..., sigma_s: float = ..., sigma_r: float = ..., shade_factor: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def pencilSketch(src: UMat, dst1: UMat | None = ..., dst2: UMat | None = ..., sigma_s: float = ..., sigma_r: float = ..., shade_factor: float = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def perspectiveTransform(src: cv2.typing.MatLike, m: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def perspectiveTransform(src: UMat, m: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def phase(x: cv2.typing.MatLike, y: cv2.typing.MatLike, angle: cv2.typing.MatLike | None = ..., angleInDegrees: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def phase(x: UMat, y: UMat, angle: UMat | None = ..., angleInDegrees: bool = ...) -> UMat: ...
+
+@_typing.overload
+def phaseCorrelate(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, window: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.Point2d, float]: ...
+@_typing.overload
+def phaseCorrelate(src1: UMat, src2: UMat, window: UMat | None = ...) -> tuple[cv2.typing.Point2d, float]: ...
+
+@_typing.overload
+def pointPolygonTest(contour: cv2.typing.MatLike, pt: cv2.typing.Point2f, measureDist: bool) -> float: ...
+@_typing.overload
+def pointPolygonTest(contour: UMat, pt: cv2.typing.Point2f, measureDist: bool) -> float: ...
+
+@_typing.overload
+def polarToCart(magnitude: cv2.typing.MatLike, angle: cv2.typing.MatLike, x: cv2.typing.MatLike | None = ..., y: cv2.typing.MatLike | None = ..., angleInDegrees: bool = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def polarToCart(magnitude: UMat, angle: UMat, x: UMat | None = ..., y: UMat | None = ..., angleInDegrees: bool = ...) -> tuple[UMat, UMat]: ...
+
+def pollKey() -> int: ...
+
+@_typing.overload
+def polylines(img: cv2.typing.MatLike, pts: _typing.Sequence[cv2.typing.MatLike], isClosed: bool, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def polylines(img: UMat, pts: _typing.Sequence[UMat], isClosed: bool, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+
+@_typing.overload
+def pow(src: cv2.typing.MatLike, power: float, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def pow(src: UMat, power: float, dst: UMat | None = ...) -> UMat: ...
+
+# Signature-only stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def preCornerDetect(src: cv2.typing.MatLike, ksize: int, dst: cv2.typing.MatLike | None = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def preCornerDetect(src: UMat, ksize: int, dst: UMat | None = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def projectPoints(objectPoints: cv2.typing.MatLike, rvec: cv2.typing.MatLike, tvec: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike | None = ..., jacobian: cv2.typing.MatLike | None = ..., aspectRatio: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def projectPoints(objectPoints: UMat, rvec: UMat, tvec: UMat, cameraMatrix: UMat, distCoeffs: UMat, imagePoints: UMat | None = ..., jacobian: UMat | None = ..., aspectRatio: float = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def putText(img: cv2.typing.MatLike, text: str, org: cv2.typing.Point, fontFace: int, fontScale: float, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., bottomLeftOrigin: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def putText(img: UMat, text: str, org: cv2.typing.Point, fontFace: int, fontScale: float, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., bottomLeftOrigin: bool = ...) -> UMat: ...
+
+@_typing.overload
+def pyrDown(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., dstsize: cv2.typing.Size = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def pyrDown(src: UMat, dst: UMat | None = ..., dstsize: cv2.typing.Size = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def pyrMeanShiftFiltering(src: cv2.typing.MatLike, sp: float, sr: float, dst: cv2.typing.MatLike | None = ..., maxLevel: int = ..., termcrit: cv2.typing.TermCriteria = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def pyrMeanShiftFiltering(src: UMat, sp: float, sr: float, dst: UMat | None = ..., maxLevel: int = ..., termcrit: cv2.typing.TermCriteria = ...) -> UMat: ...
+
+@_typing.overload
+def pyrUp(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., dstsize: cv2.typing.Size = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def pyrUp(src: UMat, dst: UMat | None = ..., dstsize: cv2.typing.Size = ..., borderType: int = ...) -> UMat: ...
+
+# For the three random-fill functions below, dst is a required first argument
+# and is also the value returned.
+@_typing.overload
+def randShuffle(dst: cv2.typing.MatLike, iterFactor: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def randShuffle(dst: UMat, iterFactor: float = ...) -> UMat: ...
+
+@_typing.overload
+def randn(dst: cv2.typing.MatLike, mean: cv2.typing.MatLike, stddev: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def randn(dst: UMat, mean: UMat, stddev: UMat) -> UMat: ...
+
+@_typing.overload
+def randu(dst: cv2.typing.MatLike, low: cv2.typing.MatLike, high: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def randu(dst: UMat, low: UMat, high: UMat) -> UMat: ...
+
+def readOpticalFlow(path: str) -> cv2.typing.MatLike: ...
+
+# recoverPose exposes four distinct parameter shapes (x MatLike/UMat = eight
+# overloads): (1) two camera matrices + distortion, (2) essential matrix E +
+# one cameraMatrix, (3) E + focal/pp scalars, (4) E + cameraMatrix +
+# distanceThresh with optional triangulatedPoints output.
+@_typing.overload
+def recoverPose(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, E: cv2.typing.MatLike | None = ..., R: cv2.typing.MatLike | None = ..., t: cv2.typing.MatLike | None = ..., method: int = ..., prob: float = ..., threshold: float = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def recoverPose(points1: UMat, points2: UMat, cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, E: UMat | None = ..., R: UMat | None = ..., t: UMat | None = ..., method: int = ..., prob: float = ..., threshold: float = ..., mask: UMat | None = ...) -> tuple[int, UMat, UMat, UMat, UMat]: ...
+@_typing.overload
+def recoverPose(E: cv2.typing.MatLike, points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, R: cv2.typing.MatLike | None = ..., t: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def recoverPose(E: UMat, points1: UMat, points2: UMat, cameraMatrix: UMat, R: UMat | None = ..., t: UMat | None = ..., mask: UMat | None = ...) -> tuple[int, UMat, UMat, UMat]: ...
+@_typing.overload
+def recoverPose(E: cv2.typing.MatLike, points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, R: cv2.typing.MatLike | None = ..., t: cv2.typing.MatLike | None = ..., focal: float = ..., pp: cv2.typing.Point2d = ..., mask: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def recoverPose(E: UMat, points1: UMat, points2: UMat, R: UMat | None = ..., t: UMat | None = ..., focal: float = ..., pp: cv2.typing.Point2d = ..., mask: UMat | None = ...) -> tuple[int, UMat, UMat, UMat]: ...
+@_typing.overload
+def recoverPose(E: cv2.typing.MatLike, points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distanceThresh: float, R: cv2.typing.MatLike | None = ..., t: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ..., triangulatedPoints: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def recoverPose(E: UMat, points1: UMat, points2: UMat, cameraMatrix: UMat, distanceThresh: float, R: UMat | None = ..., t: UMat | None = ..., mask: UMat | None = ..., triangulatedPoints: UMat | None = ...) -> tuple[int, UMat, UMat, UMat, UMat]: ...
+
+# rectangle accepts either two corner Points or a single Rect.
+@_typing.overload
+def rectangle(img: cv2.typing.MatLike, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def rectangle(img: UMat, pt1: cv2.typing.Point, pt2: cv2.typing.Point, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+@_typing.overload
+def rectangle(img: cv2.typing.MatLike, rec: cv2.typing.Rect, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def rectangle(img: UMat, rec: cv2.typing.Rect, color: cv2.typing.Scalar, thickness: int = ..., lineType: int = ..., shift: int = ...) -> UMat: ...
+
+def rectangleIntersectionArea(a: cv2.typing.Rect2d, b: cv2.typing.Rect2d) -> float: ...
+
+@_typing.overload
+def rectify3Collinear(cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, cameraMatrix3: cv2.typing.MatLike, distCoeffs3: cv2.typing.MatLike, imgpt1: _typing.Sequence[cv2.typing.MatLike], imgpt3: _typing.Sequence[cv2.typing.MatLike], imageSize: cv2.typing.Size, R12: cv2.typing.MatLike, T12: cv2.typing.MatLike, R13: cv2.typing.MatLike, T13: cv2.typing.MatLike, alpha: float, newImgSize: cv2.typing.Size, flags: int, R1: cv2.typing.MatLike | None = ..., R2: cv2.typing.MatLike | None = ..., R3: cv2.typing.MatLike | None = ..., P1: cv2.typing.MatLike | None = ..., P2: cv2.typing.MatLike | None = ..., P3: cv2.typing.MatLike | None = ..., Q: cv2.typing.MatLike | None = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.Rect, cv2.typing.Rect]: ...
+@_typing.overload
+def rectify3Collinear(cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, cameraMatrix3: UMat, distCoeffs3: UMat, imgpt1: _typing.Sequence[UMat], imgpt3: _typing.Sequence[UMat], imageSize: cv2.typing.Size, R12: UMat, T12: UMat, R13: UMat, T13: UMat, alpha: float, newImgSize: cv2.typing.Size, flags: int, R1: UMat | None = ..., R2: UMat | None = ..., R3: UMat | None = ..., P1: UMat | None = ..., P2: UMat | None = ..., P3: UMat | None = ..., Q: UMat | None = ...) -> tuple[float, UMat, UMat, UMat, UMat, UMat, UMat, UMat, cv2.typing.Rect, cv2.typing.Rect]: ...
+
+# Signature-only stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def reduce(src: cv2.typing.MatLike, dim: int, rtype: int, dst: cv2.typing.MatLike | None = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def reduce(src: UMat, dim: int, rtype: int, dst: UMat | None = ..., dtype: int = ...) -> UMat: ...
+
+@_typing.overload
+def reduceArgMax(src: cv2.typing.MatLike, axis: int, dst: cv2.typing.MatLike | None = ..., lastIndex: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def reduceArgMax(src: UMat, axis: int, dst: UMat | None = ..., lastIndex: bool = ...) -> UMat: ...
+
+@_typing.overload
+def reduceArgMin(src: cv2.typing.MatLike, axis: int, dst: cv2.typing.MatLike | None = ..., lastIndex: bool = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def reduceArgMin(src: UMat, axis: int, dst: UMat | None = ..., lastIndex: bool = ...) -> UMat: ...
+
+@_typing.overload
+def remap(src: cv2.typing.MatLike, map1: cv2.typing.MatLike, map2: cv2.typing.MatLike, interpolation: int, dst: cv2.typing.MatLike | None = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def remap(src: UMat, map1: UMat, map2: UMat, interpolation: int, dst: UMat | None = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+@_typing.overload
+def repeat(src: cv2.typing.MatLike, ny: int, nx: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def repeat(src: UMat, ny: int, nx: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def reprojectImageTo3D(disparity: cv2.typing.MatLike, Q: cv2.typing.MatLike, _3dImage: cv2.typing.MatLike | None = ..., handleMissingValues: bool = ..., ddepth: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def reprojectImageTo3D(disparity: UMat, Q: UMat, _3dImage: UMat | None = ..., handleMissingValues: bool = ..., ddepth: int = ...) -> UMat: ...
+
+# Note: dsize is explicitly `Size | None` — None is a valid value here.
+@_typing.overload
+def resize(src: cv2.typing.MatLike, dsize: cv2.typing.Size | None, dst: cv2.typing.MatLike | None = ..., fx: float = ..., fy: float = ..., interpolation: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def resize(src: UMat, dsize: cv2.typing.Size | None, dst: UMat | None = ..., fx: float = ..., fy: float = ..., interpolation: int = ...) -> UMat: ...
+
+# resizeWindow accepts either (width, height) ints or a single Size.
+@_typing.overload
+def resizeWindow(winname: str, width: int, height: int) -> None: ...
+@_typing.overload
+def resizeWindow(winname: str, size: cv2.typing.Size) -> None: ...
+
+@_typing.overload
+def rotate(src: cv2.typing.MatLike, rotateCode: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def rotate(src: UMat, rotateCode: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def rotatedRectangleIntersection(rect1: cv2.typing.RotatedRect, rect2: cv2.typing.RotatedRect, intersectingRegion: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def rotatedRectangleIntersection(rect1: cv2.typing.RotatedRect, rect2: cv2.typing.RotatedRect, intersectingRegion: UMat | None = ...) -> tuple[int, UMat]: ...
+
+# Signature-only stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def sampsonDistance(pt1: cv2.typing.MatLike, pt2: cv2.typing.MatLike, F: cv2.typing.MatLike) -> float: ...
+@_typing.overload
+def sampsonDistance(pt1: UMat, pt2: UMat, F: UMat) -> float: ...
+
+@_typing.overload
+def scaleAdd(src1: cv2.typing.MatLike, alpha: float, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def scaleAdd(src1: UMat, alpha: float, src2: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def seamlessClone(src: cv2.typing.MatLike, dst: cv2.typing.MatLike, mask: cv2.typing.MatLike, p: cv2.typing.Point, flags: int, blend: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def seamlessClone(src: UMat, dst: UMat, mask: UMat, p: cv2.typing.Point, flags: int, blend: UMat | None = ...) -> UMat: ...
+
+# selectROI has forms with and without a leading windowName argument.
+@_typing.overload
+def selectROI(windowName: str, img: cv2.typing.MatLike, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> cv2.typing.Rect: ...
+@_typing.overload
+def selectROI(windowName: str, img: UMat, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> cv2.typing.Rect: ...
+@_typing.overload
+def selectROI(img: cv2.typing.MatLike, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> cv2.typing.Rect: ...
+@_typing.overload
+def selectROI(img: UMat, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> cv2.typing.Rect: ...
+
+@_typing.overload
+def selectROIs(windowName: str, img: cv2.typing.MatLike, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> _typing.Sequence[cv2.typing.Rect]: ...
+@_typing.overload
+def selectROIs(windowName: str, img: UMat, showCrosshair: bool = ..., fromCenter: bool = ..., printNotice: bool = ...) -> _typing.Sequence[cv2.typing.Rect]: ...
+
+@_typing.overload
+def sepFilter2D(src: cv2.typing.MatLike, ddepth: int, kernelX: cv2.typing.MatLike, kernelY: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., delta: float = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def sepFilter2D(src: UMat, ddepth: int, kernelX: UMat, kernelY: UMat, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., delta: float = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def setIdentity(mtx: cv2.typing.MatLike, s: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def setIdentity(mtx: UMat, s: cv2.typing.Scalar = ...) -> UMat: ...
+
+# Global-state setters (no overloads; scalar/str arguments only). Note
+# setLogLevel returns int while the other setters return None.
+def setLogLevel(level: int) -> int: ...
+
+def setNumThreads(nthreads: int) -> None: ...
+
+def setRNGSeed(seed: int) -> None: ...
+
+def setTrackbarMax(trackbarname: str, winname: str, maxval: int) -> None: ...
+
+def setTrackbarMin(trackbarname: str, winname: str, minval: int) -> None: ...
+
+def setTrackbarPos(trackbarname: str, winname: str, pos: int) -> None: ...
+
+def setUseOpenVX(flag: bool) -> None: ...
+
+def setUseOptimized(onoff: bool) -> None: ...
+
+def setWindowProperty(winname: str, prop_id: int, prop_value: float) -> None: ...
+
+def setWindowTitle(winname: str, title: str) -> None: ...
+
+@_typing.overload
+def solve(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+@_typing.overload
+def solve(src1: UMat, src2: UMat, dst: UMat | None = ..., flags: int = ...) -> tuple[bool, UMat]: ...
+
+@_typing.overload
+def solveCubic(coeffs: cv2.typing.MatLike, roots: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def solveCubic(coeffs: UMat, roots: UMat | None = ...) -> tuple[int, UMat]: ...
+
+# solveLP has forms with and without the constr_eps tolerance argument.
+@_typing.overload
+def solveLP(Func: cv2.typing.MatLike, Constr: cv2.typing.MatLike, constr_eps: float, z: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def solveLP(Func: UMat, Constr: UMat, constr_eps: float, z: UMat | None = ...) -> tuple[int, UMat]: ...
+@_typing.overload
+def solveLP(Func: cv2.typing.MatLike, Constr: cv2.typing.MatLike, z: cv2.typing.MatLike | None = ...) -> tuple[int, cv2.typing.MatLike]: ...
+@_typing.overload
+def solveLP(Func: UMat, Constr: UMat, z: UMat | None = ...) -> tuple[int, UMat]: ...
+
+# PnP-family stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def solveP3P(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, flags: int, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[int, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike]]: ...
+@_typing.overload
+def solveP3P(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, flags: int, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ...) -> tuple[int, _typing.Sequence[UMat], _typing.Sequence[UMat]]: ...
+
+@_typing.overload
+def solvePnP(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike | None = ..., tvec: cv2.typing.MatLike | None = ..., useExtrinsicGuess: bool = ..., flags: int = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnP(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat | None = ..., tvec: UMat | None = ..., useExtrinsicGuess: bool = ..., flags: int = ...) -> tuple[bool, UMat, UMat]: ...
+
+# Note: solvePnPGeneric's flags is typed as SolvePnPMethod (an enum declared
+# elsewhere in this stub), unlike solvePnP which takes a plain int.
+@_typing.overload
+def solvePnPGeneric(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., useExtrinsicGuess: bool = ..., flags: SolvePnPMethod = ..., rvec: cv2.typing.MatLike | None = ..., tvec: cv2.typing.MatLike | None = ..., reprojectionError: cv2.typing.MatLike | None = ...) -> tuple[int, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnPGeneric(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., useExtrinsicGuess: bool = ..., flags: SolvePnPMethod = ..., rvec: UMat | None = ..., tvec: UMat | None = ..., reprojectionError: UMat | None = ...) -> tuple[int, _typing.Sequence[UMat], _typing.Sequence[UMat], UMat]: ...
+
+# solvePnPRansac has a flags-based form and a UsacParams-based form; the
+# latter returns one extra array in its result tuple.
+@_typing.overload
+def solvePnPRansac(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike | None = ..., tvec: cv2.typing.MatLike | None = ..., useExtrinsicGuess: bool = ..., iterationsCount: int = ..., reprojectionError: float = ..., confidence: float = ..., inliers: cv2.typing.MatLike | None = ..., flags: int = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnPRansac(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat | None = ..., tvec: UMat | None = ..., useExtrinsicGuess: bool = ..., iterationsCount: int = ..., reprojectionError: float = ..., confidence: float = ..., inliers: UMat | None = ..., flags: int = ...) -> tuple[bool, UMat, UMat, UMat]: ...
+@_typing.overload
+def solvePnPRansac(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike | None = ..., tvec: cv2.typing.MatLike | None = ..., inliers: cv2.typing.MatLike | None = ..., params: UsacParams = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnPRansac(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat | None = ..., tvec: UMat | None = ..., inliers: UMat | None = ..., params: UsacParams = ...) -> tuple[bool, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def solvePnPRefineLM(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike, tvec: cv2.typing.MatLike, criteria: cv2.typing.TermCriteria = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnPRefineLM(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat, tvec: UMat, criteria: cv2.typing.TermCriteria = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def solvePnPRefineVVS(objectPoints: cv2.typing.MatLike, imagePoints: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, rvec: cv2.typing.MatLike, tvec: cv2.typing.MatLike, criteria: cv2.typing.TermCriteria = ..., VVSlambda: float = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePnPRefineVVS(objectPoints: UMat, imagePoints: UMat, cameraMatrix: UMat, distCoeffs: UMat, rvec: UMat, tvec: UMat, criteria: cv2.typing.TermCriteria = ..., VVSlambda: float = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def solvePoly(coeffs: cv2.typing.MatLike, roots: cv2.typing.MatLike | None = ..., maxIters: int = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def solvePoly(coeffs: UMat, roots: UMat | None = ..., maxIters: int = ...) -> tuple[float, UMat]: ...
+
+# Signature-only stubs; MatLike/UMat overload pairs as elsewhere in the file.
+@_typing.overload
+def sort(src: cv2.typing.MatLike, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def sort(src: UMat, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def sortIdx(src: cv2.typing.MatLike, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def sortIdx(src: UMat, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def spatialGradient(src: cv2.typing.MatLike, dx: cv2.typing.MatLike | None = ..., dy: cv2.typing.MatLike | None = ..., ksize: int = ..., borderType: int = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def spatialGradient(src: UMat, dx: UMat | None = ..., dy: UMat | None = ..., ksize: int = ..., borderType: int = ...) -> tuple[UMat, UMat]: ...
+
+@_typing.overload
+def split(m: cv2.typing.MatLike, mv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+@_typing.overload
+def split(m: UMat, mv: _typing.Sequence[UMat] | None = ...) -> _typing.Sequence[UMat]: ...
+
+@_typing.overload
+def sqrBoxFilter(src: cv2.typing.MatLike, ddepth: int, ksize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ..., anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def sqrBoxFilter(src: UMat, ddepth: int, ksize: cv2.typing.Size, dst: UMat | None = ..., anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ...) -> UMat: ...
+
+@_typing.overload
+def sqrt(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def sqrt(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def stackBlur(src: cv2.typing.MatLike, ksize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def stackBlur(src: UMat, ksize: cv2.typing.Size, dst: UMat | None = ...) -> UMat: ...
+
+def startWindowThread() -> int: ...
+
+# stereoCalibrate has two parameter shapes (x MatLike/UMat = four overloads):
+# the second shape makes R and T required inputs and adds an optional
+# perViewErrors output, extending the returned tuple by one array.
+@_typing.overload
+def stereoCalibrate(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints1: _typing.Sequence[cv2.typing.MatLike], imagePoints2: _typing.Sequence[cv2.typing.MatLike], cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, imageSize: cv2.typing.Size, R: cv2.typing.MatLike | None = ..., T: cv2.typing.MatLike | None = ..., E: cv2.typing.MatLike | None = ..., F: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def stereoCalibrate(objectPoints: _typing.Sequence[UMat], imagePoints1: _typing.Sequence[UMat], imagePoints2: _typing.Sequence[UMat], cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, imageSize: cv2.typing.Size, R: UMat | None = ..., T: UMat | None = ..., E: UMat | None = ..., F: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat]: ...
+@_typing.overload
+def stereoCalibrate(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints1: _typing.Sequence[cv2.typing.MatLike], imagePoints2: _typing.Sequence[cv2.typing.MatLike], cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, imageSize: cv2.typing.Size, R: cv2.typing.MatLike, T: cv2.typing.MatLike, E: cv2.typing.MatLike | None = ..., F: cv2.typing.MatLike | None = ..., perViewErrors: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def stereoCalibrate(objectPoints: _typing.Sequence[UMat], imagePoints1: _typing.Sequence[UMat], imagePoints2: _typing.Sequence[UMat], cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, imageSize: cv2.typing.Size, R: UMat, T: UMat, E: UMat | None = ..., F: UMat | None = ..., perViewErrors: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat]: ...
+
+@_typing.overload
+def stereoCalibrateExtended(objectPoints: _typing.Sequence[cv2.typing.MatLike], imagePoints1: _typing.Sequence[cv2.typing.MatLike], imagePoints2: _typing.Sequence[cv2.typing.MatLike], cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, imageSize: cv2.typing.Size, R: cv2.typing.MatLike, T: cv2.typing.MatLike, E: cv2.typing.MatLike | None = ..., F: cv2.typing.MatLike | None = ..., rvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., tvecs: _typing.Sequence[cv2.typing.MatLike] | None = ..., perViewErrors: cv2.typing.MatLike | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+@_typing.overload
+def stereoCalibrateExtended(objectPoints: _typing.Sequence[UMat], imagePoints1: _typing.Sequence[UMat], imagePoints2: _typing.Sequence[UMat], cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, imageSize: cv2.typing.Size, R: UMat, T: UMat, E: UMat | None = ..., F: UMat | None = ..., rvecs: _typing.Sequence[UMat] | None = ..., tvecs: _typing.Sequence[UMat] | None = ..., perViewErrors: UMat | None = ..., flags: int = ..., criteria: cv2.typing.TermCriteria = ...) -> tuple[float, UMat, UMat, UMat, UMat, UMat, UMat, UMat, UMat, _typing.Sequence[UMat], _typing.Sequence[UMat], UMat]: ...
+
+@_typing.overload
+def stereoRectify(cameraMatrix1: cv2.typing.MatLike, distCoeffs1: cv2.typing.MatLike, cameraMatrix2: cv2.typing.MatLike, distCoeffs2: cv2.typing.MatLike, imageSize: cv2.typing.Size, R: cv2.typing.MatLike, T: cv2.typing.MatLike, R1: cv2.typing.MatLike | None = ..., R2: cv2.typing.MatLike | None = ..., P1: cv2.typing.MatLike | None = ..., P2: cv2.typing.MatLike | None = ..., Q: cv2.typing.MatLike | None = ..., flags: int = ..., alpha: float = ..., newImageSize: cv2.typing.Size = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.MatLike, cv2.typing.Rect, cv2.typing.Rect]: ...
+@_typing.overload
+def stereoRectify(cameraMatrix1: UMat, distCoeffs1: UMat, cameraMatrix2: UMat, distCoeffs2: UMat, imageSize: cv2.typing.Size, R: UMat, T: UMat, R1: UMat | None = ..., R2: UMat | None = ..., P1: UMat | None = ..., P2: UMat | None = ..., Q: UMat | None = ..., flags: int = ..., alpha: float = ..., newImageSize: cv2.typing.Size = ...) -> tuple[UMat, UMat, UMat, UMat, UMat, cv2.typing.Rect, cv2.typing.Rect]: ...
+
+@_typing.overload
+def stereoRectifyUncalibrated(points1: cv2.typing.MatLike, points2: cv2.typing.MatLike, F: cv2.typing.MatLike, imgSize: cv2.typing.Size, H1: cv2.typing.MatLike | None = ..., H2: cv2.typing.MatLike | None = ..., threshold: float = ...) -> tuple[bool, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+@_typing.overload
+def stereoRectifyUncalibrated(points1: UMat, points2: UMat, F: UMat, imgSize: cv2.typing.Size, H1: UMat | None = ..., H2: UMat | None = ..., threshold: float = ...) -> tuple[bool, UMat, UMat]: ...
+
+@_typing.overload
+def stylization(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., sigma_s: float = ..., sigma_r: float = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def stylization(src: UMat, dst: UMat | None = ..., sigma_s: float = ..., sigma_r: float = ...) -> UMat: ...
+
+@_typing.overload
+def subtract(src1: cv2.typing.MatLike, src2: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., mask: cv2.typing.MatLike | None = ..., dtype: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def subtract(src1: UMat, src2: UMat, dst: UMat | None = ..., mask: UMat | None = ..., dtype: int = ...) -> UMat: ...
+
+@_typing.overload
+def sumElems(src: cv2.typing.MatLike) -> cv2.typing.Scalar: ...
+@_typing.overload
+def sumElems(src: UMat) -> cv2.typing.Scalar: ...
+
+@_typing.overload
+def textureFlattening(src: cv2.typing.MatLike, mask: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., low_threshold: float = ..., high_threshold: float = ..., kernel_size: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def textureFlattening(src: UMat, mask: UMat, dst: UMat | None = ..., low_threshold: float = ..., high_threshold: float = ..., kernel_size: int = ...) -> UMat: ...
+
+@_typing.overload
+def threshold(src: cv2.typing.MatLike, thresh: float, maxval: float, type: int, dst: cv2.typing.MatLike | None = ...) -> tuple[float, cv2.typing.MatLike]: ...
+@_typing.overload
+def threshold(src: UMat, thresh: float, maxval: float, type: int, dst: UMat | None = ...) -> tuple[float, UMat]: ...
+
+@_typing.overload
+def trace(mtx: cv2.typing.MatLike) -> cv2.typing.Scalar: ...
+@_typing.overload
+def trace(mtx: UMat) -> cv2.typing.Scalar: ...
+
+@_typing.overload
+def transform(src: cv2.typing.MatLike, m: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def transform(src: UMat, m: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def transpose(src: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def transpose(src: UMat, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def transposeND(src: cv2.typing.MatLike, order: _typing.Sequence[int], dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def transposeND(src: UMat, order: _typing.Sequence[int], dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def triangulatePoints(projMatr1: cv2.typing.MatLike, projMatr2: cv2.typing.MatLike, projPoints1: cv2.typing.MatLike, projPoints2: cv2.typing.MatLike, points4D: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def triangulatePoints(projMatr1: UMat, projMatr2: UMat, projPoints1: UMat, projPoints2: UMat, points4D: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def undistort(src: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., newCameraMatrix: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def undistort(src: UMat, cameraMatrix: UMat, distCoeffs: UMat, dst: UMat | None = ..., newCameraMatrix: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def undistortImagePoints(src: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., arg1: cv2.typing.TermCriteria = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def undistortImagePoints(src: UMat, cameraMatrix: UMat, distCoeffs: UMat, dst: UMat | None = ..., arg1: cv2.typing.TermCriteria = ...) -> UMat: ...
+
+@_typing.overload
+def undistortPoints(src: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, dst: cv2.typing.MatLike | None = ..., R: cv2.typing.MatLike | None = ..., P: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def undistortPoints(src: UMat, cameraMatrix: UMat, distCoeffs: UMat, dst: UMat | None = ..., R: UMat | None = ..., P: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def undistortPointsIter(src: cv2.typing.MatLike, cameraMatrix: cv2.typing.MatLike, distCoeffs: cv2.typing.MatLike, R: cv2.typing.MatLike, P: cv2.typing.MatLike, criteria: cv2.typing.TermCriteria, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def undistortPointsIter(src: UMat, cameraMatrix: UMat, distCoeffs: UMat, R: UMat, P: UMat, criteria: cv2.typing.TermCriteria, dst: UMat | None = ...) -> UMat: ...
+
+def useOpenVX() -> bool: ...
+
+def useOptimized() -> bool: ...
+
+@_typing.overload
+def validateDisparity(disparity: cv2.typing.MatLike, cost: cv2.typing.MatLike, minDisparity: int, numberOfDisparities: int, disp12MaxDisp: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def validateDisparity(disparity: UMat, cost: UMat, minDisparity: int, numberOfDisparities: int, disp12MaxDisp: int = ...) -> UMat: ...
+
+@_typing.overload
+def vconcat(src: _typing.Sequence[cv2.typing.MatLike], dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def vconcat(src: _typing.Sequence[UMat], dst: UMat | None = ...) -> UMat: ...
+
+def waitKey(delay: int = ...) -> int: ...
+
+def waitKeyEx(delay: int = ...) -> int: ...
+
+@_typing.overload
+def warpAffine(src: cv2.typing.MatLike, M: cv2.typing.MatLike, dsize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ..., flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def warpAffine(src: UMat, M: UMat, dsize: cv2.typing.Size, dst: UMat | None = ..., flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+@_typing.overload
+def warpPerspective(src: cv2.typing.MatLike, M: cv2.typing.MatLike, dsize: cv2.typing.Size, dst: cv2.typing.MatLike | None = ..., flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def warpPerspective(src: UMat, M: UMat, dsize: cv2.typing.Size, dst: UMat | None = ..., flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> UMat: ...
+
+@_typing.overload
+def warpPolar(src: cv2.typing.MatLike, dsize: cv2.typing.Size, center: cv2.typing.Point2f, maxRadius: float, flags: int, dst: cv2.typing.MatLike | None = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def warpPolar(src: UMat, dsize: cv2.typing.Size, center: cv2.typing.Point2f, maxRadius: float, flags: int, dst: UMat | None = ...) -> UMat: ...
+
+@_typing.overload
+def watershed(image: cv2.typing.MatLike, markers: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def watershed(image: UMat, markers: UMat) -> UMat: ...
+
+@_typing.overload
+def writeOpticalFlow(path: str, flow: cv2.typing.MatLike) -> bool: ...
+@_typing.overload
+def writeOpticalFlow(path: str, flow: UMat) -> bool: ...
+
+def createTrackbar(trackbarName: str, windowName: str, value: int, count: int, onChange: _typing.Callable[[int], None]) -> None: ...
+
+def createButton(buttonName: str, onChange: _typing.Callable[[tuple[int] | tuple[int, _typing.Any]], None], userData: _typing.Any | None = ..., buttonType: int = ..., initialButtonState: int = ...) -> None: ...
+
+def setMouseCallback(windowName: str, onMouse: _typing.Callable[[int, int, int, int, _typing.Any | None], None], param: _typing.Any | None = ...) -> None: ...
+
+def CV_8UC(channels: int) -> int: ...
+
+def CV_8SC(channels: int) -> int: ...
+
+def CV_16UC(channels: int) -> int: ...
+
+def CV_16SC(channels: int) -> int: ...
+
+def CV_32SC(channels: int) -> int: ...
+
+def CV_32FC(channels: int) -> int: ...
+
+def CV_64FC(channels: int) -> int: ...
+
+def CV_16FC(channels: int) -> int: ...
+
+def CV_MAKETYPE(depth: int, channels: int) -> int: ...
+
+def dnn_registerLayer(layerTypeName: str, layerClass: _typing.Type[cv2.dnn.LayerProtocol]) -> None: ...
+
+def dnn_unregisterLayer(layerTypeName: str) -> None: ...
+
+def redirectError(onError: _typing.Callable[[int, str, str, str, int], None] | None) -> None: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/aruco/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/aruco/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..9e252203471e3b1a9867e94fa677120996e1c136
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/aruco/__init__.pyi
@@ -0,0 +1,303 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.typing
+import typing as _typing
+
+
+# Enumerations
+CORNER_REFINE_NONE: int
+CORNER_REFINE_SUBPIX: int
+CORNER_REFINE_CONTOUR: int
+CORNER_REFINE_APRILTAG: int
+CornerRefineMethod = int
+"""One of [CORNER_REFINE_NONE, CORNER_REFINE_SUBPIX, CORNER_REFINE_CONTOUR, CORNER_REFINE_APRILTAG]"""
+
+DICT_4X4_50: int
+DICT_4X4_100: int
+DICT_4X4_250: int
+DICT_4X4_1000: int
+DICT_5X5_50: int
+DICT_5X5_100: int
+DICT_5X5_250: int
+DICT_5X5_1000: int
+DICT_6X6_50: int
+DICT_6X6_100: int
+DICT_6X6_250: int
+DICT_6X6_1000: int
+DICT_7X7_50: int
+DICT_7X7_100: int
+DICT_7X7_250: int
+DICT_7X7_1000: int
+DICT_ARUCO_ORIGINAL: int
+DICT_APRILTAG_16h5: int
+DICT_APRILTAG_16H5: int
+DICT_APRILTAG_25h9: int
+DICT_APRILTAG_25H9: int
+DICT_APRILTAG_36h10: int
+DICT_APRILTAG_36H10: int
+DICT_APRILTAG_36h11: int
+DICT_APRILTAG_36H11: int
+DICT_ARUCO_MIP_36h12: int
+DICT_ARUCO_MIP_36H12: int
+PredefinedDictionaryType = int
+"""One of [DICT_4X4_50, DICT_4X4_100, DICT_4X4_250, DICT_4X4_1000, DICT_5X5_50, DICT_5X5_100, DICT_5X5_250, DICT_5X5_1000, DICT_6X6_50, DICT_6X6_100, DICT_6X6_250, DICT_6X6_1000, DICT_7X7_50, DICT_7X7_100, DICT_7X7_250, DICT_7X7_1000, DICT_ARUCO_ORIGINAL, DICT_APRILTAG_16h5, DICT_APRILTAG_16H5, DICT_APRILTAG_25h9, DICT_APRILTAG_25H9, DICT_APRILTAG_36h10, DICT_APRILTAG_36H10, DICT_APRILTAG_36h11, DICT_APRILTAG_36H11, DICT_ARUCO_MIP_36h12, DICT_ARUCO_MIP_36H12]"""
+
+
+
+# Classes
+class Board:
+ # Functions
+ @_typing.overload
+ def __init__(self, objPoints: _typing.Sequence[cv2.typing.MatLike], dictionary: Dictionary, ids: cv2.typing.MatLike) -> None: ...
+ @_typing.overload
+ def __init__(self, objPoints: _typing.Sequence[cv2.UMat], dictionary: Dictionary, ids: cv2.UMat) -> None: ...
+
+ def getDictionary(self) -> Dictionary: ...
+
+ def getObjPoints(self) -> _typing.Sequence[_typing.Sequence[cv2.typing.Point3f]]: ...
+
+ def getIds(self) -> _typing.Sequence[int]: ...
+
+ def getRightBottomCorner(self) -> cv2.typing.Point3f: ...
+
+ @_typing.overload
+ def matchImagePoints(self, detectedCorners: _typing.Sequence[cv2.typing.MatLike], detectedIds: cv2.typing.MatLike, objPoints: cv2.typing.MatLike | None = ..., imgPoints: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def matchImagePoints(self, detectedCorners: _typing.Sequence[cv2.UMat], detectedIds: cv2.UMat, objPoints: cv2.UMat | None = ..., imgPoints: cv2.UMat | None = ...) -> tuple[cv2.UMat, cv2.UMat]: ...
+
+ @_typing.overload
+ def generateImage(self, outSize: cv2.typing.Size, img: cv2.typing.MatLike | None = ..., marginSize: int = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def generateImage(self, outSize: cv2.typing.Size, img: cv2.UMat | None = ..., marginSize: int = ..., borderBits: int = ...) -> cv2.UMat: ...
+
+
+class GridBoard(Board):
+ # Functions
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, markerLength: float, markerSeparation: float, dictionary: Dictionary, ids: cv2.typing.MatLike | None = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, markerLength: float, markerSeparation: float, dictionary: Dictionary, ids: cv2.UMat | None = ...) -> None: ...
+
+ def getGridSize(self) -> cv2.typing.Size: ...
+
+ def getMarkerLength(self) -> float: ...
+
+ def getMarkerSeparation(self) -> float: ...
+
+
+class CharucoBoard(Board):
+ # Functions
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, squareLength: float, markerLength: float, dictionary: Dictionary, ids: cv2.typing.MatLike | None = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, size: cv2.typing.Size, squareLength: float, markerLength: float, dictionary: Dictionary, ids: cv2.UMat | None = ...) -> None: ...
+
+ def setLegacyPattern(self, legacyPattern: bool) -> None: ...
+
+ def getLegacyPattern(self) -> bool: ...
+
+ def getChessboardSize(self) -> cv2.typing.Size: ...
+
+ def getSquareLength(self) -> float: ...
+
+ def getMarkerLength(self) -> float: ...
+
+ def getChessboardCorners(self) -> _typing.Sequence[cv2.typing.Point3f]: ...
+
+ @_typing.overload
+ def checkCharucoCornersCollinear(self, charucoIds: cv2.typing.MatLike) -> bool: ...
+ @_typing.overload
+ def checkCharucoCornersCollinear(self, charucoIds: cv2.UMat) -> bool: ...
+
+
+class DetectorParameters:
+ adaptiveThreshWinSizeMin: int
+ adaptiveThreshWinSizeMax: int
+ adaptiveThreshWinSizeStep: int
+ adaptiveThreshConstant: float
+ minMarkerPerimeterRate: float
+ maxMarkerPerimeterRate: float
+ polygonalApproxAccuracyRate: float
+ minCornerDistanceRate: float
+ minDistanceToBorder: int
+ minMarkerDistanceRate: float
+ minGroupDistance: float
+ cornerRefinementMethod: int
+ cornerRefinementWinSize: int
+ relativeCornerRefinmentWinSize: float
+ cornerRefinementMaxIterations: int
+ cornerRefinementMinAccuracy: float
+ markerBorderBits: int
+ perspectiveRemovePixelPerCell: int
+ perspectiveRemoveIgnoredMarginPerCell: float
+ maxErroneousBitsInBorderRate: float
+ minOtsuStdDev: float
+ errorCorrectionRate: float
+ aprilTagQuadDecimate: float
+ aprilTagQuadSigma: float
+ aprilTagMinClusterPixels: int
+ aprilTagMaxNmaxima: int
+ aprilTagCriticalRad: float
+ aprilTagMaxLineFitMse: float
+ aprilTagMinWhiteBlackDiff: int
+ aprilTagDeglitch: int
+ detectInvertedMarker: bool
+ useAruco3Detection: bool
+ minSideLengthCanonicalImg: int
+ minMarkerLengthRatioOriginalImg: float
+
+ # Functions
+ def __init__(self) -> None: ...
+
+ def readDetectorParameters(self, fn: cv2.FileNode) -> bool: ...
+
+ def writeDetectorParameters(self, fs: cv2.FileStorage, name: str = ...) -> bool: ...
+
+
+class RefineParameters:
+ minRepDistance: float
+ errorCorrectionRate: float
+ checkAllOrders: bool
+
+ # Functions
+ def __init__(self, minRepDistance: float = ..., errorCorrectionRate: float = ..., checkAllOrders: bool = ...) -> None: ...
+
+ def readRefineParameters(self, fn: cv2.FileNode) -> bool: ...
+
+ def writeRefineParameters(self, fs: cv2.FileStorage, name: str = ...) -> bool: ...
+
+
+class ArucoDetector(cv2.Algorithm):
+ # Functions
+ def __init__(self, dictionary: Dictionary = ..., detectorParams: DetectorParameters = ..., refineParams: RefineParameters = ...) -> None: ...
+
+ @_typing.overload
+ def detectMarkers(self, image: cv2.typing.MatLike, corners: _typing.Sequence[cv2.typing.MatLike] | None = ..., ids: cv2.typing.MatLike | None = ..., rejectedImgPoints: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike]]: ...
+ @_typing.overload
+ def detectMarkers(self, image: cv2.UMat, corners: _typing.Sequence[cv2.UMat] | None = ..., ids: cv2.UMat | None = ..., rejectedImgPoints: _typing.Sequence[cv2.UMat] | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat]]: ...
+
+ @_typing.overload
+ def refineDetectedMarkers(self, image: cv2.typing.MatLike, board: Board, detectedCorners: _typing.Sequence[cv2.typing.MatLike], detectedIds: cv2.typing.MatLike, rejectedCorners: _typing.Sequence[cv2.typing.MatLike], cameraMatrix: cv2.typing.MatLike | None = ..., distCoeffs: cv2.typing.MatLike | None = ..., recoveredIdxs: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+ @_typing.overload
+ def refineDetectedMarkers(self, image: cv2.UMat, board: Board, detectedCorners: _typing.Sequence[cv2.UMat], detectedIds: cv2.UMat, rejectedCorners: _typing.Sequence[cv2.UMat], cameraMatrix: cv2.UMat | None = ..., distCoeffs: cv2.UMat | None = ..., recoveredIdxs: cv2.UMat | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
+
+ def getDictionary(self) -> Dictionary: ...
+
+ def setDictionary(self, dictionary: Dictionary) -> None: ...
+
+ def getDetectorParameters(self) -> DetectorParameters: ...
+
+ def setDetectorParameters(self, detectorParameters: DetectorParameters) -> None: ...
+
+ def getRefineParameters(self) -> RefineParameters: ...
+
+ def setRefineParameters(self, refineParameters: RefineParameters) -> None: ...
+
+ def write(self, fs: cv2.FileStorage, name: str) -> None: ...
+
+ def read(self, fn: cv2.FileNode) -> None: ...
+
+
+class Dictionary:
+ bytesList: cv2.typing.MatLike
+ markerSize: int
+ maxCorrectionBits: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, bytesList: cv2.typing.MatLike, _markerSize: int, maxcorr: int = ...) -> None: ...
+
+ def readDictionary(self, fn: cv2.FileNode) -> bool: ...
+
+ def writeDictionary(self, fs: cv2.FileStorage, name: str = ...) -> None: ...
+
+ def identify(self, onlyBits: cv2.typing.MatLike, maxCorrectionRate: float) -> tuple[bool, int, int]: ...
+
+ @_typing.overload
+ def getDistanceToId(self, bits: cv2.typing.MatLike, id: int, allRotations: bool = ...) -> int: ...
+ @_typing.overload
+ def getDistanceToId(self, bits: cv2.UMat, id: int, allRotations: bool = ...) -> int: ...
+
+ @_typing.overload
+ def generateImageMarker(self, id: int, sidePixels: int, _img: cv2.typing.MatLike | None = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def generateImageMarker(self, id: int, sidePixels: int, _img: cv2.UMat | None = ..., borderBits: int = ...) -> cv2.UMat: ...
+
+ @staticmethod
+ def getByteListFromBits(bits: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+
+ @staticmethod
+ def getBitsFromByteList(byteList: cv2.typing.MatLike, markerSize: int) -> cv2.typing.MatLike: ...
+
+
+class CharucoParameters:
+ cameraMatrix: cv2.typing.MatLike
+ distCoeffs: cv2.typing.MatLike
+ minMarkers: int
+ tryRefineMarkers: bool
+
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class CharucoDetector(cv2.Algorithm):
+ # Functions
+ def __init__(self, board: CharucoBoard, charucoParams: CharucoParameters = ..., detectorParams: DetectorParameters = ..., refineParams: RefineParameters = ...) -> None: ...
+
+ def getBoard(self) -> CharucoBoard: ...
+
+ def setBoard(self, board: CharucoBoard) -> None: ...
+
+ def getCharucoParameters(self) -> CharucoParameters: ...
+
+ def setCharucoParameters(self, charucoParameters: CharucoParameters) -> None: ...
+
+ def getDetectorParameters(self) -> DetectorParameters: ...
+
+ def setDetectorParameters(self, detectorParameters: DetectorParameters) -> None: ...
+
+ def getRefineParameters(self) -> RefineParameters: ...
+
+ def setRefineParameters(self, refineParameters: RefineParameters) -> None: ...
+
+ @_typing.overload
+ def detectBoard(self, image: cv2.typing.MatLike, charucoCorners: cv2.typing.MatLike | None = ..., charucoIds: cv2.typing.MatLike | None = ..., markerCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., markerIds: cv2.typing.MatLike | None = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+ @_typing.overload
+ def detectBoard(self, image: cv2.UMat, charucoCorners: cv2.UMat | None = ..., charucoIds: cv2.UMat | None = ..., markerCorners: _typing.Sequence[cv2.UMat] | None = ..., markerIds: cv2.UMat | None = ...) -> tuple[cv2.UMat, cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
+
+ @_typing.overload
+ def detectDiamonds(self, image: cv2.typing.MatLike, diamondCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., diamondIds: cv2.typing.MatLike | None = ..., markerCorners: _typing.Sequence[cv2.typing.MatLike] | None = ..., markerIds: cv2.typing.MatLike | None = ...) -> tuple[_typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike, _typing.Sequence[cv2.typing.MatLike], cv2.typing.MatLike]: ...
+ @_typing.overload
+ def detectDiamonds(self, image: cv2.UMat, diamondCorners: _typing.Sequence[cv2.UMat] | None = ..., diamondIds: cv2.UMat | None = ..., markerCorners: _typing.Sequence[cv2.UMat] | None = ..., markerIds: cv2.UMat | None = ...) -> tuple[_typing.Sequence[cv2.UMat], cv2.UMat, _typing.Sequence[cv2.UMat], cv2.UMat]: ...
+
+
+
+# Functions
+@_typing.overload
+def drawDetectedCornersCharuco(image: cv2.typing.MatLike, charucoCorners: cv2.typing.MatLike, charucoIds: cv2.typing.MatLike | None = ..., cornerColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawDetectedCornersCharuco(image: cv2.UMat, charucoCorners: cv2.UMat, charucoIds: cv2.UMat | None = ..., cornerColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
+
+@_typing.overload
+def drawDetectedDiamonds(image: cv2.typing.MatLike, diamondCorners: _typing.Sequence[cv2.typing.MatLike], diamondIds: cv2.typing.MatLike | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawDetectedDiamonds(image: cv2.UMat, diamondCorners: _typing.Sequence[cv2.UMat], diamondIds: cv2.UMat | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
+
+@_typing.overload
+def drawDetectedMarkers(image: cv2.typing.MatLike, corners: _typing.Sequence[cv2.typing.MatLike], ids: cv2.typing.MatLike | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def drawDetectedMarkers(image: cv2.UMat, corners: _typing.Sequence[cv2.UMat], ids: cv2.UMat | None = ..., borderColor: cv2.typing.Scalar = ...) -> cv2.UMat: ...
+
+def extendDictionary(nMarkers: int, markerSize: int, baseDictionary: Dictionary = ..., randomSeed: int = ...) -> Dictionary: ...
+
+@_typing.overload
+def generateImageMarker(dictionary: Dictionary, id: int, sidePixels: int, img: cv2.typing.MatLike | None = ..., borderBits: int = ...) -> cv2.typing.MatLike: ...
+@_typing.overload
+def generateImageMarker(dictionary: Dictionary, id: int, sidePixels: int, img: cv2.UMat | None = ..., borderBits: int = ...) -> cv2.UMat: ...
+
+def getPredefinedDictionary(dict: int) -> Dictionary: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/config-3.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/config-3.py
new file mode 100644
index 0000000000000000000000000000000000000000..587a42bfacd8401281a51fe5dbc8ea44f4e156d5
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/config-3.py
@@ -0,0 +1,24 @@
+PYTHON_EXTENSIONS_PATHS = [
+ LOADER_DIR
+] + PYTHON_EXTENSIONS_PATHS
+
+ci_and_not_headless = False
+
+try:
+ from .version import ci_build, headless
+
+ ci_and_not_headless = ci_build and not headless
+except:
+ pass
+
+# the Qt plugin is included currently only in the pre-built wheels
+if sys.platform.startswith("linux") and ci_and_not_headless:
+ os.environ["QT_QPA_PLATFORM_PLUGIN_PATH"] = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "qt", "plugins"
+ )
+
+# Qt will throw warning on Linux if fonts are not found
+if sys.platform.startswith("linux") and ci_and_not_headless:
+ os.environ["QT_QPA_FONTDIR"] = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "qt", "fonts"
+ )
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/config.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..a95fbcf0db643541979c557588cb264233bc6891
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/config.py
@@ -0,0 +1,5 @@
+import os
+
+BINARIES_PATHS = [
+ os.path.join(os.path.join(LOADER_DIR, '../../'), 'lib64')
+] + BINARIES_PATHS
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/detail/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/detail/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..71917cce8679bbfddc13b7fcc650b713f12d9251
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/detail/__init__.pyi
@@ -0,0 +1,600 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.gapi
+import cv2.gapi.ie
+import cv2.gapi.onnx
+import cv2.gapi.ov
+import cv2.typing
+import numpy
+import typing as _typing
+
+
+# Enumerations
+TEST_CUSTOM: int
+TEST_EQ: int
+TEST_NE: int
+TEST_LE: int
+TEST_LT: int
+TEST_GE: int
+TEST_GT: int
+TestOp = int
+"""One of [TEST_CUSTOM, TEST_EQ, TEST_NE, TEST_LE, TEST_LT, TEST_GE, TEST_GT]"""
+
+WAVE_CORRECT_HORIZ: int
+WAVE_CORRECT_VERT: int
+WAVE_CORRECT_AUTO: int
+WaveCorrectKind = int
+"""One of [WAVE_CORRECT_HORIZ, WAVE_CORRECT_VERT, WAVE_CORRECT_AUTO]"""
+
+OpaqueKind_CV_UNKNOWN: int
+OPAQUE_KIND_CV_UNKNOWN: int
+OpaqueKind_CV_BOOL: int
+OPAQUE_KIND_CV_BOOL: int
+OpaqueKind_CV_INT: int
+OPAQUE_KIND_CV_INT: int
+OpaqueKind_CV_INT64: int
+OPAQUE_KIND_CV_INT64: int
+OpaqueKind_CV_DOUBLE: int
+OPAQUE_KIND_CV_DOUBLE: int
+OpaqueKind_CV_FLOAT: int
+OPAQUE_KIND_CV_FLOAT: int
+OpaqueKind_CV_UINT64: int
+OPAQUE_KIND_CV_UINT64: int
+OpaqueKind_CV_STRING: int
+OPAQUE_KIND_CV_STRING: int
+OpaqueKind_CV_POINT: int
+OPAQUE_KIND_CV_POINT: int
+OpaqueKind_CV_POINT2F: int
+OPAQUE_KIND_CV_POINT2F: int
+OpaqueKind_CV_POINT3F: int
+OPAQUE_KIND_CV_POINT3F: int
+OpaqueKind_CV_SIZE: int
+OPAQUE_KIND_CV_SIZE: int
+OpaqueKind_CV_RECT: int
+OPAQUE_KIND_CV_RECT: int
+OpaqueKind_CV_SCALAR: int
+OPAQUE_KIND_CV_SCALAR: int
+OpaqueKind_CV_MAT: int
+OPAQUE_KIND_CV_MAT: int
+OpaqueKind_CV_DRAW_PRIM: int
+OPAQUE_KIND_CV_DRAW_PRIM: int
+OpaqueKind = int
+"""One of [OpaqueKind_CV_UNKNOWN, OPAQUE_KIND_CV_UNKNOWN, OpaqueKind_CV_BOOL, OPAQUE_KIND_CV_BOOL, OpaqueKind_CV_INT, OPAQUE_KIND_CV_INT, OpaqueKind_CV_INT64, OPAQUE_KIND_CV_INT64, OpaqueKind_CV_DOUBLE, OPAQUE_KIND_CV_DOUBLE, OpaqueKind_CV_FLOAT, OPAQUE_KIND_CV_FLOAT, OpaqueKind_CV_UINT64, OPAQUE_KIND_CV_UINT64, OpaqueKind_CV_STRING, OPAQUE_KIND_CV_STRING, OpaqueKind_CV_POINT, OPAQUE_KIND_CV_POINT, OpaqueKind_CV_POINT2F, OPAQUE_KIND_CV_POINT2F, OpaqueKind_CV_POINT3F, OPAQUE_KIND_CV_POINT3F, OpaqueKind_CV_SIZE, OPAQUE_KIND_CV_SIZE, OpaqueKind_CV_RECT, OPAQUE_KIND_CV_RECT, OpaqueKind_CV_SCALAR, OPAQUE_KIND_CV_SCALAR, OpaqueKind_CV_MAT, OPAQUE_KIND_CV_MAT, OpaqueKind_CV_DRAW_PRIM, OPAQUE_KIND_CV_DRAW_PRIM]"""
+
+ArgKind_OPAQUE_VAL: int
+ARG_KIND_OPAQUE_VAL: int
+ArgKind_OPAQUE: int
+ARG_KIND_OPAQUE: int
+ArgKind_GOBJREF: int
+ARG_KIND_GOBJREF: int
+ArgKind_GMAT: int
+ARG_KIND_GMAT: int
+ArgKind_GMATP: int
+ARG_KIND_GMATP: int
+ArgKind_GFRAME: int
+ARG_KIND_GFRAME: int
+ArgKind_GSCALAR: int
+ARG_KIND_GSCALAR: int
+ArgKind_GARRAY: int
+ARG_KIND_GARRAY: int
+ArgKind_GOPAQUE: int
+ARG_KIND_GOPAQUE: int
+ArgKind = int
+"""One of [ArgKind_OPAQUE_VAL, ARG_KIND_OPAQUE_VAL, ArgKind_OPAQUE, ARG_KIND_OPAQUE, ArgKind_GOBJREF, ARG_KIND_GOBJREF, ArgKind_GMAT, ARG_KIND_GMAT, ArgKind_GMATP, ARG_KIND_GMATP, ArgKind_GFRAME, ARG_KIND_GFRAME, ArgKind_GSCALAR, ARG_KIND_GSCALAR, ArgKind_GARRAY, ARG_KIND_GARRAY, ArgKind_GOPAQUE, ARG_KIND_GOPAQUE]"""
+
+
+Blender_NO: int
+BLENDER_NO: int
+Blender_FEATHER: int
+BLENDER_FEATHER: int
+Blender_MULTI_BAND: int
+BLENDER_MULTI_BAND: int
+
+ExposureCompensator_NO: int
+EXPOSURE_COMPENSATOR_NO: int
+ExposureCompensator_GAIN: int
+EXPOSURE_COMPENSATOR_GAIN: int
+ExposureCompensator_GAIN_BLOCKS: int
+EXPOSURE_COMPENSATOR_GAIN_BLOCKS: int
+ExposureCompensator_CHANNELS: int
+EXPOSURE_COMPENSATOR_CHANNELS: int
+ExposureCompensator_CHANNELS_BLOCKS: int
+EXPOSURE_COMPENSATOR_CHANNELS_BLOCKS: int
+
+SeamFinder_NO: int
+SEAM_FINDER_NO: int
+SeamFinder_VORONOI_SEAM: int
+SEAM_FINDER_VORONOI_SEAM: int
+SeamFinder_DP_SEAM: int
+SEAM_FINDER_DP_SEAM: int
+
+DpSeamFinder_COLOR: int
+DP_SEAM_FINDER_COLOR: int
+DpSeamFinder_COLOR_GRAD: int
+DP_SEAM_FINDER_COLOR_GRAD: int
+DpSeamFinder_CostFunction = int
+"""One of [DpSeamFinder_COLOR, DP_SEAM_FINDER_COLOR, DpSeamFinder_COLOR_GRAD, DP_SEAM_FINDER_COLOR_GRAD]"""
+
+Timelapser_AS_IS: int
+TIMELAPSER_AS_IS: int
+Timelapser_CROP: int
+TIMELAPSER_CROP: int
+
+GraphCutSeamFinderBase_COST_COLOR: int
+GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR: int
+GraphCutSeamFinderBase_COST_COLOR_GRAD: int
+GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR_GRAD: int
+GraphCutSeamFinderBase_CostType = int
+"""One of [GraphCutSeamFinderBase_COST_COLOR, GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR, GraphCutSeamFinderBase_COST_COLOR_GRAD, GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR_GRAD]"""
+
+TrackerSamplerCSC_MODE_INIT_POS: int
+TRACKER_SAMPLER_CSC_MODE_INIT_POS: int
+TrackerSamplerCSC_MODE_INIT_NEG: int
+TRACKER_SAMPLER_CSC_MODE_INIT_NEG: int
+TrackerSamplerCSC_MODE_TRACK_POS: int
+TRACKER_SAMPLER_CSC_MODE_TRACK_POS: int
+TrackerSamplerCSC_MODE_TRACK_NEG: int
+TRACKER_SAMPLER_CSC_MODE_TRACK_NEG: int
+TrackerSamplerCSC_MODE_DETECT: int
+TRACKER_SAMPLER_CSC_MODE_DETECT: int
+TrackerSamplerCSC_MODE = int
+"""One of [TrackerSamplerCSC_MODE_INIT_POS, TRACKER_SAMPLER_CSC_MODE_INIT_POS, TrackerSamplerCSC_MODE_INIT_NEG, TRACKER_SAMPLER_CSC_MODE_INIT_NEG, TrackerSamplerCSC_MODE_TRACK_POS, TRACKER_SAMPLER_CSC_MODE_TRACK_POS, TrackerSamplerCSC_MODE_TRACK_NEG, TRACKER_SAMPLER_CSC_MODE_TRACK_NEG, TrackerSamplerCSC_MODE_DETECT, TRACKER_SAMPLER_CSC_MODE_DETECT]"""
+
+
+# Classes
+class Blender:
+ # Functions
+ @classmethod
+ def createDefault(cls, type: int, try_gpu: bool = ...) -> Blender: ...
+
+ @_typing.overload
+ def prepare(self, corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> None: ...
+ @_typing.overload
+ def prepare(self, dst_roi: cv2.typing.Rect) -> None: ...
+
+ @_typing.overload
+ def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ...
+ @_typing.overload
+ def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ...
+
+ @_typing.overload
+ def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ...
+
+
+class FeatherBlender(Blender):
+ # Functions
+ def __init__(self, sharpness: float = ...) -> None: ...
+
+ def sharpness(self) -> float: ...
+
+ def setSharpness(self, val: float) -> None: ...
+
+ def prepare(self, dst_roi: cv2.typing.Rect) -> None: ...
+
+ @_typing.overload
+ def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ...
+ @_typing.overload
+ def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ...
+
+ @_typing.overload
+ def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ...
+
+ def createWeightMaps(self, masks: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], weight_maps: _typing.Sequence[cv2.UMat]) -> tuple[cv2.typing.Rect, _typing.Sequence[cv2.UMat]]: ...
+
+
+class MultiBandBlender(Blender):
+ # Functions
+ def __init__(self, try_gpu: int = ..., num_bands: int = ..., weight_type: int = ...) -> None: ...
+
+ def numBands(self) -> int: ...
+
+ def setNumBands(self, val: int) -> None: ...
+
+ def prepare(self, dst_roi: cv2.typing.Rect) -> None: ...
+
+ @_typing.overload
+ def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ...
+ @_typing.overload
+ def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ...
+
+ @_typing.overload
+ def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ...
+
+
+class CameraParams:
+ focal: float
+ aspect: float
+ ppx: float
+ ppy: float
+ R: cv2.typing.MatLike
+ t: cv2.typing.MatLike
+
+ # Functions
+ def K(self) -> cv2.typing.MatLike: ...
+
+
+class ExposureCompensator:
+ # Functions
+ @classmethod
+ def createDefault(cls, type: int) -> ExposureCompensator: ...
+
+ def feed(self, corners: _typing.Sequence[cv2.typing.Point], images: _typing.Sequence[cv2.UMat], masks: _typing.Sequence[cv2.UMat]) -> None: ...
+
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, arg1: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, arg1: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+ def setUpdateGain(self, b: bool) -> None: ...
+
+ def getUpdateGain(self) -> bool: ...
+
+
+class NoExposureCompensator(ExposureCompensator):
+ # Functions
+ @_typing.overload
+ def apply(self, arg1: int, arg2: cv2.typing.Point, arg3: cv2.typing.MatLike, arg4: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, arg1: int, arg2: cv2.typing.Point, arg3: cv2.UMat, arg4: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+
+class GainCompensator(ExposureCompensator):
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, nr_feeds: int) -> None: ...
+
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+ def setNrFeeds(self, nr_feeds: int) -> None: ...
+
+ def getNrFeeds(self) -> int: ...
+
+ def setSimilarityThreshold(self, similarity_threshold: float) -> None: ...
+
+ def getSimilarityThreshold(self) -> float: ...
+
+
+class ChannelsCompensator(ExposureCompensator):
+ # Functions
+ def __init__(self, nr_feeds: int = ...) -> None: ...
+
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+ def setNrFeeds(self, nr_feeds: int) -> None: ...
+
+ def getNrFeeds(self) -> int: ...
+
+ def setSimilarityThreshold(self, similarity_threshold: float) -> None: ...
+
+ def getSimilarityThreshold(self) -> float: ...
+
+
+class BlocksCompensator(ExposureCompensator):
+ # Functions
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+ def setNrFeeds(self, nr_feeds: int) -> None: ...
+
+ def getNrFeeds(self) -> int: ...
+
+ def setSimilarityThreshold(self, similarity_threshold: float) -> None: ...
+
+ def getSimilarityThreshold(self) -> float: ...
+
+ @_typing.overload
+ def setBlockSize(self, width: int, height: int) -> None: ...
+ @_typing.overload
+ def setBlockSize(self, size: cv2.typing.Size) -> None: ...
+
+ def getBlockSize(self) -> cv2.typing.Size: ...
+
+ def setNrGainsFilteringIterations(self, nr_iterations: int) -> None: ...
+
+ def getNrGainsFilteringIterations(self) -> int: ...
+
+
+class BlocksGainCompensator(BlocksCompensator):
+ # Functions
+ @_typing.overload
+ def __init__(self, bl_width: int = ..., bl_height: int = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, bl_width: int, bl_height: int, nr_feeds: int) -> None: ...
+
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ...
+
+ def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+ def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ...
+
+
+class BlocksChannelsCompensator(BlocksCompensator):
+ # Functions
+ def __init__(self, bl_width: int = ..., bl_height: int = ..., nr_feeds: int = ...) -> None: ...
+
+
+class ImageFeatures:
+ img_idx: int
+ img_size: cv2.typing.Size
+ keypoints: _typing.Sequence[cv2.KeyPoint]
+ descriptors: cv2.UMat
+
+ # Functions
+ def getKeypoints(self) -> _typing.Sequence[cv2.KeyPoint]: ...
+
+
+class MatchesInfo:
+ src_img_idx: int
+ dst_img_idx: int
+ matches: _typing.Sequence[cv2.DMatch]
+ inliers_mask: numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]]
+ num_inliers: int
+ H: cv2.typing.MatLike
+ confidence: float
+
+ # Functions
+ def getMatches(self) -> _typing.Sequence[cv2.DMatch]: ...
+
+ def getInliers(self) -> numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]]: ...
+
+
+class FeaturesMatcher:
+ # Functions
+ def apply(self, features1: ImageFeatures, features2: ImageFeatures) -> MatchesInfo: ...
+
+ def apply2(self, features: _typing.Sequence[ImageFeatures], mask: cv2.UMat | None = ...) -> _typing.Sequence[MatchesInfo]: ...
+
+ def isThreadSafe(self) -> bool: ...
+
+ def collectGarbage(self) -> None: ...
+
+
+class BestOf2NearestMatcher(FeaturesMatcher):
+ # Functions
+ def __init__(self, try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ..., matches_confindece_thresh: float = ...) -> None: ...
+
+ def collectGarbage(self) -> None: ...
+
+ @classmethod
+ def create(cls, try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ..., matches_confindece_thresh: float = ...) -> BestOf2NearestMatcher: ...
+
+
+class BestOf2NearestRangeMatcher(BestOf2NearestMatcher):
+ # Functions
+ def __init__(self, range_width: int = ..., try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ...) -> None: ...
+
+
+class AffineBestOf2NearestMatcher(BestOf2NearestMatcher):
+ # Functions
+ def __init__(self, full_affine: bool = ..., try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ...) -> None: ...
+
+
+class Estimator:
+ # Functions
+ def apply(self, features: _typing.Sequence[ImageFeatures], pairwise_matches: _typing.Sequence[MatchesInfo], cameras: _typing.Sequence[CameraParams]) -> tuple[bool, _typing.Sequence[CameraParams]]: ...
+
+
+class HomographyBasedEstimator(Estimator):
+ # Functions
+ def __init__(self, is_focals_estimated: bool = ...) -> None: ...
+
+
+class AffineBasedEstimator(Estimator):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class BundleAdjusterBase(Estimator):
+ # Functions
+ def refinementMask(self) -> cv2.typing.MatLike: ...
+
+ def setRefinementMask(self, mask: cv2.typing.MatLike) -> None: ...
+
+ def confThresh(self) -> float: ...
+
+ def setConfThresh(self, conf_thresh: float) -> None: ...
+
+ def termCriteria(self) -> cv2.typing.TermCriteria: ...
+
+ def setTermCriteria(self, term_criteria: cv2.typing.TermCriteria) -> None: ...
+
+
+class NoBundleAdjuster(BundleAdjusterBase):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class BundleAdjusterReproj(BundleAdjusterBase):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class BundleAdjusterRay(BundleAdjusterBase):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class BundleAdjusterAffine(BundleAdjusterBase):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class BundleAdjusterAffinePartial(BundleAdjusterBase):
+ # Functions
+ def __init__(self) -> None: ...
+
+
+class SeamFinder:
+ # Functions
+ def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+ @classmethod
+ def createDefault(cls, type: int) -> SeamFinder: ...
+
+
+class NoSeamFinder(SeamFinder):
+ # Functions
+ def find(self, arg1: _typing.Sequence[cv2.UMat], arg2: _typing.Sequence[cv2.typing.Point], arg3: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+
+class PairwiseSeamFinder(SeamFinder):
+ # Functions
+ def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+
+class VoronoiSeamFinder(PairwiseSeamFinder):
+ # Functions
+ def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+
+class DpSeamFinder(SeamFinder):
+ # Functions
+ def __init__(self, costFunc: str) -> None: ...
+
+ def setCostFunction(self, val: str) -> None: ...
+
+
+class GraphCutSeamFinder:
+ # Functions
+ def __init__(self, cost_type: str, terminal_cost: float = ..., bad_region_penalty: float = ...) -> None: ...
+
+ def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+
+class Timelapser:
+ # Functions
+ @classmethod
+ def createDefault(cls, type: int) -> Timelapser: ...
+
+ def initialize(self, corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> None: ...
+
+ @_typing.overload
+ def process(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ...
+ @_typing.overload
+ def process(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ...
+
+ def getDst(self) -> cv2.UMat: ...
+
+
+class TimelapserCrop(Timelapser):
+ ...
+
+class ProjectorBase:
+ ...
+
+class SphericalProjector(ProjectorBase):
+ # Functions
+ def mapForward(self, x: float, y: float, u: float, v: float) -> None: ...
+
+ def mapBackward(self, u: float, v: float, x: float, y: float) -> None: ...
+
+
+
+# Functions
+def calibrateRotatingCamera(Hs: _typing.Sequence[cv2.typing.MatLike], K: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ...
+
+@_typing.overload
+def computeImageFeatures(featuresFinder: cv2.Feature2D, images: _typing.Sequence[cv2.typing.MatLike], masks: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[ImageFeatures]: ...
+@_typing.overload
+def computeImageFeatures(featuresFinder: cv2.Feature2D, images: _typing.Sequence[cv2.UMat], masks: _typing.Sequence[cv2.UMat] | None = ...) -> _typing.Sequence[ImageFeatures]: ...
+
+@_typing.overload
+def computeImageFeatures2(featuresFinder: cv2.Feature2D, image: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> ImageFeatures: ...
+@_typing.overload
+def computeImageFeatures2(featuresFinder: cv2.Feature2D, image: cv2.UMat, mask: cv2.UMat | None = ...) -> ImageFeatures: ...
+
+@_typing.overload
+def createLaplacePyr(img: cv2.typing.MatLike, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+@_typing.overload
+def createLaplacePyr(img: cv2.UMat, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+@_typing.overload
+def createLaplacePyrGpu(img: cv2.typing.MatLike, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+@_typing.overload
+def createLaplacePyrGpu(img: cv2.UMat, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+@_typing.overload
+def createWeightMap(mask: cv2.typing.MatLike, sharpness: float, weight: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def createWeightMap(mask: cv2.UMat, sharpness: float, weight: cv2.UMat) -> cv2.UMat: ...
+
+def focalsFromHomography(H: cv2.typing.MatLike, f0: float, f1: float, f0_ok: bool, f1_ok: bool) -> None: ...
+
+def leaveBiggestComponent(features: _typing.Sequence[ImageFeatures], pairwise_matches: _typing.Sequence[MatchesInfo], conf_threshold: float) -> _typing.Sequence[int]: ...
+
+def matchesGraphAsString(paths: _typing.Sequence[str], pairwise_matches: _typing.Sequence[MatchesInfo], conf_threshold: float) -> str: ...
+
+@_typing.overload
+def normalizeUsingWeightMap(weight: cv2.typing.MatLike, src: cv2.typing.MatLike) -> cv2.typing.MatLike: ...
+@_typing.overload
+def normalizeUsingWeightMap(weight: cv2.UMat, src: cv2.UMat) -> cv2.UMat: ...
+
+def overlapRoi(tl1: cv2.typing.Point, tl2: cv2.typing.Point, sz1: cv2.typing.Size, sz2: cv2.typing.Size, roi: cv2.typing.Rect) -> bool: ...
+
+def restoreImageFromLaplacePyr(pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+def restoreImageFromLaplacePyrGpu(pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ...
+
+@_typing.overload
+def resultRoi(corners: _typing.Sequence[cv2.typing.Point], images: _typing.Sequence[cv2.UMat]) -> cv2.typing.Rect: ...
+@_typing.overload
+def resultRoi(corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> cv2.typing.Rect: ...
+
+def resultRoiIntersection(corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> cv2.typing.Rect: ...
+
+def resultTl(corners: _typing.Sequence[cv2.typing.Point]) -> cv2.typing.Point: ...
+
+def selectRandomSubset(count: int, size: int, subset: _typing.Sequence[int]) -> None: ...
+
+def stitchingLogLevel() -> int: ...
+
+@_typing.overload
+def strip(params: cv2.gapi.ie.PyParams) -> cv2.gapi.GNetParam: ...
+@_typing.overload
+def strip(params: cv2.gapi.onnx.PyParams) -> cv2.gapi.GNetParam: ...
+@_typing.overload
+def strip(params: cv2.gapi.ov.PyParams) -> cv2.gapi.GNetParam: ...
+
+def waveCorrect(rmats: _typing.Sequence[cv2.typing.MatLike], kind: WaveCorrectKind) -> _typing.Sequence[cv2.typing.MatLike]: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/flann/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/flann/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..a107c75226d71aa41e3e225d318761cf754527e4
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/flann/__init__.pyi
@@ -0,0 +1,64 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.typing
+import typing as _typing
+
+
+# Enumerations
+FLANN_INDEX_TYPE_8U: int
+FLANN_INDEX_TYPE_8S: int
+FLANN_INDEX_TYPE_16U: int
+FLANN_INDEX_TYPE_16S: int
+FLANN_INDEX_TYPE_32S: int
+FLANN_INDEX_TYPE_32F: int
+FLANN_INDEX_TYPE_64F: int
+FLANN_INDEX_TYPE_STRING: int
+FLANN_INDEX_TYPE_BOOL: int
+FLANN_INDEX_TYPE_ALGORITHM: int
+LAST_VALUE_FLANN_INDEX_TYPE: int
+FlannIndexType = int
+"""One of [FLANN_INDEX_TYPE_8U, FLANN_INDEX_TYPE_8S, FLANN_INDEX_TYPE_16U, FLANN_INDEX_TYPE_16S, FLANN_INDEX_TYPE_32S, FLANN_INDEX_TYPE_32F, FLANN_INDEX_TYPE_64F, FLANN_INDEX_TYPE_STRING, FLANN_INDEX_TYPE_BOOL, FLANN_INDEX_TYPE_ALGORITHM, LAST_VALUE_FLANN_INDEX_TYPE]"""
+
+
+
+# Classes
+class Index:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, features: cv2.typing.MatLike, params: cv2.typing.IndexParams, distType: int = ...) -> None: ...
+ @_typing.overload
+ def __init__(self, features: cv2.UMat, params: cv2.typing.IndexParams, distType: int = ...) -> None: ...
+
+ @_typing.overload
+ def build(self, features: cv2.typing.MatLike, params: cv2.typing.IndexParams, distType: int = ...) -> None: ...
+ @_typing.overload
+ def build(self, features: cv2.UMat, params: cv2.typing.IndexParams, distType: int = ...) -> None: ...
+
+ @_typing.overload
+ def knnSearch(self, query: cv2.typing.MatLike, knn: int, indices: cv2.typing.MatLike | None = ..., dists: cv2.typing.MatLike | None = ..., params: cv2.typing.SearchParams = ...) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def knnSearch(self, query: cv2.UMat, knn: int, indices: cv2.UMat | None = ..., dists: cv2.UMat | None = ..., params: cv2.typing.SearchParams = ...) -> tuple[cv2.UMat, cv2.UMat]: ...
+
+ @_typing.overload
+ def radiusSearch(self, query: cv2.typing.MatLike, radius: float, maxResults: int, indices: cv2.typing.MatLike | None = ..., dists: cv2.typing.MatLike | None = ..., params: cv2.typing.SearchParams = ...) -> tuple[int, cv2.typing.MatLike, cv2.typing.MatLike]: ...
+ @_typing.overload
+ def radiusSearch(self, query: cv2.UMat, radius: float, maxResults: int, indices: cv2.UMat | None = ..., dists: cv2.UMat | None = ..., params: cv2.typing.SearchParams = ...) -> tuple[int, cv2.UMat, cv2.UMat]: ...
+
+ def save(self, filename: str) -> None: ...
+
+ @_typing.overload
+ def load(self, features: cv2.typing.MatLike, filename: str) -> bool: ...
+ @_typing.overload
+ def load(self, features: cv2.UMat, filename: str) -> bool: ...
+
+ def release(self) -> None: ...
+
+ def getDistance(self) -> int: ...
+
+ def getAlgorithm(self) -> int: ...
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b21e54e4195658f71a4d90d8a1d8b993710010b
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.py
@@ -0,0 +1,323 @@
+__all__ = ['op', 'kernel']
+
+import sys
+import cv2 as cv
+
+# NB: Register function in specific module
+def register(mname):
+ def parameterized(func):
+ sys.modules[mname].__dict__[func.__name__] = func
+ return func
+ return parameterized
+
+
+@register('cv2.gapi')
+def networks(*args):
+ return cv.gapi_GNetPackage(list(map(cv.detail.strip, args)))
+
+
+@register('cv2.gapi')
+def compile_args(*args):
+ return list(map(cv.GCompileArg, args))
+
+
+@register('cv2')
+def GIn(*args):
+ return [*args]
+
+
+@register('cv2')
+def GOut(*args):
+ return [*args]
+
+
+@register('cv2')
+def gin(*args):
+ return [*args]
+
+
+@register('cv2.gapi')
+def descr_of(*args):
+ return [*args]
+
+
+@register('cv2')
+class GOpaque():
+ # NB: Inheritance from c++ class cause segfault.
+ # So just aggregate cv.GOpaqueT instead of inheritance
+ def __new__(cls, argtype):
+ return cv.GOpaqueT(argtype)
+
+ class Bool():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_BOOL)
+
+ class Int():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_INT)
+
+ class Int64():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_INT64)
+
+ class UInt64():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_UINT64)
+
+ class Double():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_DOUBLE)
+
+ class Float():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_FLOAT)
+
+ class String():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_STRING)
+
+ class Point():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_POINT)
+
+ class Point2f():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_POINT2F)
+
+ class Point3f():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_POINT3F)
+
+ class Size():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_SIZE)
+
+ class Rect():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_RECT)
+
+ class Prim():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_DRAW_PRIM)
+
+ class Any():
+ def __new__(self):
+ return cv.GOpaqueT(cv.gapi.CV_ANY)
+
+@register('cv2')
+class GArray():
+ # NB: Inheritance from c++ class cause segfault.
+ # So just aggregate cv.GArrayT instead of inheritance
+ def __new__(cls, argtype):
+ return cv.GArrayT(argtype)
+
+ class Bool():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_BOOL)
+
+ class Int():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_INT)
+
+ class Int64():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_INT64)
+
+ class UInt64():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_UINT64)
+
+ class Double():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_DOUBLE)
+
+ class Float():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_FLOAT)
+
+ class String():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_STRING)
+
+ class Point():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_POINT)
+
+ class Point2f():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_POINT2F)
+
+ class Point3f():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_POINT3F)
+
+ class Size():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_SIZE)
+
+ class Rect():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_RECT)
+
+ class Scalar():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_SCALAR)
+
+ class Mat():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_MAT)
+
+ class GMat():
+ def __new__(self):
+ return cv.GArrayT(cv.gapi.CV_GMAT)
+
+ class Prim():
+ def __new__(self):
+ return cv.GArray(cv.gapi.CV_DRAW_PRIM)
+
+ class Any():
+ def __new__(self):
+ return cv.GArray(cv.gapi.CV_ANY)
+
+
+# NB: Top lvl decorator takes arguments
+def op(op_id, in_types, out_types):
+
+ garray_types= {
+ cv.GArray.Bool: cv.gapi.CV_BOOL,
+ cv.GArray.Int: cv.gapi.CV_INT,
+ cv.GArray.Int64: cv.gapi.CV_INT64,
+ cv.GArray.UInt64: cv.gapi.CV_UINT64,
+ cv.GArray.Double: cv.gapi.CV_DOUBLE,
+ cv.GArray.Float: cv.gapi.CV_FLOAT,
+ cv.GArray.String: cv.gapi.CV_STRING,
+ cv.GArray.Point: cv.gapi.CV_POINT,
+ cv.GArray.Point2f: cv.gapi.CV_POINT2F,
+ cv.GArray.Point3f: cv.gapi.CV_POINT3F,
+ cv.GArray.Size: cv.gapi.CV_SIZE,
+ cv.GArray.Rect: cv.gapi.CV_RECT,
+ cv.GArray.Scalar: cv.gapi.CV_SCALAR,
+ cv.GArray.Mat: cv.gapi.CV_MAT,
+ cv.GArray.GMat: cv.gapi.CV_GMAT,
+ cv.GArray.Prim: cv.gapi.CV_DRAW_PRIM,
+ cv.GArray.Any: cv.gapi.CV_ANY
+ }
+
+ gopaque_types= {
+ cv.GOpaque.Size: cv.gapi.CV_SIZE,
+ cv.GOpaque.Rect: cv.gapi.CV_RECT,
+ cv.GOpaque.Bool: cv.gapi.CV_BOOL,
+ cv.GOpaque.Int: cv.gapi.CV_INT,
+ cv.GOpaque.Int64: cv.gapi.CV_INT64,
+ cv.GOpaque.UInt64: cv.gapi.CV_UINT64,
+ cv.GOpaque.Double: cv.gapi.CV_DOUBLE,
+ cv.GOpaque.Float: cv.gapi.CV_FLOAT,
+ cv.GOpaque.String: cv.gapi.CV_STRING,
+ cv.GOpaque.Point: cv.gapi.CV_POINT,
+ cv.GOpaque.Point2f: cv.gapi.CV_POINT2F,
+ cv.GOpaque.Point3f: cv.gapi.CV_POINT3F,
+ cv.GOpaque.Size: cv.gapi.CV_SIZE,
+ cv.GOpaque.Rect: cv.gapi.CV_RECT,
+ cv.GOpaque.Prim: cv.gapi.CV_DRAW_PRIM,
+ cv.GOpaque.Any: cv.gapi.CV_ANY
+ }
+
+ type2str = {
+ cv.gapi.CV_BOOL: 'cv.gapi.CV_BOOL' ,
+ cv.gapi.CV_INT: 'cv.gapi.CV_INT' ,
+ cv.gapi.CV_INT64: 'cv.gapi.CV_INT64' ,
+ cv.gapi.CV_UINT64: 'cv.gapi.CV_UINT64' ,
+ cv.gapi.CV_DOUBLE: 'cv.gapi.CV_DOUBLE' ,
+ cv.gapi.CV_FLOAT: 'cv.gapi.CV_FLOAT' ,
+ cv.gapi.CV_STRING: 'cv.gapi.CV_STRING' ,
+ cv.gapi.CV_POINT: 'cv.gapi.CV_POINT' ,
+ cv.gapi.CV_POINT2F: 'cv.gapi.CV_POINT2F' ,
+ cv.gapi.CV_POINT3F: 'cv.gapi.CV_POINT3F' ,
+ cv.gapi.CV_SIZE: 'cv.gapi.CV_SIZE',
+ cv.gapi.CV_RECT: 'cv.gapi.CV_RECT',
+ cv.gapi.CV_SCALAR: 'cv.gapi.CV_SCALAR',
+ cv.gapi.CV_MAT: 'cv.gapi.CV_MAT',
+ cv.gapi.CV_GMAT: 'cv.gapi.CV_GMAT',
+ cv.gapi.CV_DRAW_PRIM: 'cv.gapi.CV_DRAW_PRIM'
+ }
+
+ # NB: Second lvl decorator takes class to decorate
+ def op_with_params(cls):
+ if not in_types:
+ raise Exception('{} operation should have at least one input!'.format(cls.__name__))
+
+ if not out_types:
+ raise Exception('{} operation should have at least one output!'.format(cls.__name__))
+
+ for i, t in enumerate(out_types):
+ if t not in [cv.GMat, cv.GScalar, *garray_types, *gopaque_types]:
+ raise Exception('{} unsupported output type: {} in position: {}'
+ .format(cls.__name__, t.__name__, i))
+
+ def on(*args):
+ if len(in_types) != len(args):
+ raise Exception('Invalid number of input elements!\nExpected: {}, Actual: {}'
+ .format(len(in_types), len(args)))
+
+ for i, (t, a) in enumerate(zip(in_types, args)):
+ if t in garray_types:
+ if not isinstance(a, cv.GArrayT):
+ raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
+ .format(cls.__name__, i, cv.GArrayT.__name__, type(a).__name__))
+
+ elif a.type() != garray_types[t]:
+ raise Exception("{} invalid GArrayT type for argument {}.\nExpected: {}, Actual: {}"
+ .format(cls.__name__, i, type2str[garray_types[t]], type2str[a.type()]))
+
+ elif t in gopaque_types:
+ if not isinstance(a, cv.GOpaqueT):
+ raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
+ .format(cls.__name__, i, cv.GOpaqueT.__name__, type(a).__name__))
+
+ elif a.type() != gopaque_types[t]:
+ raise Exception("{} invalid GOpaque type for argument {}.\nExpected: {}, Actual: {}"
+ .format(cls.__name__, i, type2str[gopaque_types[t]], type2str[a.type()]))
+
+ else:
+ if t != type(a):
+ raise Exception('{} invalid input type for argument {}.\nExpected: {}, Actual: {}'
+ .format(cls.__name__, i, t.__name__, type(a).__name__))
+
+ op = cv.gapi.__op(op_id, cls.outMeta, *args)
+
+ out_protos = []
+ for i, out_type in enumerate(out_types):
+ if out_type == cv.GMat:
+ out_protos.append(op.getGMat())
+ elif out_type == cv.GScalar:
+ out_protos.append(op.getGScalar())
+ elif out_type in gopaque_types:
+ out_protos.append(op.getGOpaque(gopaque_types[out_type]))
+ elif out_type in garray_types:
+ out_protos.append(op.getGArray(garray_types[out_type]))
+ else:
+ raise Exception("""In {}: G-API operation can't produce the output with type: {} in position: {}"""
+ .format(cls.__name__, out_type.__name__, i))
+
+ return tuple(out_protos) if len(out_protos) != 1 else out_protos[0]
+
+ # NB: Extend operation class
+ cls.id = op_id
+ cls.on = staticmethod(on)
+ return cls
+
+ return op_with_params
+
+
+def kernel(op_cls):
+ # NB: Second lvl decorator takes class to decorate
+ def kernel_with_params(cls):
+ # NB: Add new members to kernel class
+ cls.id = op_cls.id
+ cls.outMeta = op_cls.outMeta
+ return cls
+
+ return kernel_with_params
+
+
+cv.gapi.wip.GStreamerPipeline = cv.gapi_wip_gst_GStreamerPipeline
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..bdc68bd69c5b275a02c29b8605e8502d84f55bbe
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__init__.pyi
@@ -0,0 +1,349 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.typing
+import typing as _typing
+
+
+from cv2.gapi import core as core
+from cv2.gapi import ie as ie
+from cv2.gapi import imgproc as imgproc
+from cv2.gapi import oak as oak
+from cv2.gapi import onnx as onnx
+from cv2.gapi import ot as ot
+from cv2.gapi import ov as ov
+from cv2.gapi import own as own
+from cv2.gapi import render as render
+from cv2.gapi import streaming as streaming
+from cv2.gapi import video as video
+from cv2.gapi import wip as wip
+
+
+# Enumerations
+StereoOutputFormat_DEPTH_FLOAT16: int
+STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16: int
+StereoOutputFormat_DEPTH_FLOAT32: int
+STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32: int
+StereoOutputFormat_DISPARITY_FIXED16_11_5: int
+STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5: int
+StereoOutputFormat_DISPARITY_FIXED16_12_4: int
+STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4: int
+StereoOutputFormat_DEPTH_16F: int
+STEREO_OUTPUT_FORMAT_DEPTH_16F: int
+StereoOutputFormat_DEPTH_32F: int
+STEREO_OUTPUT_FORMAT_DEPTH_32F: int
+StereoOutputFormat_DISPARITY_16Q_10_5: int
+STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5: int
+StereoOutputFormat_DISPARITY_16Q_11_4: int
+STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4: int
+StereoOutputFormat = int
+"""One of [StereoOutputFormat_DEPTH_FLOAT16, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16, StereoOutputFormat_DEPTH_FLOAT32, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32, StereoOutputFormat_DISPARITY_FIXED16_11_5, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5, StereoOutputFormat_DISPARITY_FIXED16_12_4, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4, StereoOutputFormat_DEPTH_16F, STEREO_OUTPUT_FORMAT_DEPTH_16F, StereoOutputFormat_DEPTH_32F, STEREO_OUTPUT_FORMAT_DEPTH_32F, StereoOutputFormat_DISPARITY_16Q_10_5, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5, StereoOutputFormat_DISPARITY_16Q_11_4, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4]"""
+
+CV_BOOL: int
+CV_INT: int
+CV_INT64: int
+CV_UINT64: int
+CV_DOUBLE: int
+CV_FLOAT: int
+CV_STRING: int
+CV_POINT: int
+CV_POINT2F: int
+CV_POINT3F: int
+CV_SIZE: int
+CV_RECT: int
+CV_SCALAR: int
+CV_MAT: int
+CV_GMAT: int
+CV_DRAW_PRIM: int
+CV_ANY: int
+ArgType = int
+"""One of [CV_BOOL, CV_INT, CV_INT64, CV_UINT64, CV_DOUBLE, CV_FLOAT, CV_STRING, CV_POINT, CV_POINT2F, CV_POINT3F, CV_SIZE, CV_RECT, CV_SCALAR, CV_MAT, CV_GMAT, CV_DRAW_PRIM, CV_ANY]"""
+
+
+
+# Classes
+class GNetParam:
+ ...
+
+class GNetPackage:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, nets: _typing.Sequence[GNetParam]) -> None: ...
+
+
+
+# Functions
+def BGR2Gray(src: cv2.GMat) -> cv2.GMat: ...
+
+def BGR2I420(src: cv2.GMat) -> cv2.GMat: ...
+
+def BGR2LUV(src: cv2.GMat) -> cv2.GMat: ...
+
+def BGR2RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+def BGR2YUV(src: cv2.GMat) -> cv2.GMat: ...
+
+def BayerGR2RGB(src_gr: cv2.GMat) -> cv2.GMat: ...
+
+def Canny(image: cv2.GMat, threshold1: float, threshold2: float, apertureSize: int = ..., L2gradient: bool = ...) -> cv2.GMat: ...
+
+def I4202BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+def I4202RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+def LUT(src: cv2.GMat, lut: cv2.typing.MatLike) -> cv2.GMat: ...
+
+def LUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+def Laplacian(src: cv2.GMat, ddepth: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.GMat: ...
+
+def NV12toBGR(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+def NV12toGray(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+def NV12toRGB(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+@_typing.overload
+def RGB2Gray(src: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def RGB2Gray(src: cv2.GMat, rY: float, gY: float, bY: float) -> cv2.GMat: ...
+
+def RGB2HSV(src: cv2.GMat) -> cv2.GMat: ...
+
+def RGB2I420(src: cv2.GMat) -> cv2.GMat: ...
+
+def RGB2Lab(src: cv2.GMat) -> cv2.GMat: ...
+
+def RGB2YUV(src: cv2.GMat) -> cv2.GMat: ...
+
+def RGB2YUV422(src: cv2.GMat) -> cv2.GMat: ...
+
+def Sobel(src: cv2.GMat, ddepth: int, dx: int, dy: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def SobelXY(src: cv2.GMat, ddepth: int, order: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+def YUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+def YUV2RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+def absDiff(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+def absDiffC(src: cv2.GMat, c: cv2.GScalar) -> cv2.GMat: ...
+
+def add(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+@_typing.overload
+def addC(src1: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+@_typing.overload
+def addC(c: cv2.GScalar, src1: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+def addWeighted(src1: cv2.GMat, alpha: float, src2: cv2.GMat, beta: float, gamma: float, ddepth: int = ...) -> cv2.GMat: ...
+
+def bilateralFilter(src: cv2.GMat, d: int, sigmaColor: float, sigmaSpace: float, borderType: int = ...) -> cv2.GMat: ...
+
+@_typing.overload
+def bitwise_and(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def bitwise_and(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+def bitwise_not(src: cv2.GMat) -> cv2.GMat: ...
+
+@_typing.overload
+def bitwise_or(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def bitwise_or(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def bitwise_xor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def bitwise_xor(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+def blur(src: cv2.GMat, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+@_typing.overload
+def boundingRect(src: cv2.GMat) -> cv2.GOpaqueT: ...
+@_typing.overload
+def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
+@_typing.overload
+def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
+
+def boxFilter(src: cv2.GMat, dtype: int, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def cartToPolar(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+@_typing.overload
+def cmpEQ(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpEQ(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def cmpGE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpGE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def cmpGT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpGT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def cmpLE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpLE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def cmpLT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpLT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def cmpNE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def cmpNE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+def combine(lhs: cv2.GKernelPackage, rhs: cv2.GKernelPackage) -> cv2.GKernelPackage: ...
+
+@_typing.overload
+def concatHor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def concatHor(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...
+
+@_typing.overload
+def concatVert(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+@_typing.overload
+def concatVert(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...
+
+def convertTo(src: cv2.GMat, rdepth: int, alpha: float = ..., beta: float = ...) -> cv2.GMat: ...
+
+def copy(in_: cv2.GMat) -> cv2.GMat: ...
+
+def countNonZero(src: cv2.GMat) -> cv2.GOpaqueT: ...
+
+def crop(src: cv2.GMat, rect: cv2.typing.Rect) -> cv2.GMat: ...
+
+def dilate(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def dilate3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def div(src1: cv2.GMat, src2: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+def divC(src: cv2.GMat, divisor: cv2.GScalar, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+def divRC(divident: cv2.GScalar, src: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+def equalizeHist(src: cv2.GMat) -> cv2.GMat: ...
+
+def erode(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def erode3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def filter2D(src: cv2.GMat, ddepth: int, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., delta: cv2.typing.Scalar = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def flip(src: cv2.GMat, flipCode: int) -> cv2.GMat: ...
+
+def gaussianBlur(src: cv2.GMat, ksize: cv2.typing.Size, sigmaX: float, sigmaY: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def goodFeaturesToTrack(image: cv2.GMat, maxCorners: int, qualityLevel: float, minDistance: float, mask: cv2.typing.MatLike | None = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> cv2.GArrayT: ...
+
+def inRange(src: cv2.GMat, threshLow: cv2.GScalar, threshUp: cv2.GScalar) -> cv2.GMat: ...
+
+@_typing.overload
+def infer(name: str, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
+@_typing.overload
+def infer(name: str, roi: cv2.GOpaqueT, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
+@_typing.overload
+def infer(name: str, rois: cv2.GArrayT, inputs: cv2.GInferInputs) -> cv2.GInferListOutputs: ...
+
+def infer2(name: str, in_: cv2.GMat, inputs: cv2.GInferListInputs) -> cv2.GInferListOutputs: ...
+
+def integral(src: cv2.GMat, sdepth: int = ..., sqdepth: int = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+@_typing.overload
+def kmeans(data: cv2.GMat, K: int, bestLabels: cv2.GMat, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
+@_typing.overload
+def kmeans(data: cv2.GMat, K: int, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
+@_typing.overload
+def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...
+@_typing.overload
+def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...
+
+def mask(src: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...
+
+def max(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+def mean(src: cv2.GMat) -> cv2.GScalar: ...
+
+def medianBlur(src: cv2.GMat, ksize: int) -> cv2.GMat: ...
+
+def merge3(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat) -> cv2.GMat: ...
+
+def merge4(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat, src4: cv2.GMat) -> cv2.GMat: ...
+
+def min(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+def morphologyEx(src: cv2.GMat, op: cv2.MorphTypes, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: cv2.BorderTypes = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def mul(src1: cv2.GMat, src2: cv2.GMat, scale: float = ..., ddepth: int = ...) -> cv2.GMat: ...
+
+@_typing.overload
+def mulC(src: cv2.GMat, multiplier: float, ddepth: int = ...) -> cv2.GMat: ...
+@_typing.overload
+def mulC(src: cv2.GMat, multiplier: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+@_typing.overload
+def mulC(multiplier: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+def normInf(src: cv2.GMat) -> cv2.GScalar: ...
+
+def normL1(src: cv2.GMat) -> cv2.GScalar: ...
+
+def normL2(src: cv2.GMat) -> cv2.GScalar: ...
+
+def normalize(src: cv2.GMat, alpha: float, beta: float, norm_type: int, ddepth: int = ...) -> cv2.GMat: ...
+
+@_typing.overload
+def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., filterLabel: int = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...
+@_typing.overload
+def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float, alignmentToSquare: bool, filterOutOfBounds: bool) -> cv2.GArrayT: ...
+
+def parseYolo(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., nmsThreshold: float = ..., anchors: _typing.Sequence[float] = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...
+
+def phase(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> cv2.GMat: ...
+
+def polarToCart(magnitude: cv2.GMat, angle: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+def remap(src: cv2.GMat, map1: cv2.typing.MatLike, map2: cv2.typing.MatLike, interpolation: int, borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def resize(src: cv2.GMat, dsize: cv2.typing.Size, fx: float = ..., fy: float = ..., interpolation: int = ...) -> cv2.GMat: ...
+
+def select(src1: cv2.GMat, src2: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...
+
+def sepFilter(src: cv2.GMat, ddepth: int, kernelX: cv2.typing.MatLike, kernelY: cv2.typing.MatLike, anchor: cv2.typing.Point, delta: cv2.typing.Scalar, borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def split3(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat]: ...
+
+def split4(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat, cv2.GMat]: ...
+
+def sqrt(src: cv2.GMat) -> cv2.GMat: ...
+
+def sub(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+def subC(src: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+
+def subRC(c: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+def sum(src: cv2.GMat) -> cv2.GScalar: ...
+
+@_typing.overload
+def threshold(src: cv2.GMat, thresh: cv2.GScalar, maxval: cv2.GScalar, type: int) -> cv2.GMat: ...
+@_typing.overload
+def threshold(src: cv2.GMat, maxval: cv2.GScalar, type: int) -> tuple[cv2.GMat, cv2.GScalar]: ...
+
+def transpose(src: cv2.GMat) -> cv2.GMat: ...
+
+def warpAffine(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+def warpPerspective(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..19708e5165512bcd42ff14612e56d6145ad52faa
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..a418f70ab798fcac580de697773d460ea9a12055
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/__init__.pyi
@@ -0,0 +1,7 @@
+__all__: list[str] = []
+
+from cv2.gapi.core import cpu as cpu
+from cv2.gapi.core import fluid as fluid
+from cv2.gapi.core import ocl as ocl
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/cpu/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/cpu/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/cpu/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/fluid/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/fluid/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/fluid/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..700ce621174e38511b7d9c32ced225fbfcca0338
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/__init__.pyi
@@ -0,0 +1,51 @@
+__all__: list[str] = []
+
+import cv2.typing
+import typing as _typing
+
+
+from cv2.gapi.ie import detail as detail
+
+
+# Enumerations
+TraitAs_TENSOR: int
+TRAIT_AS_TENSOR: int
+TraitAs_IMAGE: int
+TRAIT_AS_IMAGE: int
+TraitAs = int
+"""One of [TraitAs_TENSOR, TRAIT_AS_TENSOR, TraitAs_IMAGE, TRAIT_AS_IMAGE]"""
+
+Sync: int
+SYNC: int
+Async: int
+ASYNC: int
+InferMode = int
+"""One of [Sync, SYNC, Async, ASYNC]"""
+
+
+
+# Classes
+class PyParams:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, tag: str, model: str, weights: str, device: str) -> None: ...
+ @_typing.overload
+ def __init__(self, tag: str, model: str, device: str) -> None: ...
+
+ def constInput(self, layer_name: str, data: cv2.typing.MatLike, hint: TraitAs = ...) -> PyParams: ...
+
+ def cfgNumRequests(self, nireq: int) -> PyParams: ...
+
+ def cfgBatchSize(self, size: int) -> PyParams: ...
+
+
+
+# Functions
+@_typing.overload
+def params(tag: str, model: str, weights: str, device: str) -> PyParams: ...
+@_typing.overload
+def params(tag: str, model: str, device: str) -> PyParams: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..e9aa68c68a73a2a25a419d64f5781581931251ab
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi
@@ -0,0 +1,12 @@
+__all__: list[str] = []
+
+ParamDesc_Kind_Load: int
+PARAM_DESC_KIND_LOAD: int
+ParamDesc_Kind_Import: int
+PARAM_DESC_KIND_IMPORT: int
+ParamDesc_Kind = int
+"""One of [ParamDesc_Kind_Load, PARAM_DESC_KIND_LOAD, ParamDesc_Kind_Import, PARAM_DESC_KIND_IMPORT]"""
+
+
+# Classes
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..0d4b571303e9fa6f30f5928b113a0a5403b61069
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/__init__.pyi
@@ -0,0 +1,5 @@
+__all__: list[str] = []
+
+from cv2.gapi.imgproc import fluid as fluid
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/fluid/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/fluid/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/imgproc/fluid/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/oak/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/oak/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..2a871fab56aa9149d1fd71986bd024d1d6b2a9d4
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/oak/__init__.pyi
@@ -0,0 +1,37 @@
+__all__: list[str] = []
+
+EncoderConfig_RateControlMode_CBR: int
+ENCODER_CONFIG_RATE_CONTROL_MODE_CBR: int
+EncoderConfig_RateControlMode_VBR: int
+ENCODER_CONFIG_RATE_CONTROL_MODE_VBR: int
+EncoderConfig_RateControlMode = int
+"""One of [EncoderConfig_RateControlMode_CBR, ENCODER_CONFIG_RATE_CONTROL_MODE_CBR, EncoderConfig_RateControlMode_VBR, ENCODER_CONFIG_RATE_CONTROL_MODE_VBR]"""
+
+EncoderConfig_Profile_H264_BASELINE: int
+ENCODER_CONFIG_PROFILE_H264_BASELINE: int
+EncoderConfig_Profile_H264_HIGH: int
+ENCODER_CONFIG_PROFILE_H264_HIGH: int
+EncoderConfig_Profile_H264_MAIN: int
+ENCODER_CONFIG_PROFILE_H264_MAIN: int
+EncoderConfig_Profile_H265_MAIN: int
+ENCODER_CONFIG_PROFILE_H265_MAIN: int
+EncoderConfig_Profile_MJPEG: int
+ENCODER_CONFIG_PROFILE_MJPEG: int
+EncoderConfig_Profile = int
+"""One of [EncoderConfig_Profile_H264_BASELINE, ENCODER_CONFIG_PROFILE_H264_BASELINE, EncoderConfig_Profile_H264_HIGH, ENCODER_CONFIG_PROFILE_H264_HIGH, EncoderConfig_Profile_H264_MAIN, ENCODER_CONFIG_PROFILE_H264_MAIN, EncoderConfig_Profile_H265_MAIN, ENCODER_CONFIG_PROFILE_H265_MAIN, EncoderConfig_Profile_MJPEG, ENCODER_CONFIG_PROFILE_MJPEG]"""
+
+ColorCameraParams_BoardSocket_RGB: int
+COLOR_CAMERA_PARAMS_BOARD_SOCKET_RGB: int
+ColorCameraParams_BoardSocket_BGR: int
+COLOR_CAMERA_PARAMS_BOARD_SOCKET_BGR: int
+ColorCameraParams_BoardSocket = int
+"""One of [ColorCameraParams_BoardSocket_RGB, COLOR_CAMERA_PARAMS_BOARD_SOCKET_RGB, ColorCameraParams_BoardSocket_BGR, COLOR_CAMERA_PARAMS_BOARD_SOCKET_BGR]"""
+
+ColorCameraParams_Resolution_THE_1080_P: int
+COLOR_CAMERA_PARAMS_RESOLUTION_THE_1080_P: int
+ColorCameraParams_Resolution = int
+"""One of [ColorCameraParams_Resolution_THE_1080_P, COLOR_CAMERA_PARAMS_RESOLUTION_THE_1080_P]"""
+
+
+# Classes
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..9185d6504fd6c541560b79c0e88e75b27d24a1fe
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/__init__.pyi
@@ -0,0 +1,51 @@
+__all__: list[str] = []
+
+import cv2.gapi.onnx.ep
+import cv2.typing
+import typing as _typing
+
+
+from cv2.gapi.onnx import ep as ep
+
+
+# Enumerations
+TraitAs_TENSOR: int
+TRAIT_AS_TENSOR: int
+TraitAs_IMAGE: int
+TRAIT_AS_IMAGE: int
+TraitAs = int
+"""One of [TraitAs_TENSOR, TRAIT_AS_TENSOR, TraitAs_IMAGE, TRAIT_AS_IMAGE]"""
+
+
+
+# Classes
+class PyParams:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, tag: str, model_path: str) -> None: ...
+
+ def cfgMeanStd(self, layer_name: str, m: cv2.typing.Scalar, s: cv2.typing.Scalar) -> PyParams: ...
+
+ def cfgNormalize(self, layer_name: str, flag: bool) -> PyParams: ...
+
+ @_typing.overload
+ def cfgAddExecutionProvider(self, ep: cv2.gapi.onnx.ep.OpenVINO) -> PyParams: ...
+ @_typing.overload
+ def cfgAddExecutionProvider(self, ep: cv2.gapi.onnx.ep.DirectML) -> PyParams: ...
+ @_typing.overload
+ def cfgAddExecutionProvider(self, ep: cv2.gapi.onnx.ep.CoreML) -> PyParams: ...
+ @_typing.overload
+ def cfgAddExecutionProvider(self, ep: cv2.gapi.onnx.ep.CUDA) -> PyParams: ...
+ @_typing.overload
+ def cfgAddExecutionProvider(self, ep: cv2.gapi.onnx.ep.TensorRT) -> PyParams: ...
+
+ def cfgDisableMemPattern(self) -> PyParams: ...
+
+
+
+# Functions
+def params(tag: str, model_path: str) -> PyParams: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/ep/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/ep/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..e32f039760ac0f095f7d1aa6112e6e1ac692fc85
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/onnx/ep/__init__.pyi
@@ -0,0 +1,63 @@
+__all__: list[str] = []
+
+import cv2.typing
+import typing as _typing
+
+
+# Classes
+class CoreML:
+ # Functions
+ def __init__(self) -> None: ...
+
+ def cfgUseCPUOnly(self) -> CoreML: ...
+
+ def cfgEnableOnSubgraph(self) -> CoreML: ...
+
+ def cfgEnableOnlyNeuralEngine(self) -> CoreML: ...
+
+
+class CUDA:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, dev_id: int) -> None: ...
+
+
+class TensorRT:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, dev_id: int) -> None: ...
+
+
+class OpenVINO:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, dev_type: str) -> None: ...
+ @_typing.overload
+ def __init__(self, params: cv2.typing.map_string_and_string) -> None: ...
+
+ def cfgCacheDir(self, dir: str) -> OpenVINO: ...
+
+ def cfgNumThreads(self, nthreads: int) -> OpenVINO: ...
+
+ def cfgEnableOpenCLThrottling(self) -> OpenVINO: ...
+
+ def cfgEnableDynamicShapes(self) -> OpenVINO: ...
+
+
+class DirectML:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, device_id: int) -> None: ...
+ @_typing.overload
+ def __init__(self, adapter_name: str) -> None: ...
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..09e95e7c124d443526e47c881e8066923a58a26e
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/__init__.pyi
@@ -0,0 +1,32 @@
+__all__: list[str] = []
+
+import cv2
+import typing as _typing
+
+
+from cv2.gapi.ot import cpu as cpu
+
+
+# Enumerations
+NEW: int
+TRACKED: int
+LOST: int
+TrackingStatus = int
+"""One of [NEW, TRACKED, LOST]"""
+
+
+
+# Classes
+class ObjectTrackerParams:
+ max_num_objects: int
+ input_image_format: int
+ tracking_per_class: bool
+
+
+# Functions
+@_typing.overload
+def track(mat: cv2.GMat, detected_rects: cv2.GArrayT, detected_class_labels: cv2.GArrayT, delta: float) -> tuple[cv2.GArrayT, cv2.GArrayT, cv2.GArrayT, cv2.GArrayT]: ...
+@_typing.overload
+def track(frame: cv2.GFrame, detected_rects: cv2.GArrayT, detected_class_labels: cv2.GArrayT, delta: float) -> tuple[cv2.GArrayT, cv2.GArrayT, cv2.GArrayT, cv2.GArrayT]: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..9bc2c8683cd47aa3c2466a9c40832c7a04e464fb
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi
@@ -0,0 +1,74 @@
+__all__: list[str] = []
+
+import cv2.typing
+import typing as _typing
+
+
+# Classes
+class PyParams:
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, tag: str, model_path: str, bin_path: str, device: str) -> None: ...
+ @_typing.overload
+ def __init__(self, tag: str, blob_path: str, device: str) -> None: ...
+
+ def cfgPluginConfig(self, config: cv2.typing.map_string_and_string) -> PyParams: ...
+
+ @_typing.overload
+ def cfgInputTensorLayout(self, tensor_layout: str) -> PyParams: ...
+ @_typing.overload
+ def cfgInputTensorLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ...
+
+ @_typing.overload
+ def cfgInputModelLayout(self, tensor_layout: str) -> PyParams: ...
+ @_typing.overload
+ def cfgInputModelLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ...
+
+ @_typing.overload
+ def cfgOutputTensorLayout(self, tensor_layout: str) -> PyParams: ...
+ @_typing.overload
+ def cfgOutputTensorLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ...
+
+ @_typing.overload
+ def cfgOutputModelLayout(self, tensor_layout: str) -> PyParams: ...
+ @_typing.overload
+ def cfgOutputModelLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ...
+
+ @_typing.overload
+ def cfgOutputTensorPrecision(self, precision: int) -> PyParams: ...
+ @_typing.overload
+ def cfgOutputTensorPrecision(self, precision_map: cv2.typing.map_string_and_int) -> PyParams: ...
+
+ @_typing.overload
+ def cfgReshape(self, new_shape: _typing.Sequence[int]) -> PyParams: ...
+ @_typing.overload
+ def cfgReshape(self, new_shape_map: cv2.typing.map_string_and_vector_size_t) -> PyParams: ...
+
+ def cfgNumRequests(self, nireq: int) -> PyParams: ...
+
+ @_typing.overload
+ def cfgMean(self, mean_values: _typing.Sequence[float]) -> PyParams: ...
+ @_typing.overload
+ def cfgMean(self, mean_map: cv2.typing.map_string_and_vector_float) -> PyParams: ...
+
+ @_typing.overload
+ def cfgScale(self, scale_values: _typing.Sequence[float]) -> PyParams: ...
+ @_typing.overload
+ def cfgScale(self, scale_map: cv2.typing.map_string_and_vector_float) -> PyParams: ...
+
+ @_typing.overload
+ def cfgResize(self, interpolation: int) -> PyParams: ...
+ @_typing.overload
+ def cfgResize(self, interpolation: cv2.typing.map_string_and_int) -> PyParams: ...
+
+
+
+# Functions
+@_typing.overload
+def params(tag: str, model_path: str, weights: str, device: str) -> PyParams: ...
+@_typing.overload
+def params(tag: str, bin_path: str, device: str) -> PyParams: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..9ac6ecc9d0061d0cda93097f6a4fe9d154c2fcab
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/__init__.pyi
@@ -0,0 +1,5 @@
+__all__: list[str] = []
+
+from cv2.gapi.own import detail as detail
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/detail/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/detail/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..cde2a28e5daddbafa3c4354eb4db93053629d49a
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/own/detail/__init__.pyi
@@ -0,0 +1,10 @@
+__all__: list[str] = []
+
+MatHeader_AUTO_STEP: int
+MAT_HEADER_AUTO_STEP: int
+MatHeader_TYPE_MASK: int
+MAT_HEADER_TYPE_MASK: int
+
+
+# Classes
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..229877aa6c1063c56e3162c9d5c76d95cc6f472b
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/__init__.pyi
@@ -0,0 +1,5 @@
+__all__: list[str] = []
+
+from cv2.gapi.render import ocv as ocv
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/ocv/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/ocv/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/render/ocv/__init__.pyi
@@ -0,0 +1,9 @@
+__all__: list[str] = []
+
+import cv2
+
+
+# Functions
+def kernels() -> cv2.GKernelPackage: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/streaming/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/streaming/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..2b70ce7ed2dcf40e9d89607cb3fa7c99e69f2d03
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/streaming/__init__.pyi
@@ -0,0 +1,42 @@
+__all__: list[str] = []
+
+import cv2
+import typing as _typing
+
+
+# Enumerations
+sync_policy_dont_sync: int
+SYNC_POLICY_DONT_SYNC: int
+sync_policy_drop: int
+SYNC_POLICY_DROP: int
+sync_policy = int
+"""One of [sync_policy_dont_sync, SYNC_POLICY_DONT_SYNC, sync_policy_drop, SYNC_POLICY_DROP]"""
+
+
+
+# Classes
+class queue_capacity:
+ capacity: int
+
+ # Functions
+ def __init__(self, cap: int = ...) -> None: ...
+
+
+
+# Functions
+def desync(g: cv2.GMat) -> cv2.GMat: ...
+
+def seqNo(arg1: cv2.GMat) -> cv2.GOpaqueT: ...
+
+def seq_id(arg1: cv2.GMat) -> cv2.GOpaqueT: ...
+
+@_typing.overload
+def size(src: cv2.GMat) -> cv2.GOpaqueT: ...
+@_typing.overload
+def size(r: cv2.GOpaqueT) -> cv2.GOpaqueT: ...
+@_typing.overload
+def size(src: cv2.GFrame) -> cv2.GOpaqueT: ...
+
+def timestamp(arg1: cv2.GMat) -> cv2.GOpaqueT: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/video/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/video/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..e1c5477db9927fbfd260bdc490a8e80739764b2d
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/video/__init__.pyi
@@ -0,0 +1,10 @@
+__all__: list[str] = []
+
+# Enumerations
+TYPE_BS_MOG2: int
+TYPE_BS_KNN: int
+BackgroundSubtractorType = int
+"""One of [TYPE_BS_MOG2, TYPE_BS_KNN]"""
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..c38bca4c5085c288bf8921b8ed8624f22d2dba90
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/__init__.pyi
@@ -0,0 +1,41 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.gapi
+import cv2.gapi.wip.gst
+import cv2.typing
+import typing as _typing
+
+
+from cv2.gapi.wip import draw as draw
+from cv2.gapi.wip import gst as gst
+from cv2.gapi.wip import onevpl as onevpl
+
+
+# Classes
+class GOutputs:
+ # Functions
+ def getGMat(self) -> cv2.GMat: ...
+
+ def getGScalar(self) -> cv2.GScalar: ...
+
+ def getGArray(self, type: cv2.gapi.ArgType) -> cv2.GArrayT: ...
+
+ def getGOpaque(self, type: cv2.gapi.ArgType) -> cv2.GOpaqueT: ...
+
+
+class IStreamSource:
+ ...
+
+
+# Functions
+def get_streaming_source(pipeline: cv2.gapi.wip.gst.GStreamerPipeline, appsinkName: str, outputType: cv2.gapi.wip.gst.GStreamerSource_OutputType = ...) -> IStreamSource: ...
+
+@_typing.overload
+def make_capture_src(path: str, properties: cv2.typing.map_int_and_double = ...) -> IStreamSource: ...
+@_typing.overload
+def make_capture_src(id: int, properties: cv2.typing.map_int_and_double = ...) -> IStreamSource: ...
+
+def make_gst_src(pipeline: str, outputType: cv2.gapi.wip.gst.GStreamerSource_OutputType = ...) -> IStreamSource: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/draw/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/draw/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..075337837eaf411a536bb5ea8ca0d7084b9ad52b
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/draw/__init__.pyi
@@ -0,0 +1,119 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.typing
+import typing as _typing
+
+
+# Classes
+class Text:
+ text: str
+ org: cv2.typing.Point
+ ff: int
+ fs: float
+ color: cv2.typing.Scalar
+ thick: int
+ lt: int
+ bottom_left_origin: bool
+
+ # Functions
+ @_typing.overload
+ def __init__(self, text_: str, org_: cv2.typing.Point, ff_: int, fs_: float, color_: cv2.typing.Scalar, thick_: int = ..., lt_: int = ..., bottom_left_origin_: bool = ...) -> None: ...
+ @_typing.overload
+ def __init__(self) -> None: ...
+
+
+class Rect:
+ rect: cv2.typing.Rect
+ color: cv2.typing.Scalar
+ thick: int
+ lt: int
+ shift: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, rect_: cv2.typing.Rect2i, color_: cv2.typing.Scalar, thick_: int = ..., lt_: int = ..., shift_: int = ...) -> None: ...
+
+
+class Circle:
+ center: cv2.typing.Point
+ radius: int
+ color: cv2.typing.Scalar
+ thick: int
+ lt: int
+ shift: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self, center_: cv2.typing.Point, radius_: int, color_: cv2.typing.Scalar, thick_: int = ..., lt_: int = ..., shift_: int = ...) -> None: ...
+ @_typing.overload
+ def __init__(self) -> None: ...
+
+
+class Line:
+ pt1: cv2.typing.Point
+ pt2: cv2.typing.Point
+ color: cv2.typing.Scalar
+ thick: int
+ lt: int
+ shift: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self, pt1_: cv2.typing.Point, pt2_: cv2.typing.Point, color_: cv2.typing.Scalar, thick_: int = ..., lt_: int = ..., shift_: int = ...) -> None: ...
+ @_typing.overload
+ def __init__(self) -> None: ...
+
+
+class Mosaic:
+ mos: cv2.typing.Rect
+ cellSz: int
+ decim: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self) -> None: ...
+ @_typing.overload
+ def __init__(self, mos_: cv2.typing.Rect2i, cellSz_: int, decim_: int) -> None: ...
+
+
+class Image:
+ org: cv2.typing.Point
+ img: cv2.typing.MatLike
+ alpha: cv2.typing.MatLike
+
+ # Functions
+ @_typing.overload
+ def __init__(self, org_: cv2.typing.Point, img_: cv2.typing.MatLike, alpha_: cv2.typing.MatLike) -> None: ...
+ @_typing.overload
+ def __init__(self) -> None: ...
+
+
+class Poly:
+ points: _typing.Sequence[cv2.typing.Point]
+ color: cv2.typing.Scalar
+ thick: int
+ lt: int
+ shift: int
+
+ # Functions
+ @_typing.overload
+ def __init__(self, points_: _typing.Sequence[cv2.typing.Point], color_: cv2.typing.Scalar, thick_: int = ..., lt_: int = ..., shift_: int = ...) -> None: ...
+ @_typing.overload
+ def __init__(self) -> None: ...
+
+
+
+# Functions
+@_typing.overload
+def render(bgr: cv2.typing.MatLike, prims: _typing.Sequence[cv2.typing.Prim], args: _typing.Sequence[cv2.GCompileArg] = ...) -> None: ...
+@_typing.overload
+def render(y_plane: cv2.typing.MatLike, uv_plane: cv2.typing.MatLike, prims: _typing.Sequence[cv2.typing.Prim], args: _typing.Sequence[cv2.GCompileArg] = ...) -> None: ...
+
+def render3ch(src: cv2.GMat, prims: cv2.GArrayT) -> cv2.GMat: ...
+
+def renderNV12(y: cv2.GMat, uv: cv2.GMat, prims: cv2.GArrayT) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/gst/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/gst/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..739778186a9ea96cb71a4f06244f895653524a44
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/gst/__init__.pyi
@@ -0,0 +1,17 @@
+__all__: list[str] = []
+
+GStreamerSource_OutputType_FRAME: int
+GSTREAMER_SOURCE_OUTPUT_TYPE_FRAME: int
+GStreamerSource_OutputType_MAT: int
+GSTREAMER_SOURCE_OUTPUT_TYPE_MAT: int
+GStreamerSource_OutputType = int
+"""One of [GStreamerSource_OutputType_FRAME, GSTREAMER_SOURCE_OUTPUT_TYPE_FRAME, GStreamerSource_OutputType_MAT, GSTREAMER_SOURCE_OUTPUT_TYPE_MAT]"""
+
+
+# Classes
+class GStreamerPipeline:
+ # Functions
+ def __init__(self, pipeline: str) -> None: ...
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/onevpl/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/onevpl/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..46acf87b226d3a40736df7bbe985f714eb803887
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/gapi/wip/onevpl/__init__.pyi
@@ -0,0 +1,16 @@
+__all__: list[str] = []
+
+# Enumerations
+AccelType_HOST: int
+ACCEL_TYPE_HOST: int
+AccelType_DX11: int
+ACCEL_TYPE_DX11: int
+AccelType_VAAPI: int
+ACCEL_TYPE_VAAPI: int
+AccelType_LAST_VALUE: int
+ACCEL_TYPE_LAST_VALUE: int
+AccelType = int
+"""One of [AccelType_HOST, ACCEL_TYPE_HOST, AccelType_DX11, ACCEL_TYPE_DX11, AccelType_VAAPI, ACCEL_TYPE_VAAPI, AccelType_LAST_VALUE, ACCEL_TYPE_LAST_VALUE]"""
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/ipp/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/ipp/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..23b6636ee899355b99cfe4fd3c2d5f897e538b33
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/ipp/__init__.pyi
@@ -0,0 +1,14 @@
+__all__: list[str] = []
+
+# Functions
+def getIppVersion() -> str: ...
+
+def setUseIPP(flag: bool) -> None: ...
+
+def setUseIPP_NotExact(flag: bool) -> None: ...
+
+def useIPP() -> bool: ...
+
+def useIPP_NotExact() -> bool: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py2.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py2.py
new file mode 100644
index 0000000000000000000000000000000000000000..07fbae9f7aa704c64acfa4bbce187dc9dbf24759
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py2.py
@@ -0,0 +1,6 @@
+# flake8: noqa
+import sys
+
+if sys.version_info[:2] < (3, 0):
+ def exec_file_wrapper(fpath, g_vars, l_vars):
+ execfile(fpath, g_vars, l_vars)
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py3.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py3.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f3b21ab862d42ed4572bcd52c2c41a72dbc0521
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/load_config_py3.py
@@ -0,0 +1,9 @@
+# flake8: noqa
+import os
+import sys
+
+if sys.version_info[:2] >= (3, 0):
+ def exec_file_wrapper(fpath, g_vars, l_vars):
+ with open(fpath) as f:
+ code = compile(f.read(), os.path.basename(fpath), 'exec')
+ exec(code, g_vars, l_vars)
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/ogl/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/ogl/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..503e1a837721dd524bd7b1f6b34250c98459f274
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/ogl/__init__.pyi
@@ -0,0 +1,51 @@
+__all__: list[str] = []
+
+# Enumerations
+POINTS: int
+LINES: int
+LINE_LOOP: int
+LINE_STRIP: int
+TRIANGLES: int
+TRIANGLE_STRIP: int
+TRIANGLE_FAN: int
+QUADS: int
+QUAD_STRIP: int
+POLYGON: int
+RenderModes = int
+"""One of [POINTS, LINES, LINE_LOOP, LINE_STRIP, TRIANGLES, TRIANGLE_STRIP, TRIANGLE_FAN, QUADS, QUAD_STRIP, POLYGON]"""
+
+
+Buffer_ARRAY_BUFFER: int
+BUFFER_ARRAY_BUFFER: int
+Buffer_ELEMENT_ARRAY_BUFFER: int
+BUFFER_ELEMENT_ARRAY_BUFFER: int
+Buffer_PIXEL_PACK_BUFFER: int
+BUFFER_PIXEL_PACK_BUFFER: int
+Buffer_PIXEL_UNPACK_BUFFER: int
+BUFFER_PIXEL_UNPACK_BUFFER: int
+Buffer_Target = int
+"""One of [Buffer_ARRAY_BUFFER, BUFFER_ARRAY_BUFFER, Buffer_ELEMENT_ARRAY_BUFFER, BUFFER_ELEMENT_ARRAY_BUFFER, Buffer_PIXEL_PACK_BUFFER, BUFFER_PIXEL_PACK_BUFFER, Buffer_PIXEL_UNPACK_BUFFER, BUFFER_PIXEL_UNPACK_BUFFER]"""
+
+Buffer_READ_ONLY: int
+BUFFER_READ_ONLY: int
+Buffer_WRITE_ONLY: int
+BUFFER_WRITE_ONLY: int
+Buffer_READ_WRITE: int
+BUFFER_READ_WRITE: int
+Buffer_Access = int
+"""One of [Buffer_READ_ONLY, BUFFER_READ_ONLY, Buffer_WRITE_ONLY, BUFFER_WRITE_ONLY, Buffer_READ_WRITE, BUFFER_READ_WRITE]"""
+
+Texture2D_NONE: int
+TEXTURE2D_NONE: int
+Texture2D_DEPTH_COMPONENT: int
+TEXTURE2D_DEPTH_COMPONENT: int
+Texture2D_RGB: int
+TEXTURE2D_RGB: int
+Texture2D_RGBA: int
+TEXTURE2D_RGBA: int
+Texture2D_Format = int
+"""One of [Texture2D_NONE, TEXTURE2D_NONE, Texture2D_DEPTH_COMPONENT, TEXTURE2D_DEPTH_COMPONENT, Texture2D_RGB, TEXTURE2D_RGB, Texture2D_RGBA, TEXTURE2D_RGBA]"""
+
+
+# Classes
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/py.typed b/evalkit_tf446/lib/python3.10/site-packages/cv2/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/qt/plugins/platforms/libqxcb.so b/evalkit_tf446/lib/python3.10/site-packages/cv2/qt/plugins/platforms/libqxcb.so
new file mode 100644
index 0000000000000000000000000000000000000000..39d4ced6edb53960004ea67d18643afde064b3e6
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/cv2/qt/plugins/platforms/libqxcb.so differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/samples/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/samples/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..03f2f9633de8ff81541a0ae6d15538100ed78aa5
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/samples/__init__.pyi
@@ -0,0 +1,12 @@
+__all__: list[str] = []
+
+# Functions
+def addSamplesDataSearchPath(path: str) -> None: ...
+
+def addSamplesDataSearchSubDirectory(subdir: str) -> None: ...
+
+def findFile(relative_path: str, required: bool = ..., silentMode: bool = ...) -> str: ...
+
+def findFileOrKeep(relative_path: str, silentMode: bool = ...) -> str: ...
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/segmentation/__init__.pyi b/evalkit_tf446/lib/python3.10/site-packages/cv2/segmentation/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..7baa0d1932b720547a9e5155bfe4eab9b5681822
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/segmentation/__init__.pyi
@@ -0,0 +1,39 @@
+__all__: list[str] = []
+
+import cv2
+import cv2.typing
+import typing as _typing
+
+
+# Classes
+class IntelligentScissorsMB:
+ # Functions
+ def __init__(self) -> None: ...
+
+ def setWeights(self, weight_non_edge: float, weight_gradient_direction: float, weight_gradient_magnitude: float) -> IntelligentScissorsMB: ...
+
+ def setGradientMagnitudeMaxLimit(self, gradient_magnitude_threshold_max: float = ...) -> IntelligentScissorsMB: ...
+
+ def setEdgeFeatureZeroCrossingParameters(self, gradient_magnitude_min_value: float = ...) -> IntelligentScissorsMB: ...
+
+ def setEdgeFeatureCannyParameters(self, threshold1: float, threshold2: float, apertureSize: int = ..., L2gradient: bool = ...) -> IntelligentScissorsMB: ...
+
+ @_typing.overload
+ def applyImage(self, image: cv2.typing.MatLike) -> IntelligentScissorsMB: ...
+ @_typing.overload
+ def applyImage(self, image: cv2.UMat) -> IntelligentScissorsMB: ...
+
+ @_typing.overload
+ def applyImageFeatures(self, non_edge: cv2.typing.MatLike, gradient_direction: cv2.typing.MatLike, gradient_magnitude: cv2.typing.MatLike, image: cv2.typing.MatLike | None = ...) -> IntelligentScissorsMB: ...
+ @_typing.overload
+ def applyImageFeatures(self, non_edge: cv2.UMat, gradient_direction: cv2.UMat, gradient_magnitude: cv2.UMat, image: cv2.UMat | None = ...) -> IntelligentScissorsMB: ...
+
+ def buildMap(self, sourcePt: cv2.typing.Point) -> None: ...
+
+ @_typing.overload
+ def getContour(self, targetPt: cv2.typing.Point, contour: cv2.typing.MatLike | None = ..., backward: bool = ...) -> cv2.typing.MatLike: ...
+ @_typing.overload
+ def getContour(self, targetPt: cv2.typing.Point, contour: cv2.UMat | None = ..., backward: bool = ...) -> cv2.UMat: ...
+
+
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9146aac633a6db8f86722aaeec13edc33bdca4f2
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__init__.py
@@ -0,0 +1,178 @@
+__all__ = [
+ "IntPointer",
+ "MatLike",
+ "MatShape",
+ "Size",
+ "Size2f",
+ "Scalar",
+ "Point",
+ "Point2i",
+ "Point2f",
+ "Point2d",
+ "Point3i",
+ "Point3f",
+ "Point3d",
+ "Range",
+ "Rect",
+ "Rect2i",
+ "Rect2f",
+ "Rect2d",
+ "Moments",
+ "RotatedRect",
+ "TermCriteria",
+ "Vec2i",
+ "Vec2f",
+ "Vec2d",
+ "Vec3i",
+ "Vec3f",
+ "Vec3d",
+ "Vec4i",
+ "Vec4f",
+ "Vec4d",
+ "Vec6f",
+ "FeatureDetector",
+ "DescriptorExtractor",
+ "FeatureExtractor",
+ "GProtoArg",
+ "GProtoInputArgs",
+ "GProtoOutputArgs",
+ "GRunArg",
+ "GOptRunArg",
+ "GMetaArg",
+ "Prim",
+ "Matx33f",
+ "Matx33d",
+ "Matx44f",
+ "Matx44d",
+ "GTypeInfo",
+ "ExtractArgsCallback",
+ "ExtractMetaCallback",
+ "LayerId",
+ "IndexParams",
+ "SearchParams",
+ "map_string_and_string",
+ "map_string_and_int",
+ "map_string_and_vector_size_t",
+ "map_string_and_vector_float",
+ "map_int_and_double",
+]
+
+import cv2.dnn
+import cv2.mat_wrapper
+import typing as _typing
+import cv2.gapi.wip.draw
+import numpy
+import cv2
+
+
+if _typing.TYPE_CHECKING:
+ NumPyArrayNumeric = numpy.ndarray[_typing.Any, numpy.dtype[numpy.integer[_typing.Any] | numpy.floating[_typing.Any]]]
+else:
+ NumPyArrayNumeric = numpy.ndarray
+
+
+if _typing.TYPE_CHECKING:
+ NumPyArrayFloat32 = numpy.ndarray[_typing.Any, numpy.dtype[numpy.float32]]
+else:
+ NumPyArrayFloat32 = numpy.ndarray
+
+
+if _typing.TYPE_CHECKING:
+ NumPyArrayFloat64 = numpy.ndarray[_typing.Any, numpy.dtype[numpy.float64]]
+else:
+ NumPyArrayFloat64 = numpy.ndarray
+
+
+if _typing.TYPE_CHECKING:
+ TermCriteria_Type = cv2.TermCriteria_Type
+else:
+ TermCriteria_Type = int
+
+
+IntPointer = int
+"""Represents an arbitrary pointer"""
+MatLike = _typing.Union[cv2.mat_wrapper.Mat, NumPyArrayNumeric]
+MatShape = _typing.Sequence[int]
+Size = _typing.Sequence[int]
+"""Required length is 2"""
+Size2f = _typing.Sequence[float]
+"""Required length is 2"""
+Scalar = _typing.Sequence[float]
+"""Required length is at most 4"""
+Point = _typing.Sequence[int]
+"""Required length is 2"""
+Point2i = Point
+Point2f = _typing.Sequence[float]
+"""Required length is 2"""
+Point2d = _typing.Sequence[float]
+"""Required length is 2"""
+Point3i = _typing.Sequence[int]
+"""Required length is 3"""
+Point3f = _typing.Sequence[float]
+"""Required length is 3"""
+Point3d = _typing.Sequence[float]
+"""Required length is 3"""
+Range = _typing.Sequence[int]
+"""Required length is 2"""
+Rect = _typing.Sequence[int]
+"""Required length is 4"""
+Rect2i = _typing.Sequence[int]
+"""Required length is 4"""
+Rect2f = _typing.Sequence[float]
+"""Required length is 4"""
+Rect2d = _typing.Sequence[float]
+"""Required length is 4"""
+Moments = _typing.Dict[str, float]
+RotatedRect = _typing.Tuple[Point2f, Size2f, float]
+"""Any type providing sequence protocol is supported"""
+TermCriteria = _typing.Tuple[TermCriteria_Type, int, float]
+"""Any type providing sequence protocol is supported"""
+Vec2i = _typing.Sequence[int]
+"""Required length is 2"""
+Vec2f = _typing.Sequence[float]
+"""Required length is 2"""
+Vec2d = _typing.Sequence[float]
+"""Required length is 2"""
+Vec3i = _typing.Sequence[int]
+"""Required length is 3"""
+Vec3f = _typing.Sequence[float]
+"""Required length is 3"""
+Vec3d = _typing.Sequence[float]
+"""Required length is 3"""
+Vec4i = _typing.Sequence[int]
+"""Required length is 4"""
+Vec4f = _typing.Sequence[float]
+"""Required length is 4"""
+Vec4d = _typing.Sequence[float]
+"""Required length is 4"""
+Vec6f = _typing.Sequence[float]
+"""Required length is 6"""
+FeatureDetector = cv2.Feature2D
+DescriptorExtractor = cv2.Feature2D
+FeatureExtractor = cv2.Feature2D
+GProtoArg = _typing.Union[Scalar, cv2.GMat, cv2.GOpaqueT, cv2.GArrayT]
+GProtoInputArgs = _typing.Sequence[GProtoArg]
+GProtoOutputArgs = _typing.Sequence[GProtoArg]
+GRunArg = _typing.Union[MatLike, Scalar, cv2.GOpaqueT, cv2.GArrayT, _typing.Sequence[_typing.Any], None]
+GOptRunArg = _typing.Optional[GRunArg]
+GMetaArg = _typing.Union[cv2.GMat, Scalar, cv2.GOpaqueT, cv2.GArrayT]
+Prim = _typing.Union[cv2.gapi.wip.draw.Text, cv2.gapi.wip.draw.Circle, cv2.gapi.wip.draw.Image, cv2.gapi.wip.draw.Line, cv2.gapi.wip.draw.Rect, cv2.gapi.wip.draw.Mosaic, cv2.gapi.wip.draw.Poly]
+Matx33f = NumPyArrayFloat32
+"""NDArray(shape=(3, 3), dtype=numpy.float32)"""
+Matx33d = NumPyArrayFloat64
+"""NDArray(shape=(3, 3), dtype=numpy.float64)"""
+Matx44f = NumPyArrayFloat32
+"""NDArray(shape=(4, 4), dtype=numpy.float32)"""
+Matx44d = NumPyArrayFloat64
+"""NDArray(shape=(4, 4), dtype=numpy.float64)"""
+GTypeInfo = _typing.Union[cv2.GMat, Scalar, cv2.GOpaqueT, cv2.GArrayT]
+ExtractArgsCallback = _typing.Callable[[_typing.Sequence[GTypeInfo]], _typing.Sequence[GRunArg]]
+ExtractMetaCallback = _typing.Callable[[_typing.Sequence[GTypeInfo]], _typing.Sequence[GMetaArg]]
+LayerId = cv2.dnn.DictValue
+IndexParams = _typing.Dict[str, _typing.Union[bool, int, float, str]]
+SearchParams = _typing.Dict[str, _typing.Union[bool, int, float, str]]
+map_string_and_string = _typing.Dict[str, str]
+map_string_and_int = _typing.Dict[str, int]
+map_string_and_vector_size_t = _typing.Dict[str, _typing.Sequence[int]]
+map_string_and_vector_float = _typing.Dict[str, _typing.Sequence[float]]
+map_int_and_double = _typing.Dict[int, float]
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eb4003be0e8a8e3f2fe644436634a92de9612a30
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/cv2/typing/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cv2/version.py b/evalkit_tf446/lib/python3.10/site-packages/cv2/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..05e3354e999a9078a73bccd6bfc5aa3d3cf87b14
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cv2/version.py
@@ -0,0 +1,5 @@
+opencv_version = "4.10.0.84"
+contrib = False
+headless = False
+rolling = False
+ci_build = True
\ No newline at end of file
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cycler/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/cycler/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..97949547ad37c7105beb76cfc9e72a683465fc07
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/cycler/__init__.py
@@ -0,0 +1,573 @@
+"""
+Cycler
+======
+
+Cycling through combinations of values, producing dictionaries.
+
+You can add cyclers::
+
+ from cycler import cycler
+ cc = (cycler(color=list('rgb')) +
+ cycler(linestyle=['-', '--', '-.']))
+ for d in cc:
+ print(d)
+
+Results in::
+
+ {'color': 'r', 'linestyle': '-'}
+ {'color': 'g', 'linestyle': '--'}
+ {'color': 'b', 'linestyle': '-.'}
+
+
+You can multiply cyclers::
+
+ from cycler import cycler
+ cc = (cycler(color=list('rgb')) *
+ cycler(linestyle=['-', '--', '-.']))
+ for d in cc:
+ print(d)
+
+Results in::
+
+ {'color': 'r', 'linestyle': '-'}
+ {'color': 'r', 'linestyle': '--'}
+ {'color': 'r', 'linestyle': '-.'}
+ {'color': 'g', 'linestyle': '-'}
+ {'color': 'g', 'linestyle': '--'}
+ {'color': 'g', 'linestyle': '-.'}
+ {'color': 'b', 'linestyle': '-'}
+ {'color': 'b', 'linestyle': '--'}
+ {'color': 'b', 'linestyle': '-.'}
+"""
+
+
+from __future__ import annotations
+
+from collections.abc import Hashable, Iterable, Generator
+import copy
+from functools import reduce
+from itertools import product, cycle
+from operator import mul, add
+# Dict, List, Union required for runtime cast calls
+from typing import TypeVar, Generic, Callable, Union, Dict, List, Any, overload, cast
+
+__version__ = "0.12.1"
+
+K = TypeVar("K", bound=Hashable)
+L = TypeVar("L", bound=Hashable)
+V = TypeVar("V")
+U = TypeVar("U")
+
+
+def _process_keys(
+ left: Cycler[K, V] | Iterable[dict[K, V]],
+ right: Cycler[K, V] | Iterable[dict[K, V]] | None,
+) -> set[K]:
+ """
+ Helper function to compose cycler keys.
+
+ Parameters
+ ----------
+ left, right : iterable of dictionaries or None
+ The cyclers to be composed.
+
+ Returns
+ -------
+ keys : set
+ The keys in the composition of the two cyclers.
+ """
+ l_peek: dict[K, V] = next(iter(left)) if left != [] else {}
+ r_peek: dict[K, V] = next(iter(right)) if right is not None else {}
+ l_key: set[K] = set(l_peek.keys())
+ r_key: set[K] = set(r_peek.keys())
+ if l_key & r_key:
+ raise ValueError("Can not compose overlapping cycles")
+ return l_key | r_key
+
+
+def concat(left: Cycler[K, V], right: Cycler[K, U]) -> Cycler[K, V | U]:
+ r"""
+ Concatenate `Cycler`\s, as if chained using `itertools.chain`.
+
+ The keys must match exactly.
+
+ Examples
+ --------
+ >>> num = cycler('a', range(3))
+ >>> let = cycler('a', 'abc')
+ >>> num.concat(let)
+ cycler('a', [0, 1, 2, 'a', 'b', 'c'])
+
+ Returns
+ -------
+ `Cycler`
+ The concatenated cycler.
+ """
+ if left.keys != right.keys:
+ raise ValueError(
+ "Keys do not match:\n"
+ "\tIntersection: {both!r}\n"
+ "\tDisjoint: {just_one!r}".format(
+ both=left.keys & right.keys, just_one=left.keys ^ right.keys
+ )
+ )
+ _l = cast(Dict[K, List[Union[V, U]]], left.by_key())
+ _r = cast(Dict[K, List[Union[V, U]]], right.by_key())
+ return reduce(add, (_cycler(k, _l[k] + _r[k]) for k in left.keys))
+
+
+class Cycler(Generic[K, V]):
+ """
+ Composable cycles.
+
+ This class has compositions methods:
+
+ ``+``
+ for 'inner' products (zip)
+
+ ``+=``
+ in-place ``+``
+
+ ``*``
+ for outer products (`itertools.product`) and integer multiplication
+
+ ``*=``
+ in-place ``*``
+
+ and supports basic slicing via ``[]``.
+
+ Parameters
+ ----------
+ left, right : Cycler or None
+ The 'left' and 'right' cyclers.
+ op : func or None
+ Function which composes the 'left' and 'right' cyclers.
+ """
+
+ def __call__(self):
+ return cycle(self)
+
+ def __init__(
+ self,
+ left: Cycler[K, V] | Iterable[dict[K, V]] | None,
+ right: Cycler[K, V] | None = None,
+ op: Any = None,
+ ):
+ """
+ Semi-private init.
+
+ Do not use this directly, use `cycler` function instead.
+ """
+ if isinstance(left, Cycler):
+ self._left: Cycler[K, V] | list[dict[K, V]] = Cycler(
+ left._left, left._right, left._op
+ )
+ elif left is not None:
+ # Need to copy the dictionary or else that will be a residual
+ # mutable that could lead to strange errors
+ self._left = [copy.copy(v) for v in left]
+ else:
+ self._left = []
+
+ if isinstance(right, Cycler):
+ self._right: Cycler[K, V] | None = Cycler(
+ right._left, right._right, right._op
+ )
+ else:
+ self._right = None
+
+ self._keys: set[K] = _process_keys(self._left, self._right)
+ self._op: Any = op
+
+ def __contains__(self, k):
+ return k in self._keys
+
+ @property
+ def keys(self) -> set[K]:
+ """The keys this Cycler knows about."""
+ return set(self._keys)
+
+ def change_key(self, old: K, new: K) -> None:
+ """
+ Change a key in this cycler to a new name.
+ Modification is performed in-place.
+
+ Does nothing if the old key is the same as the new key.
+ Raises a ValueError if the new key is already a key.
+ Raises a KeyError if the old key isn't a key.
+ """
+ if old == new:
+ return
+ if new in self._keys:
+ raise ValueError(
+ f"Can't replace {old} with {new}, {new} is already a key"
+ )
+ if old not in self._keys:
+ raise KeyError(
+ f"Can't replace {old} with {new}, {old} is not a key"
+ )
+
+ self._keys.remove(old)
+ self._keys.add(new)
+
+ if self._right is not None and old in self._right.keys:
+ self._right.change_key(old, new)
+
+ # self._left should always be non-None
+ # if self._keys is non-empty.
+ elif isinstance(self._left, Cycler):
+ self._left.change_key(old, new)
+ else:
+ # It should be completely safe at this point to
+ # assume that the old key can be found in each
+ # iteration.
+ self._left = [{new: entry[old]} for entry in self._left]
+
+ @classmethod
+ def _from_iter(cls, label: K, itr: Iterable[V]) -> Cycler[K, V]:
+ """
+ Class method to create 'base' Cycler objects
+ that do not have a 'right' or 'op' and for which
+ the 'left' object is not another Cycler.
+
+ Parameters
+ ----------
+ label : hashable
+ The property key.
+
+ itr : iterable
+ Finite length iterable of the property values.
+
+ Returns
+ -------
+ `Cycler`
+ New 'base' cycler.
+ """
+ ret: Cycler[K, V] = cls(None)
+ ret._left = list({label: v} for v in itr)
+ ret._keys = {label}
+ return ret
+
+ def __getitem__(self, key: slice) -> Cycler[K, V]:
+ # TODO : maybe add numpy style fancy slicing
+ if isinstance(key, slice):
+ trans = self.by_key()
+ return reduce(add, (_cycler(k, v[key]) for k, v in trans.items()))
+ else:
+ raise ValueError("Can only use slices with Cycler.__getitem__")
+
+ def __iter__(self) -> Generator[dict[K, V], None, None]:
+ if self._right is None:
+ for left in self._left:
+ yield dict(left)
+ else:
+ if self._op is None:
+ raise TypeError(
+ "Operation cannot be None when both left and right are defined"
+ )
+ for a, b in self._op(self._left, self._right):
+ out = {}
+ out.update(a)
+ out.update(b)
+ yield out
+
+ def __add__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
+ """
+ Pair-wise combine two equal length cyclers (zip).
+
+ Parameters
+ ----------
+ other : Cycler
+ """
+ if len(self) != len(other):
+ raise ValueError(
+ f"Can only add equal length cycles, not {len(self)} and {len(other)}"
+ )
+ return Cycler(
+ cast(Cycler[Union[K, L], Union[V, U]], self),
+ cast(Cycler[Union[K, L], Union[V, U]], other),
+ zip
+ )
+
+ @overload
+ def __mul__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
+ ...
+
+ @overload
+ def __mul__(self, other: int) -> Cycler[K, V]:
+ ...
+
+ def __mul__(self, other):
+ """
+ Outer product of two cyclers (`itertools.product`) or integer
+ multiplication.
+
+ Parameters
+ ----------
+ other : Cycler or int
+ """
+ if isinstance(other, Cycler):
+ return Cycler(
+ cast(Cycler[Union[K, L], Union[V, U]], self),
+ cast(Cycler[Union[K, L], Union[V, U]], other),
+ product
+ )
+ elif isinstance(other, int):
+ trans = self.by_key()
+ return reduce(
+ add, (_cycler(k, v * other) for k, v in trans.items())
+ )
+ else:
+ return NotImplemented
+
+ @overload
+ def __rmul__(self, other: Cycler[L, U]) -> Cycler[K | L, V | U]:
+ ...
+
+ @overload
+ def __rmul__(self, other: int) -> Cycler[K, V]:
+ ...
+
+ def __rmul__(self, other):
+ return self * other
+
+ def __len__(self) -> int:
+ op_dict: dict[Callable, Callable[[int, int], int]] = {zip: min, product: mul}
+ if self._right is None:
+ return len(self._left)
+ l_len = len(self._left)
+ r_len = len(self._right)
+ return op_dict[self._op](l_len, r_len)
+
+ # iadd and imul do not exapand the the type as the returns must be consistent with
+ # self, thus they flag as inconsistent with add/mul
+ def __iadd__(self, other: Cycler[K, V]) -> Cycler[K, V]: # type: ignore[misc]
+ """
+ In-place pair-wise combine two equal length cyclers (zip).
+
+ Parameters
+ ----------
+ other : Cycler
+ """
+ if not isinstance(other, Cycler):
+ raise TypeError("Cannot += with a non-Cycler object")
+ # True shallow copy of self is fine since this is in-place
+ old_self = copy.copy(self)
+ self._keys = _process_keys(old_self, other)
+ self._left = old_self
+ self._op = zip
+ self._right = Cycler(other._left, other._right, other._op)
+ return self
+
+    def __imul__(self, other: Cycler[K, V] | int) -> Cycler[K, V]:  # type: ignore[misc]
+        """
+        In-place outer product of two cyclers (`itertools.product`).
+
+        Parameters
+        ----------
+        other : Cycler
+
+        Raises
+        ------
+        TypeError
+            If *other* is not a `Cycler`.
+        """
+        # NOTE(review): the annotation admits ``int`` but the body rejects it
+        # with TypeError (so ``c *= 3`` raises, unlike ``c * 3``) -- this
+        # matches upstream behavior; confirm before widening.
+        if not isinstance(other, Cycler):
+            raise TypeError("Cannot *= with a non-Cycler object")
+        # True shallow copy of self is fine since this is in-place
+        old_self = copy.copy(self)
+        self._keys = _process_keys(old_self, other)
+        # Re-root the composition tree: self becomes (old self) x (other).
+        self._left = old_self
+        self._op = product
+        # Copy other so later mutation of it cannot alias into self.
+        self._right = Cycler(other._left, other._right, other._op)
+        return self
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Cycler):
+ return False
+ if len(self) != len(other):
+ return False
+ if self.keys ^ other.keys:
+ return False
+ return all(a == b for a, b in zip(self, other))
+
+    # Equality is structural and instances are mutable (+=, *=), so hashing
+    # is explicitly disabled; Cycler objects are unhashable.
+    __hash__ = None  # type: ignore
+
+ def __repr__(self) -> str:
+ op_map = {zip: "+", product: "*"}
+ if self._right is None:
+ lab = self.keys.pop()
+ itr = list(v[lab] for v in self)
+ return f"cycler({lab!r}, {itr!r})"
+ else:
+ op = op_map.get(self._op, "?")
+ msg = "({left!r} {op} {right!r})"
+ return msg.format(left=self._left, op=op, right=self._right)
+
+ def _repr_html_(self) -> str:
+ # an table showing the value of each key through a full cycle
+ output = ""
+ sorted_keys = sorted(self.keys, key=repr)
+ for key in sorted_keys:
+ output += f"| {key!r} | "
+ for d in iter(self):
+ output += ""
+ for k in sorted_keys:
+ output += f"| {d[k]!r} | "
+ output += "
"
+ output += "
"
+ return output
+
+ def by_key(self) -> dict[K, list[V]]:
+ """
+ Values by key.
+
+ This returns the transposed values of the cycler. Iterating
+ over a `Cycler` yields dicts with a single value for each key,
+ this method returns a `dict` of `list` which are the values
+ for the given key.
+
+ The returned value can be used to create an equivalent `Cycler`
+ using only `+`.
+
+ Returns
+ -------
+ transpose : dict
+ dict of lists of the values for each key.
+ """
+
+ # TODO : sort out if this is a bottle neck, if there is a better way
+ # and if we care.
+
+ keys = self.keys
+ out: dict[K, list[V]] = {k: list() for k in keys}
+
+ for d in self:
+ for k in keys:
+ out[k].append(d[k])
+ return out
+
+    # For backward compatibility: older releases exposed this operation
+    # under the private name ``_transpose``.
+    _transpose = by_key
+
+ def simplify(self) -> Cycler[K, V]:
+ """
+ Simplify the cycler into a sum (but no products) of cyclers.
+
+ Returns
+ -------
+ simple : Cycler
+ """
+ # TODO: sort out if it is worth the effort to make sure this is
+ # balanced. Currently it is is
+ # (((a + b) + c) + d) vs
+ # ((a + b) + (c + d))
+ # I would believe that there is some performance implications
+ trans = self.by_key()
+ return reduce(add, (_cycler(k, v) for k, v in trans.items()))
+
+    # Expose the module-level ``concat`` helper as a method as well.
+    concat = concat
+
+
+@overload
+def cycler(arg: Cycler[K, V]) -> Cycler[K, V]:
+ ...
+
+
+@overload
+def cycler(**kwargs: Iterable[V]) -> Cycler[str, V]:
+ ...
+
+
+@overload
+def cycler(label: K, itr: Iterable[V]) -> Cycler[K, V]:
+ ...
+
+
+def cycler(*args, **kwargs):
+ """
+ Create a new `Cycler` object from a single positional argument,
+ a pair of positional arguments, or the combination of keyword arguments.
+
+ cycler(arg)
+ cycler(label1=itr1[, label2=iter2[, ...]])
+ cycler(label, itr)
+
+ Form 1 simply copies a given `Cycler` object.
+
+ Form 2 composes a `Cycler` as an inner product of the
+ pairs of keyword arguments. In other words, all of the
+ iterables are cycled simultaneously, as if through zip().
+
+ Form 3 creates a `Cycler` from a label and an iterable.
+ This is useful for when the label cannot be a keyword argument
+ (e.g., an integer or a name that has a space in it).
+
+ Parameters
+ ----------
+ arg : Cycler
+ Copy constructor for Cycler (does a shallow copy of iterables).
+ label : name
+ The property key. In the 2-arg form of the function,
+ the label can be any hashable object. In the keyword argument
+ form of the function, it must be a valid python identifier.
+ itr : iterable
+ Finite length iterable of the property values.
+ Can be a single-property `Cycler` that would
+ be like a key change, but as a shallow copy.
+
+ Returns
+ -------
+ cycler : Cycler
+ New `Cycler` for the given property
+
+ """
+ if args and kwargs:
+ raise TypeError(
+ "cycler() can only accept positional OR keyword arguments -- not both."
+ )
+
+ if len(args) == 1:
+ if not isinstance(args[0], Cycler):
+ raise TypeError(
+ "If only one positional argument given, it must "
+ "be a Cycler instance."
+ )
+ return Cycler(args[0])
+ elif len(args) == 2:
+ return _cycler(*args)
+ elif len(args) > 2:
+ raise TypeError(
+ "Only a single Cycler can be accepted as the lone "
+ "positional argument. Use keyword arguments instead."
+ )
+
+ if kwargs:
+ return reduce(add, (_cycler(k, v) for k, v in kwargs.items()))
+
+ raise TypeError("Must have at least a positional OR keyword arguments")
+
+
+def _cycler(label: K, itr: Iterable[V]) -> Cycler[K, V]:
+ """
+ Create a new `Cycler` object from a property name and iterable of values.
+
+ Parameters
+ ----------
+ label : hashable
+ The property key.
+ itr : iterable
+ Finite length iterable of the property values.
+
+ Returns
+ -------
+ cycler : Cycler
+ New `Cycler` for the given property
+ """
+ if isinstance(itr, Cycler):
+ keys = itr.keys
+ if len(keys) != 1:
+ msg = "Can not create Cycler from a multi-property Cycler"
+ raise ValueError(msg)
+
+ lab = keys.pop()
+ # Doesn't need to be a new list because
+ # _from_iter() will be creating that new list anyway.
+ itr = (v[lab] for v in itr)
+
+ return Cycler._from_iter(label, itr)
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cycler/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/cycler/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..02de52f0f24f089469c9965b17d0572261ca046e
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/cycler/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/cycler/py.typed b/evalkit_tf446/lib/python3.10/site-packages/cycler/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cd71da6b5ec401192d24d7fbd5e44e48f16e5840
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/compat.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/compat.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..821c6047a1558a20681fb0f3dfad42fa0ae19409
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/compat.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/decoder.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/decoder.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7c19e0bb6d23d43d8d4fa582a432e72470997807
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/decoder.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoder.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoder.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60acdbd67c079a092b09a34ae951990c66141875
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoder.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoderH.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoderH.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..99684c258907db7c7c2771c2d4ce2c5034220f80
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/encoderH.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/ordered_dict.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/ordered_dict.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..16db44258bedd1bd6682849634f3d6fab32416aa
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/ordered_dict.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/scanner.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/scanner.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7a520462c3d3b9485a481fe3cb47ad3479a8acd9
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/scanner.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/tool.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/tool.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c4776ca4f852cf78006cc9cbeadd029a02960ec6
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/__pycache__/tool.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..54cd7d1bc455893a812da031eb5f08a65a0b0d60
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_decimal.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_decimal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6474f5a3247231919296626db019ed911f16d8cf
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_decimal.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_for_json.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_for_json.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..24209f44e6f69b136e0ced87b84d2006e78887fc
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_for_json.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass2.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass2.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6409441a52bcf364a067a047fc2eefee241a0ccb
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_pass2.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_scanstring.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_scanstring.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ffdc0f6cb92f7462ee870b690cb5f7fbdcbe395f
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/hjson/tests/__pycache__/test_scanstring.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt
@@ -0,0 +1,1568 @@
+End User License Agreement
+--------------------------
+
+
+Preface
+-------
+
+The Software License Agreement in Chapter 1 and the Supplement
+in Chapter 2 contain license terms and conditions that govern
+the use of NVIDIA software. By accepting this agreement, you
+agree to comply with all the terms and conditions applicable
+to the product(s) included herein.
+
+
+NVIDIA Driver
+
+
+Description
+
+This package contains the operating system driver and
+fundamental system software components for NVIDIA GPUs.
+
+
+NVIDIA CUDA Toolkit
+
+
+Description
+
+The NVIDIA CUDA Toolkit provides command-line and graphical
+tools for building, debugging and optimizing the performance
+of applications accelerated by NVIDIA GPUs, runtime and math
+libraries, and documentation including programming guides,
+user manuals, and API references.
+
+
+Default Install Location of CUDA Toolkit
+
+Windows platform:
+
+%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
+
+Linux platform:
+
+/usr/local/cuda-#.#
+
+Mac platform:
+
+/Developer/NVIDIA/CUDA-#.#
+
+
+NVIDIA CUDA Samples
+
+
+Description
+
+This package includes over 100+ CUDA examples that demonstrate
+various CUDA programming principles, and efficient CUDA
+implementation of algorithms in specific application domains.
+
+
+Default Install Location of CUDA Samples
+
+Windows platform:
+
+%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
+
+Linux platform:
+
+/usr/local/cuda-#.#/samples
+
+and
+
+$HOME/NVIDIA_CUDA-#.#_Samples
+
+Mac platform:
+
+/Developer/NVIDIA/CUDA-#.#/samples
+
+
+NVIDIA Nsight Visual Studio Edition (Windows only)
+
+
+Description
+
+NVIDIA Nsight Development Platform, Visual Studio Edition is a
+development environment integrated into Microsoft Visual
+Studio that provides tools for debugging, profiling, analyzing
+and optimizing your GPU computing and graphics applications.
+
+
+Default Install Location of Nsight Visual Studio Edition
+
+Windows platform:
+
+%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
+
+
+1. License Agreement for NVIDIA Software Development Kits
+---------------------------------------------------------
+
+
+Release Date: July 26, 2018
+---------------------------
+
+
+Important Notice—Read before downloading, installing,
+copying or using the licensed software:
+-------------------------------------------------------
+
+This license agreement, including exhibits attached
+("Agreement”) is a legal agreement between you and NVIDIA
+Corporation ("NVIDIA") and governs your use of a NVIDIA
+software development kit (“SDK”).
+
+Each SDK has its own set of software and materials, but here
+is a description of the types of items that may be included in
+a SDK: source code, header files, APIs, data sets and assets
+(examples include images, textures, models, scenes, videos,
+native API input/output files), binary software, sample code,
+libraries, utility programs, programming code and
+documentation.
+
+This Agreement can be accepted only by an adult of legal age
+of majority in the country in which the SDK is used.
+
+If you are entering into this Agreement on behalf of a company
+or other legal entity, you represent that you have the legal
+authority to bind the entity to this Agreement, in which case
+“you” will mean the entity you represent.
+
+If you don’t have the required age or authority to accept
+this Agreement, or if you don’t accept all the terms and
+conditions of this Agreement, do not download, install or use
+the SDK.
+
+You agree to use the SDK only for purposes that are permitted
+by (a) this Agreement, and (b) any applicable law, regulation
+or generally accepted practices or guidelines in the relevant
+jurisdictions.
+
+
+1.1. License
+
+
+1.1.1. License Grant
+
+Subject to the terms of this Agreement, NVIDIA hereby grants
+you a non-exclusive, non-transferable license, without the
+right to sublicense (except as expressly provided in this
+Agreement) to:
+
+ 1. Install and use the SDK,
+
+ 2. Modify and create derivative works of sample source code
+ delivered in the SDK, and
+
+ 3. Distribute those portions of the SDK that are identified
+ in this Agreement as distributable, as incorporated in
+ object code format into a software application that meets
+ the distribution requirements indicated in this Agreement.
+
+
+1.1.2. Distribution Requirements
+
+These are the distribution requirements for you to exercise
+the distribution grant:
+
+ 1. Your application must have material additional
+ functionality, beyond the included portions of the SDK.
+
+ 2. The distributable portions of the SDK shall only be
+ accessed by your application.
+
+ 3. The following notice shall be included in modifications
+ and derivative works of sample source code distributed:
+ “This software contains source code provided by NVIDIA
+ Corporation.”
+
+ 4. Unless a developer tool is identified in this Agreement
+ as distributable, it is delivered for your internal use
+ only.
+
+ 5. The terms under which you distribute your application
+ must be consistent with the terms of this Agreement,
+ including (without limitation) terms relating to the
+ license grant and license restrictions and protection of
+ NVIDIA’s intellectual property rights. Additionally, you
+ agree that you will protect the privacy, security and
+ legal rights of your application users.
+
+ 6. You agree to notify NVIDIA in writing of any known or
+ suspected distribution or use of the SDK not in compliance
+ with the requirements of this Agreement, and to enforce
+ the terms of your agreements with respect to distributed
+ SDK.
+
+
+1.1.3. Authorized Users
+
+You may allow employees and contractors of your entity or of
+your subsidiary(ies) to access and use the SDK from your
+secure network to perform work on your behalf.
+
+If you are an academic institution you may allow users
+enrolled or employed by the academic institution to access and
+use the SDK from your secure network.
+
+You are responsible for the compliance with the terms of this
+Agreement by your authorized users. If you become aware that
+your authorized users didn’t follow the terms of this
+Agreement, you agree to take reasonable steps to resolve the
+non-compliance and prevent new occurrences.
+
+
+1.1.4. Pre-Release SDK
+
+The SDK versions identified as alpha, beta, preview or
+otherwise as pre-release, may not be fully functional, may
+contain errors or design flaws, and may have reduced or
+different security, privacy, accessibility, availability, and
+reliability standards relative to commercial versions of
+NVIDIA software and materials. Use of a pre-release SDK may
+result in unexpected results, loss of data, project delays or
+other unpredictable damage or loss.
+
+You may use a pre-release SDK at your own risk, understanding
+that pre-release SDKs are not intended for use in production
+or business-critical systems.
+
+NVIDIA may choose not to make available a commercial version
+of any pre-release SDK. NVIDIA may also choose to abandon
+development and terminate the availability of a pre-release
+SDK at any time without liability.
+
+
+1.1.5. Updates
+
+NVIDIA may, at its option, make available patches, workarounds
+or other updates to this SDK. Unless the updates are provided
+with their separate governing terms, they are deemed part of
+the SDK licensed to you as provided in this Agreement. You
+agree that the form and content of the SDK that NVIDIA
+provides may change without prior notice to you. While NVIDIA
+generally maintains compatibility between versions, NVIDIA may
+in some cases make changes that introduce incompatibilities in
+future versions of the SDK.
+
+
+1.1.6. Third Party Licenses
+
+The SDK may come bundled with, or otherwise include or be
+distributed with, third party software licensed by a NVIDIA
+supplier and/or open source software provided under an open
+source license. Use of third party software is subject to the
+third-party license terms, or in the absence of third party
+terms, the terms of this Agreement. Copyright to third party
+software is held by the copyright holders indicated in the
+third-party software or license.
+
+
+1.1.7. Reservation of Rights
+
+NVIDIA reserves all rights, title, and interest in and to the
+SDK, not expressly granted to you under this Agreement.
+
+
+1.2. Limitations
+
+The following license limitations apply to your use of the
+SDK:
+
+ 1. You may not reverse engineer, decompile or disassemble,
+ or remove copyright or other proprietary notices from any
+ portion of the SDK or copies of the SDK.
+
+ 2. Except as expressly provided in this Agreement, you may
+ not copy, sell, rent, sublicense, transfer, distribute,
+ modify, or create derivative works of any portion of the
+ SDK. For clarity, you may not distribute or sublicense the
+ SDK as a stand-alone product.
+
+ 3. Unless you have an agreement with NVIDIA for this
+ purpose, you may not indicate that an application created
+ with the SDK is sponsored or endorsed by NVIDIA.
+
+ 4. You may not bypass, disable, or circumvent any
+ encryption, security, digital rights management or
+ authentication mechanism in the SDK.
+
+ 5. You may not use the SDK in any manner that would cause it
+ to become subject to an open source software license. As
+ examples, licenses that require as a condition of use,
+ modification, and/or distribution that the SDK be:
+
+ a. Disclosed or distributed in source code form;
+
+ b. Licensed for the purpose of making derivative works;
+ or
+
+ c. Redistributable at no charge.
+
+ 6. Unless you have an agreement with NVIDIA for this
+ purpose, you may not use the SDK with any system or
+ application where the use or failure of the system or
+ application can reasonably be expected to threaten or
+ result in personal injury, death, or catastrophic loss.
+ Examples include use in avionics, navigation, military,
+ medical, life support or other life critical applications.
+ NVIDIA does not design, test or manufacture the SDK for
+ these critical uses and NVIDIA shall not be liable to you
+ or any third party, in whole or in part, for any claims or
+ damages arising from such uses.
+
+ 7. You agree to defend, indemnify and hold harmless NVIDIA
+ and its affiliates, and their respective employees,
+ contractors, agents, officers and directors, from and
+ against any and all claims, damages, obligations, losses,
+ liabilities, costs or debt, fines, restitutions and
+ expenses (including but not limited to attorney’s fees
+ and costs incident to establishing the right of
+ indemnification) arising out of or related to your use of
+ the SDK outside of the scope of this Agreement, or not in
+ compliance with its terms.
+
+
+1.3. Ownership
+
+ 1. NVIDIA or its licensors hold all rights, title and
+ interest in and to the SDK and its modifications and
+ derivative works, including their respective intellectual
+ property rights, subject to your rights described in this
+ section. This SDK may include software and materials from
+ NVIDIA’s licensors, and these licensors are intended
+ third party beneficiaries that may enforce this Agreement
+ with respect to their intellectual property rights.
+
+ 2. You hold all rights, title and interest in and to your
+ applications and your derivative works of the sample
+ source code delivered in the SDK, including their
+ respective intellectual property rights, subject to
+ NVIDIA’s rights described in this section.
+
+ 3. You may, but don’t have to, provide to NVIDIA
+ suggestions, feature requests or other feedback regarding
+ the SDK, including possible enhancements or modifications
+ to the SDK. For any feedback that you voluntarily provide,
+ you hereby grant NVIDIA and its affiliates a perpetual,
+ non-exclusive, worldwide, irrevocable license to use,
+ reproduce, modify, license, sublicense (through multiple
+ tiers of sublicensees), and distribute (through multiple
+ tiers of distributors) it without the payment of any
+ royalties or fees to you. NVIDIA will use feedback at its
+ choice. NVIDIA is constantly looking for ways to improve
+ its products, so you may send feedback to NVIDIA through
+ the developer portal at https://developer.nvidia.com.
+
+
+1.4. No Warranties
+
+THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
+FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
+ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
+OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
+BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
+ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
+WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
+DEALING OR COURSE OF TRADE.
+
+
+1.5. Limitation of Liability
+
+TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
+AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
+PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
+OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
+PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
+WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
+WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
+OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
+PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
+LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
+TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
+AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
+NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
+LIMIT.
+
+These exclusions and limitations of liability shall apply
+regardless if NVIDIA or its affiliates have been advised of
+the possibility of such damages, and regardless of whether a
+remedy fails its essential purpose. These exclusions and
+limitations of liability form an essential basis of the
+bargain between the parties, and, absent any of these
+exclusions or limitations of liability, the provisions of this
+Agreement, including, without limitation, the economic terms,
+would be substantially different.
+
+
+1.6. Termination
+
+ 1. This Agreement will continue to apply until terminated by
+ either you or NVIDIA as described below.
+
+ 2. If you want to terminate this Agreement, you may do so by
+ stopping to use the SDK.
+
+ 3. NVIDIA may, at any time, terminate this Agreement if:
+
+ a. (i) you fail to comply with any term of this
+ Agreement and the non-compliance is not fixed within
+ thirty (30) days following notice from NVIDIA (or
+ immediately if you violate NVIDIA’s intellectual
+ property rights);
+
+ b. (ii) you commence or participate in any legal
+ proceeding against NVIDIA with respect to the SDK; or
+
+ c. (iii) NVIDIA decides to no longer provide the SDK in
+ a country or, in NVIDIA’s sole discretion, the
+ continued use of it is no longer commercially viable.
+
+ 4. Upon any termination of this Agreement, you agree to
+ promptly discontinue use of the SDK and destroy all copies
+ in your possession or control. Your prior distributions in
+ accordance with this Agreement are not affected by the
+ termination of this Agreement. Upon written request, you
+ will certify in writing that you have complied with your
+ commitments under this section. Upon any termination of
+ this Agreement all provisions survive except for the
+ license grant provisions.
+
+
+1.7. General
+
+If you wish to assign this Agreement or your rights and
+obligations, including by merger, consolidation, dissolution
+or operation of law, contact NVIDIA to ask for permission. Any
+attempted assignment not approved by NVIDIA in writing shall
+be void and of no effect. NVIDIA may assign, delegate or
+transfer this Agreement and its rights and obligations, and if
+to a non-affiliate you will be notified.
+
+You agree to cooperate with NVIDIA and provide reasonably
+requested information to verify your compliance with this
+Agreement.
+
+This Agreement will be governed in all respects by the laws of
+the United States and of the State of Delaware as those laws
+are applied to contracts entered into and performed entirely
+within Delaware by Delaware residents, without regard to the
+conflicts of laws principles. The United Nations Convention on
+Contracts for the International Sale of Goods is specifically
+disclaimed. You agree to all terms of this Agreement in the
+English language.
+
+The state or federal courts residing in Santa Clara County,
+California shall have exclusive jurisdiction over any dispute
+or claim arising out of this Agreement. Notwithstanding this,
+you agree that NVIDIA shall still be allowed to apply for
+injunctive remedies or an equivalent type of urgent legal
+relief in any jurisdiction.
+
+If any court of competent jurisdiction determines that any
+provision of this Agreement is illegal, invalid or
+unenforceable, such provision will be construed as limited to
+the extent necessary to be consistent with and fully
+enforceable under the law and the remaining provisions will
+remain in full force and effect. Unless otherwise specified,
+remedies are cumulative.
+
+Each party acknowledges and agrees that the other is an
+independent contractor in the performance of this Agreement.
+
+The SDK has been developed entirely at private expense and is
+“commercial items” consisting of “commercial computer
+software” and “commercial computer software
+documentation” provided with RESTRICTED RIGHTS. Use,
+duplication or disclosure by the U.S. Government or a U.S.
+Government subcontractor is subject to the restrictions in
+this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
+in subparagraphs (c)(1) and (2) of the Commercial Computer
+Software - Restricted Rights clause at FAR 52.227-19, as
+applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
+Expressway, Santa Clara, CA 95051.
+
+The SDK is subject to United States export laws and
+regulations. You agree that you will not ship, transfer or
+export the SDK into any country, or use the SDK in any manner,
+prohibited by the United States Bureau of Industry and
+Security or economic sanctions regulations administered by the
+U.S. Department of Treasury’s Office of Foreign Assets
+Control (OFAC), or any applicable export laws, restrictions or
+regulations. These laws include restrictions on destinations,
+end users and end use. By accepting this Agreement, you
+confirm that you are not a resident or citizen of any country
+currently embargoed by the U.S. and that you are not otherwise
+prohibited from receiving the SDK.
+
+Any notice delivered by NVIDIA to you under this Agreement
+will be delivered via mail, email or fax. You agree that any
+notices that NVIDIA sends you electronically will satisfy any
+legal communication requirements. Please direct your legal
+notices or other correspondence to NVIDIA Corporation, 2788
+San Tomas Expressway, Santa Clara, California 95051, United
+States of America, Attention: Legal Department.
+
+This Agreement and any exhibits incorporated into this
+Agreement constitute the entire agreement of the parties with
+respect to the subject matter of this Agreement and supersede
+all prior negotiations or documentation exchanged between the
+parties relating to this SDK license. Any additional and/or
+conflicting terms on documents issued by you are null, void,
+and invalid. Any amendment or waiver under this Agreement
+shall be in writing and signed by representatives of both
+parties.
+
+
+2. CUDA Toolkit Supplement to Software License Agreement for
+NVIDIA Software Development Kits
+------------------------------------------------------------
+
+
+Release date: August 16, 2018
+-----------------------------
+
+The terms in this supplement govern your use of the NVIDIA
+CUDA Toolkit SDK under the terms of your license agreement
+(“Agreement”) as modified by this supplement. Capitalized
+terms used but not defined below have the meaning assigned to
+them in the Agreement.
+
+This supplement is an exhibit to the Agreement and is
+incorporated as an integral part of the Agreement. In the
+event of conflict between the terms in this supplement and the
+terms in the Agreement, the terms in this supplement govern.
+
+
+2.1. License Scope
+
+The SDK is licensed for you to develop applications only for
+use in systems with NVIDIA GPUs.
+
+
+2.2. Distribution
+
+The portions of the SDK that are distributable under the
+Agreement are listed in Attachment A.
+
+
+2.3. Operating Systems
+
+Those portions of the SDK designed exclusively for use on the
+Linux or FreeBSD operating systems, or other operating systems
+derived from the source code to these operating systems, may
+be copied and redistributed for use in accordance with this
+Agreement, provided that the object code files are not
+modified in any way (except for unzipping of compressed
+files).
+
+
+2.4. Audio and Video Encoders and Decoders
+
+You acknowledge and agree that it is your sole responsibility
+to obtain any additional third-party licenses required to
+make, have made, use, have used, sell, import, and offer for
+sale your products or services that include or incorporate any
+third-party software and content relating to audio and/or
+video encoders and decoders from, including but not limited
+to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
+MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
+under this Agreement any necessary patent or other rights with
+respect to any audio and/or video encoders and decoders.
+
+
+2.5. Licensing
+
+If the distribution terms in this Agreement are not suitable
+for your organization, or for any questions regarding this
+Agreement, please contact NVIDIA at
+nvidia-compute-license-questions@nvidia.com.
+
+
+2.6. Attachment A
+
+The following portions of the SDK are distributable under the
+Agreement:
+
+Component
+
+CUDA Runtime
+
+Windows
+
+cudart.dll, cudart_static.lib, cudadevrt.lib
+
+Mac OSX
+
+libcudart.dylib, libcudart_static.a, libcudadevrt.a
+
+Linux
+
+libcudart.so, libcudart_static.a, libcudadevrt.a
+
+Android
+
+libcudart.so, libcudart_static.a, libcudadevrt.a
+
+Component
+
+CUDA FFT Library
+
+Windows
+
+cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
+
+Mac OSX
+
+libcufft.dylib, libcufft_static.a, libcufftw.dylib,
+libcufftw_static.a
+
+Linux
+
+libcufft.so, libcufft_static.a, libcufftw.so,
+libcufftw_static.a
+
+Android
+
+libcufft.so, libcufft_static.a, libcufftw.so,
+libcufftw_static.a
+
+Component
+
+CUDA BLAS Library
+
+Windows
+
+cublas.dll, cublasLt.dll
+
+Mac OSX
+
+libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
+libcublasLt_static.a
+
+Linux
+
+libcublas.so, libcublasLt.so, libcublas_static.a,
+libcublasLt_static.a
+
+Android
+
+libcublas.so, libcublasLt.so, libcublas_static.a,
+libcublasLt_static.a
+
+Component
+
+NVIDIA "Drop-in" BLAS Library
+
+Windows
+
+nvblas.dll
+
+Mac OSX
+
+libnvblas.dylib
+
+Linux
+
+libnvblas.so
+
+Component
+
+CUDA Sparse Matrix Library
+
+Windows
+
+cusparse.dll, cusparse.lib
+
+Mac OSX
+
+libcusparse.dylib, libcusparse_static.a
+
+Linux
+
+libcusparse.so, libcusparse_static.a
+
+Android
+
+libcusparse.so, libcusparse_static.a
+
+Component
+
+CUDA Linear Solver Library
+
+Windows
+
+cusolver.dll, cusolver.lib
+
+Mac OSX
+
+libcusolver.dylib, libcusolver_static.a
+
+Linux
+
+libcusolver.so, libcusolver_static.a
+
+Android
+
+libcusolver.so, libcusolver_static.a
+
+Component
+
+CUDA Random Number Generation Library
+
+Windows
+
+curand.dll, curand.lib
+
+Mac OSX
+
+libcurand.dylib, libcurand_static.a
+
+Linux
+
+libcurand.so, libcurand_static.a
+
+Android
+
+libcurand.so, libcurand_static.a
+
+Component
+
+CUDA Accelerated Graph Library
+
+Component
+
+NVIDIA Performance Primitives Library
+
+Windows
+
+nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
+nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
+nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
+nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
+nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
+
+Mac OSX
+
+libnppc.dylib, libnppc_static.a, libnppial.dylib,
+libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
+libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
+libnppidei_static.a, libnppif.dylib, libnppif_static.a,
+libnppig.dylib, libnppig_static.a, libnppim.dylib,
+libnppim_static.a, libnppist.dylib, libnppist_static.a,
+libnppisu.dylib, libnppisu_static.a, libnppitc.dylib,
+libnppitc_static.a,
+libnpps.dylib, libnpps_static.a
+
+Linux
+
+libnppc.so, libnppc_static.a, libnppial.so,
+libnppial_static.a, libnppicc.so, libnppicc_static.a,
+libnppicom.so, libnppicom_static.a, libnppidei.so,
+libnppidei_static.a, libnppif.so, libnppif_static.a,
+libnppig.so, libnppig_static.a, libnppim.so,
+libnppim_static.a, libnppist.so, libnppist_static.a,
+libnppisu.so, libnppisu_static.a, libnppitc.so,
+libnppitc_static.a, libnpps.so, libnpps_static.a
+
+Android
+
+libnppc.so, libnppc_static.a, libnppial.so,
+libnppial_static.a, libnppicc.so, libnppicc_static.a,
+libnppicom.so, libnppicom_static.a, libnppidei.so,
+libnppidei_static.a, libnppif.so, libnppif_static.a,
+libnppig.so, libnppig_static.a, libnppim.so,
+libnppim_static.a, libnppist.so, libnppist_static.a,
+libnppisu.so, libnppisu_static.a, libnppitc.so,
+libnppitc_static.a, libnpps.so, libnpps_static.a
+
+Component
+
+NVIDIA JPEG Library
+
+Linux
+
+libnvjpeg.so, libnvjpeg_static.a
+
+Component
+
+Internal common library required for statically linking to
+cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
+
+Mac OSX
+
+libculibos.a
+
+Linux
+
+libculibos.a
+
+Component
+
+NVIDIA Runtime Compilation Library and Header
+
+All
+
+nvrtc.h
+
+Windows
+
+nvrtc.dll, nvrtc-builtins.dll
+
+Mac OSX
+
+libnvrtc.dylib, libnvrtc-builtins.dylib
+
+Linux
+
+libnvrtc.so, libnvrtc-builtins.so
+
+Component
+
+NVIDIA Optimizing Compiler Library
+
+Windows
+
+nvvm.dll
+
+Mac OSX
+
+libnvvm.dylib
+
+Linux
+
+libnvvm.so
+
+Component
+
+NVIDIA Common Device Math Functions Library
+
+Windows
+
+libdevice.10.bc
+
+Mac OSX
+
+libdevice.10.bc
+
+Linux
+
+libdevice.10.bc
+
+Component
+
+CUDA Occupancy Calculation Header Library
+
+All
+
+cuda_occupancy.h
+
+Component
+
+CUDA Half Precision Headers
+
+All
+
+cuda_fp16.h, cuda_fp16.hpp
+
+Component
+
+CUDA Profiling Tools Interface (CUPTI) Library
+
+Windows
+
+cupti.dll
+
+Mac OSX
+
+libcupti.dylib
+
+Linux
+
+libcupti.so
+
+Component
+
+NVIDIA Tools Extension Library
+
+Windows
+
+nvToolsExt.dll, nvToolsExt.lib
+
+Mac OSX
+
+libnvToolsExt.dylib
+
+Linux
+
+libnvToolsExt.so
+
+Component
+
+NVIDIA CUDA Driver Libraries
+
+Linux
+
+libcuda.so, libnvidia-fatbinaryloader.so,
+libnvidia-ptxjitcompiler.so
+
+The NVIDIA CUDA Driver Libraries are only distributable in
+applications that meet the following criteria:
+
+ 1. The application was developed starting from a NVIDIA CUDA
+ container obtained from Docker Hub or the NVIDIA GPU
+ Cloud, and
+
+ 2. The resulting application is packaged as a Docker
+ container and distributed to users on Docker Hub or the
+ NVIDIA GPU Cloud only.
+
+
+2.7. Attachment B
+
+
+Additional Licensing Obligations
+
+The following third party components included in the SOFTWARE
+are licensed to Licensee pursuant to the following terms and
+conditions:
+
+ 1. Licensee's use of the GDB third party component is
+ subject to the terms and conditions of GNU GPL v3:
+
+ This product includes copyrighted third-party software licensed
+ under the terms of the GNU General Public License v3 ("GPL v3").
+ All third-party software packages are copyright by their respective
+ authors. GPL v3 terms and conditions are hereby incorporated into
+ the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
+
+ Consistent with these licensing requirements, the software
+ listed below is provided under the terms of the specified
+ open source software licenses. To obtain source code for
+ software provided under licenses that require
+ redistribution of source code, including the GNU General
+ Public License (GPL) and GNU Lesser General Public License
+ (LGPL), contact oss-requests@nvidia.com. This offer is
+ valid for a period of three (3) years from the date of the
+ distribution of this product by NVIDIA CORPORATION.
+
+ Component License
+ CUDA-GDB GPL v3
+
+ 2. Licensee represents and warrants that any and all third
+ party licensing and/or royalty payment obligations in
+ connection with Licensee's use of the H.264 video codecs
+ are solely the responsibility of Licensee.
+
+ 3. Licensee's use of the Thrust library is subject to the
+ terms and conditions of the Apache License Version 2.0.
+ All third-party software packages are copyright by their
+ respective authors. Apache License Version 2.0 terms and
+ conditions are hereby incorporated into the Agreement by
+ this reference.
+ http://www.apache.org/licenses/LICENSE-2.0.html
+
+ In addition, Licensee acknowledges the following notice:
+ Thrust includes source code from the Boost Iterator,
+ Tuple, System, and Random Number libraries.
+
+ Boost Software License - Version 1.0 - August 17th, 2003
+ . . . .
+
+ Permission is hereby granted, free of charge, to any person or
+ organization obtaining a copy of the software and accompanying
+ documentation covered by this license (the "Software") to use,
+ reproduce, display, distribute, execute, and transmit the Software,
+ and to prepare derivative works of the Software, and to permit
+ third-parties to whom the Software is furnished to do so, all
+ subject to the following:
+
+ The copyright notices in the Software and this entire statement,
+ including the above license grant, this restriction and the following
+ disclaimer, must be included in all copies of the Software, in whole
+ or in part, and all derivative works of the Software, unless such
+ copies or derivative works are solely in the form of machine-executable
+ object code generated by a source language processor.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+ 4. Licensee's use of the LLVM third party component is
+ subject to the following terms and conditions:
+
+ ======================================================
+ LLVM Release License
+ ======================================================
+ University of Illinois/NCSA
+ Open Source License
+
+ Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
+ All rights reserved.
+
+ Developed by:
+
+ LLVM Team
+
+ University of Illinois at Urbana-Champaign
+
+ http://llvm.org
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to
+ deal with the Software without restriction, including without limitation the
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimers.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimers in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the names of the LLVM Team, University of Illinois at Urbana-
+ Champaign, nor the names of its contributors may be used to endorse or
+ promote products derived from this Software without specific prior
+ written permission.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS WITH THE SOFTWARE.
+
+ 5. Licensee's use (e.g. nvprof) of the PCRE third party
+ component is subject to the following terms and
+ conditions:
+
+ ------------
+ PCRE LICENCE
+ ------------
+ PCRE is a library of functions to support regular expressions whose syntax
+ and semantics are as close as possible to those of the Perl 5 language.
+ Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
+ specified below. The documentation for PCRE, supplied in the "doc"
+ directory, is distributed under the same terms as the software itself. The
+ basic library functions are written in C and are freestanding. Also
+ included in the distribution is a set of C++ wrapper functions, and a just-
+ in-time compiler that can be used to optimize pattern matching. These are
+ both optional features that can be omitted when the library is built.
+
+ THE BASIC LIBRARY FUNCTIONS
+ ---------------------------
+ Written by: Philip Hazel
+ Email local part: ph10
+ Email domain: cam.ac.uk
+ University of Cambridge Computing Service,
+ Cambridge, England.
+ Copyright (c) 1997-2012 University of Cambridge
+ All rights reserved.
+
+ PCRE JUST-IN-TIME COMPILATION SUPPORT
+ -------------------------------------
+ Written by: Zoltan Herczeg
+ Email local part: hzmester
+          Email domain:     freemail.hu
+ Copyright(c) 2010-2012 Zoltan Herczeg
+ All rights reserved.
+
+ STACK-LESS JUST-IN-TIME COMPILER
+ --------------------------------
+ Written by: Zoltan Herczeg
+ Email local part: hzmester
+          Email domain:     freemail.hu
+ Copyright(c) 2009-2012 Zoltan Herczeg
+ All rights reserved.
+
+ THE C++ WRAPPER FUNCTIONS
+ -------------------------
+ Contributed by: Google Inc.
+ Copyright (c) 2007-2012, Google Inc.
+ All rights reserved.
+
+ THE "BSD" LICENCE
+ -----------------
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the name of the University of Cambridge nor the name of Google
+ Inc. nor the names of their contributors may be used to endorse or
+ promote products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 6. Some of the cuBLAS library routines were written by or
+ derived from code written by Vasily Volkov and are subject
+ to the Modified Berkeley Software Distribution License as
+ follows:
+
+ Copyright (c) 2007-2009, Regents of the University of California
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the University of California, Berkeley nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 7. Some of the cuBLAS library routines were written by or
+ derived from code written by Davide Barbieri and are
+ subject to the Modified Berkeley Software Distribution
+ License as follows:
+
+ Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 8. Some of the cuBLAS library routines were derived from
+ code developed by the University of Tennessee and are
+ subject to the Modified Berkeley Software Distribution
+ License as follows:
+
+ Copyright (c) 2010 The University of Tennessee.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer listed in this license in the documentation and/or
+ other materials provided with the distribution.
+ * Neither the name of the copyright holders nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 9. Some of the cuBLAS library routines were written by or
+ derived from code written by Jonathan Hogg and are subject
+ to the Modified Berkeley Software Distribution License as
+ follows:
+
+ Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the STFC nor the names of its contributors
+ may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 10. Some of the cuBLAS library routines were written by or
+ derived from code written by Ahmad M. Abdelfattah, David
+ Keyes, and Hatem Ltaief, and are subject to the Apache
+ License, Version 2.0, as follows:
+
+ -- (C) Copyright 2013 King Abdullah University of Science and Technology
+ Authors:
+ Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa)
+ David Keyes (david.keyes@kaust.edu.sa)
+ Hatem Ltaief (hatem.ltaief@kaust.edu.sa)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the King Abdullah University of Science and
+ Technology nor the names of its contributors may be used to endorse
+ or promote products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+      OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 11. Some of the cuSPARSE library routines were written by or
+ derived from code written by Li-Wen Chang and are subject
+ to the NCSA Open Source License as follows:
+
+ Copyright (c) 2012, University of Illinois.
+
+ All rights reserved.
+
+ Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal with the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimers in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the names of IMPACT Group, University of Illinois, nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this Software without specific prior
+ written permission.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
+ SOFTWARE.
+
+ 12. Some of the cuRAND library routines were written by or
+ derived from code written by Mutsuo Saito and Makoto
+ Matsumoto and are subject to the following license:
+
+ Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
+ University. All rights reserved.
+
+ Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
+ University and University of Tokyo. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the Hiroshima University nor the names of
+ its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 13. Some of the cuRAND library routines were derived from
+ code developed by D. E. Shaw Research and are subject to
+ the following license:
+
+ Copyright 2010-2011, D. E. Shaw Research.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions, and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions, and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of D. E. Shaw Research nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 14. Some of the Math library routines were written by or
+ derived from code developed by Norbert Juffa and are
+ subject to the following license:
+
+ Copyright (c) 2015-2017, Norbert Juffa
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 15. Licensee's use of the lz4 third party component is
+ subject to the following terms and conditions:
+
+ Copyright (C) 2011-2013, Yann Collet.
+ BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 16. The NPP library uses code from the Boost Math Toolkit,
+ and is subject to the following license:
+
+ Boost Software License - Version 1.0 - August 17th, 2003
+ . . . .
+
+ Permission is hereby granted, free of charge, to any person or
+ organization obtaining a copy of the software and accompanying
+ documentation covered by this license (the "Software") to use,
+ reproduce, display, distribute, execute, and transmit the Software,
+ and to prepare derivative works of the Software, and to permit
+ third-parties to whom the Software is furnished to do so, all
+ subject to the following:
+
+ The copyright notices in the Software and this entire statement,
+ including the above license grant, this restriction and the following
+ disclaimer, must be included in all copies of the Software, in whole
+ or in part, and all derivative works of the Software, unless such
+ copies or derivative works are solely in the form of machine-executable
+ object code generated by a source language processor.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+ 17. Portions of the Nsight Eclipse Edition is subject to the
+ following license:
+
+ The Eclipse Foundation makes available all content in this plug-in
+ ("Content"). Unless otherwise indicated below, the Content is provided
+ to you under the terms and conditions of the Eclipse Public License
+ Version 1.0 ("EPL"). A copy of the EPL is available at http://
+ www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
+ will mean the Content.
+
+ If you did not receive this Content directly from the Eclipse
+ Foundation, the Content is being redistributed by another party
+ ("Redistributor") and different terms and conditions may apply to your
+ use of any object code in the Content. Check the Redistributor's
+ license that was provided with the Content. If no such license exists,
+ contact the Redistributor. Unless otherwise indicated below, the terms
+ and conditions of the EPL still apply to any source code in the
+ Content and such source code may be obtained at http://www.eclipse.org.
+
+ 18. Some of the cuBLAS library routines uses code from
+ OpenAI, which is subject to the following license:
+
+ License URL
+ https://github.com/openai/openai-gemm/blob/master/LICENSE
+
+ License Text
+ The MIT License
+
+ Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+ 19. Licensee's use of the Visual Studio Setup Configuration
+ Samples is subject to the following license:
+
+ The MIT License (MIT)
+ Copyright (C) Microsoft Corporation. All rights reserved.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without restriction,
+ including without limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of the Software,
+ and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ 20. Licensee's use of linmath.h header for CPU functions for
+ GL vector/matrix operations from lunarG is subject to the
+ Apache License Version 2.0.
+
+ 21. The DX12-CUDA sample uses the d3dx12.h header, which is
+ subject to the MIT license .
+
+-----------------
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..aabd4f7520077f5f9c18bae61ce8ab5754bc57fd
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA
@@ -0,0 +1,36 @@
+Metadata-Version: 2.1
+Name: nvidia-cusparse-cu12
+Version: 12.1.0.106
+Summary: CUSPARSE native runtime libraries
+Home-page: https://developer.nvidia.com/cuda-zone
+Author: Nvidia CUDA Installer Team
+Author-email: cuda_installer@nvidia.com
+License: NVIDIA Proprietary Software
+Keywords: cuda,nvidia,runtime,machine learning,deep learning
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: Other/Proprietary License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Scientific/Engineering :: Mathematics
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: Linux
+Requires-Python: >=3
+License-File: License.txt
+Requires-Dist: nvidia-nvjitlink-cu12
+
+CUSPARSE native runtime libraries
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..7bdd0969540e15ef13d7062911ef1f804e902f44
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD
@@ -0,0 +1,18 @@
+nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cusparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cusparse/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cusparse/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cusparse/include/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cusparse/include/cusparse.h,sha256=yhV9iTcEW9XEyhaJmX4iddh_cMb8sfNAy6qva5ae4qw,287290
+nvidia/cusparse/include/cusparse_v2.h,sha256=jkH2A9hYc-TEF0vuQ_SurbhPNEHkYGUIRuxKXhFAqnw,2587
+nvidia/cusparse/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cusparse/lib/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cusparse/lib/libcusparse.so.12,sha256=UARmovVZ3mIqcbuSDT0pI-aRNSRXR6J0LuE-3_C6YIU,264876688
+nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
+nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA,sha256=XpBtE4L1lFCx7gDu7Klx9dijNWQW26PS3fcOGjNIsXg,1550
+nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD,,
+nvidia_cusparse_cu12-12.1.0.106.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
+nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/REQUESTED b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..06e355fe0e3ed7077903f119ae6928a17da8eb6f
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-manylinux1_x86_64
+
diff --git a/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt
@@ -0,0 +1 @@
+nvidia
diff --git a/evalkit_tf446/lib/python3.10/site-packages/ruff/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/ruff/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/evalkit_tf446/lib/python3.10/site-packages/ruff/__main__.py b/evalkit_tf446/lib/python3.10/site-packages/ruff/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e73bba334e32131744542408514d6f6b28d77c52
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/ruff/__main__.py
@@ -0,0 +1,86 @@
+import os
+import sys
+import sysconfig
+
+
def find_ruff_bin() -> str:
    """Return the path to the ``ruff`` executable.

    Searches, in order: the interpreter's scripts directory, the
    per-user scripts directory, a ``bin`` directory next to the package
    (``pip install --target`` layout), and finally a pip build-environment
    overlay inferred from the first two ``PATH`` entries.

    Raises:
        FileNotFoundError: if no ruff executable is found anywhere.
    """
    exe_name = "ruff" + sysconfig.get_config_var("EXE")

    # 1. The interpreter's scripts directory (normal venv / system install).
    scripts_candidate = os.path.join(sysconfig.get_path("scripts"), exe_name)
    if os.path.isfile(scripts_candidate):
        return scripts_candidate

    # 2. The per-user scripts directory. The scheme name must be picked
    #    manually before 3.10, where get_preferred_scheme() is unavailable.
    if sys.version_info >= (3, 10):
        scheme = sysconfig.get_preferred_scheme("user")
    elif os.name == "nt":
        scheme = "nt_user"
    elif sys.platform == "darwin" and sys._framework:
        scheme = "osx_framework_user"
    else:
        scheme = "posix_user"

    user_candidate = os.path.join(
        sysconfig.get_path("scripts", scheme=scheme), exe_name
    )
    if os.path.isfile(user_candidate):
        return user_candidate

    # 3. A `bin` directory adjacent to the package root, as produced by
    #    `pip install --target`.
    package_root = os.path.dirname(os.path.dirname(__file__))
    target_candidate = os.path.join(package_root, "bin", exe_name)
    if os.path.isfile(target_candidate):
        return target_candidate

    # 4. A pip-specific build environment. pip prepends two entries to PATH:
    #    /pip-build-env-*/overlay/bin followed by /pip-build-env-*/normal/bin.
    #    See: https://github.com/pypa/pip/blob/102d8187a1f5a4cd5de7a549fd8a9af34e89a54f/src/pip/_internal/build_env.py#L87
    path_entries = os.environ.get("PATH", "").split(os.pathsep)
    if len(path_entries) >= 2:

        def tail_components(path: str) -> list[str]:
            """Collect up to the last three path components, last-first."""
            collected: list[str] = []
            while len(collected) < 3:
                head, tail = os.path.split(path)
                if not tail and head == path:
                    # Reached the root; record it and stop.
                    collected.append(path)
                    break
                collected.append(tail)
                path = head
            return collected

        overlay_tail = tail_components(path_entries[0])
        normal_tail = tail_components(path_entries[1])
        is_build_env = (
            len(normal_tail) >= 3
            and normal_tail[-1].startswith("pip-build-env-")
            and normal_tail[-2] == "normal"
            and len(overlay_tail) >= 3
            and overlay_tail[-1].startswith("pip-build-env-")
            and overlay_tail[-2] == "overlay"
        )
        if is_build_env:
            # The overlay directory is the one expected to hold ruff.
            overlay_candidate = os.path.join(path_entries[0], exe_name)
            if os.path.isfile(overlay_candidate):
                return overlay_candidate

    raise FileNotFoundError(scripts_candidate)
+
+
+if __name__ == "__main__":
+ ruff = os.fsdecode(find_ruff_bin())
+ if sys.platform == "win32":
+ import subprocess
+
+ completed_process = subprocess.run([ruff, *sys.argv[1:]])
+ sys.exit(completed_process.returncode)
+ else:
+ os.execvp(ruff, [ruff, *sys.argv[1:]])
diff --git a/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..468fa6cdfc892fe6fa02eaf248629999d4ede9a3
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__init__.cpython-310.pyc differ
diff --git a/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__main__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__main__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..92c8def2704b5520133e40212e575d8878559121
Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/ruff/__pycache__/__main__.cpython-310.pyc differ