Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- wemm/lib/python3.10/site-packages/accelerate-1.2.1.dist-info/INSTALLER +1 -0
- wemm/lib/python3.10/site-packages/accelerate-1.2.1.dist-info/top_level.txt +1 -0
- wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/INSTALLER +1 -0
- wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/LICENSE +177 -0
- wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/REQUESTED +0 -0
- wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/top_level.txt +1 -0
- wemm/lib/python3.10/site-packages/jinja2-3.1.5.dist-info/INSTALLER +1 -0
- wemm/lib/python3.10/site-packages/jinja2-3.1.5.dist-info/REQUESTED +0 -0
- wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/INSTALLER +1 -0
- wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/METADATA +88 -0
- wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/RECORD +67 -0
- wemm/lib/python3.10/site-packages/networkx/classes/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/classes/__pycache__/reportviews.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/classes/tests/test_coreviews.py +362 -0
- wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/INSTALLER +1 -0
- wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/METADATA +37 -0
- wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/RECORD +16 -0
- wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/WHEEL +5 -0
- wemm/lib/python3.10/site-packages/qcloud_cos/__pycache__/version.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/qcloud_cos/ai_recognition.py +1048 -0
- wemm/lib/python3.10/site-packages/qcloud_cos/cos_comm.py +594 -0
- wemm/lib/python3.10/site-packages/simplejson-3.19.3.dist-info/LICENSE.txt +79 -0
- wemm/lib/python3.10/site-packages/simplejson-3.19.3.dist-info/REQUESTED +0 -0
- wemm/lib/python3.10/site-packages/torchgen/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/__pycache__/code_template.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/__pycache__/gen_executorch.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/__pycache__/gen_vmap_plumbing.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/__pycache__/utils.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/dispatcher.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/functionalization.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/structured.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/ufunc.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/unboxing.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/lazy.py +470 -0
- wemm/lib/python3.10/site-packages/torchgen/api/types/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/types/__pycache__/types.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/api/types/signatures.py +422 -0
- wemm/lib/python3.10/site-packages/torchgen/api/unboxing.py +248 -0
- wemm/lib/python3.10/site-packages/torchgen/dest/__init__.py +19 -0
- wemm/lib/python3.10/site-packages/torchgen/dest/__pycache__/lazy_ts_lowering.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/torchgen/dest/ufunc.py +545 -0
- wemm/lib/python3.10/site-packages/torchgen/gen_executorch.py +779 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/native/native_functions.yaml +0 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/native/tags.yaml +47 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/DispatchKeyFunction.h +23 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/NativeFunction.h +17 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/NativeMetaFunctions.h +19 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/Operators.h +74 -0
- wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/RedispatchFunctions.h +32 -0
wemm/lib/python3.10/site-packages/accelerate-1.2.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
wemm/lib/python3.10/site-packages/accelerate-1.2.1.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
accelerate
|
wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Apache License
|
| 3 |
+
Version 2.0, January 2004
|
| 4 |
+
http://www.apache.org/licenses/
|
| 5 |
+
|
| 6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 7 |
+
|
| 8 |
+
1. Definitions.
|
| 9 |
+
|
| 10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 12 |
+
|
| 13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 14 |
+
the copyright owner that is granting the License.
|
| 15 |
+
|
| 16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 17 |
+
other entities that control, are controlled by, or are under common
|
| 18 |
+
control with that entity. For the purposes of this definition,
|
| 19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 20 |
+
direction or management of such entity, whether by contract or
|
| 21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 23 |
+
|
| 24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 25 |
+
exercising permissions granted by this License.
|
| 26 |
+
|
| 27 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 28 |
+
including but not limited to software source code, documentation
|
| 29 |
+
source, and configuration files.
|
| 30 |
+
|
| 31 |
+
"Object" form shall mean any form resulting from mechanical
|
| 32 |
+
transformation or translation of a Source form, including but
|
| 33 |
+
not limited to compiled object code, generated documentation,
|
| 34 |
+
and conversions to other media types.
|
| 35 |
+
|
| 36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 37 |
+
Object form, made available under the License, as indicated by a
|
| 38 |
+
copyright notice that is included in or attached to the work
|
| 39 |
+
(an example is provided in the Appendix below).
|
| 40 |
+
|
| 41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 42 |
+
form, that is based on (or derived from) the Work and for which the
|
| 43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 45 |
+
of this License, Derivative Works shall not include works that remain
|
| 46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 47 |
+
the Work and Derivative Works thereof.
|
| 48 |
+
|
| 49 |
+
"Contribution" shall mean any work of authorship, including
|
| 50 |
+
the original version of the Work and any modifications or additions
|
| 51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 55 |
+
means any form of electronic, verbal, or written communication sent
|
| 56 |
+
to the Licensor or its representatives, including but not limited to
|
| 57 |
+
communication on electronic mailing lists, source code control systems,
|
| 58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 60 |
+
excluding communication that is conspicuously marked or otherwise
|
| 61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 62 |
+
|
| 63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 65 |
+
subsequently incorporated within the Work.
|
| 66 |
+
|
| 67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 72 |
+
Work and such Derivative Works in Source or Object form.
|
| 73 |
+
|
| 74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 77 |
+
(except as stated in this section) patent license to make, have made,
|
| 78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 79 |
+
where such license applies only to those patent claims licensable
|
| 80 |
+
by such Contributor that are necessarily infringed by their
|
| 81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 83 |
+
institute patent litigation against any entity (including a
|
| 84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 85 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 86 |
+
or contributory patent infringement, then any patent licenses
|
| 87 |
+
granted to You under this License for that Work shall terminate
|
| 88 |
+
as of the date such litigation is filed.
|
| 89 |
+
|
| 90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 91 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 92 |
+
modifications, and in Source or Object form, provided that You
|
| 93 |
+
meet the following conditions:
|
| 94 |
+
|
| 95 |
+
(a) You must give any other recipients of the Work or
|
| 96 |
+
Derivative Works a copy of this License; and
|
| 97 |
+
|
| 98 |
+
(b) You must cause any modified files to carry prominent notices
|
| 99 |
+
stating that You changed the files; and
|
| 100 |
+
|
| 101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 102 |
+
that You distribute, all copyright, patent, trademark, and
|
| 103 |
+
attribution notices from the Source form of the Work,
|
| 104 |
+
excluding those notices that do not pertain to any part of
|
| 105 |
+
the Derivative Works; and
|
| 106 |
+
|
| 107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 108 |
+
distribution, then any Derivative Works that You distribute must
|
| 109 |
+
include a readable copy of the attribution notices contained
|
| 110 |
+
within such NOTICE file, excluding those notices that do not
|
| 111 |
+
pertain to any part of the Derivative Works, in at least one
|
| 112 |
+
of the following places: within a NOTICE text file distributed
|
| 113 |
+
as part of the Derivative Works; within the Source form or
|
| 114 |
+
documentation, if provided along with the Derivative Works; or,
|
| 115 |
+
within a display generated by the Derivative Works, if and
|
| 116 |
+
wherever such third-party notices normally appear. The contents
|
| 117 |
+
of the NOTICE file are for informational purposes only and
|
| 118 |
+
do not modify the License. You may add Your own attribution
|
| 119 |
+
notices within Derivative Works that You distribute, alongside
|
| 120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 121 |
+
that such additional attribution notices cannot be construed
|
| 122 |
+
as modifying the License.
|
| 123 |
+
|
| 124 |
+
You may add Your own copyright statement to Your modifications and
|
| 125 |
+
may provide additional or different license terms and conditions
|
| 126 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 127 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 128 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 129 |
+
the conditions stated in this License.
|
| 130 |
+
|
| 131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 133 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 134 |
+
this License, without any additional terms or conditions.
|
| 135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 136 |
+
the terms of any separate license agreement you may have executed
|
| 137 |
+
with Licensor regarding such Contributions.
|
| 138 |
+
|
| 139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 141 |
+
except as required for reasonable and customary use in describing the
|
| 142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 143 |
+
|
| 144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 145 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 148 |
+
implied, including, without limitation, any warranties or conditions
|
| 149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 151 |
+
appropriateness of using or redistributing the Work and assume any
|
| 152 |
+
risks associated with Your exercise of permissions under this License.
|
| 153 |
+
|
| 154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 155 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 156 |
+
unless required by applicable law (such as deliberate and grossly
|
| 157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 158 |
+
liable to You for damages, including any direct, indirect, special,
|
| 159 |
+
incidental, or consequential damages of any character arising as a
|
| 160 |
+
result of this License or out of the use or inability to use the
|
| 161 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 162 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 163 |
+
other commercial damages or losses), even if such Contributor
|
| 164 |
+
has been advised of the possibility of such damages.
|
| 165 |
+
|
| 166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 169 |
+
or other liability obligations and/or rights consistent with this
|
| 170 |
+
License. However, in accepting such obligations, You may act only
|
| 171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 172 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 173 |
+
defend, and hold each Contributor harmless for any liability
|
| 174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 175 |
+
of your accepting any such warranty or additional liability.
|
| 176 |
+
|
| 177 |
+
END OF TERMS AND CONDITIONS
|
wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/REQUESTED
ADDED
|
File without changes
|
wemm/lib/python3.10/site-packages/boto3-1.26.118.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
boto3
|
wemm/lib/python3.10/site-packages/jinja2-3.1.5.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
wemm/lib/python3.10/site-packages/jinja2-3.1.5.dist-info/REQUESTED
ADDED
|
File without changes
|
wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/METADATA
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: lit
|
| 3 |
+
Version: 18.1.8
|
| 4 |
+
Summary: A Software Testing Tool
|
| 5 |
+
Home-page: http://llvm.org
|
| 6 |
+
Author: Daniel Dunbar
|
| 7 |
+
Author-email: daniel@minormatter.com
|
| 8 |
+
License: Apache-2.0 with LLVM exception
|
| 9 |
+
Keywords: test C++ automatic discovery
|
| 10 |
+
Classifier: Development Status :: 3 - Alpha
|
| 11 |
+
Classifier: Environment :: Console
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 14 |
+
Classifier: Natural Language :: English
|
| 15 |
+
Classifier: Operating System :: OS Independent
|
| 16 |
+
Classifier: Programming Language :: Python
|
| 17 |
+
Classifier: Topic :: Software Development :: Testing
|
| 18 |
+
License-File: LICENSE.TXT
|
| 19 |
+
|
| 20 |
+
===============================
|
| 21 |
+
lit - A Software Testing Tool
|
| 22 |
+
===============================
|
| 23 |
+
|
| 24 |
+
About
|
| 25 |
+
=====
|
| 26 |
+
|
| 27 |
+
*lit* is a portable tool for executing LLVM and Clang style test suites,
|
| 28 |
+
summarizing their results, and providing indication of failures. *lit* is
|
| 29 |
+
designed to be a lightweight testing tool with as simple a user interface as
|
| 30 |
+
possible.
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
Features
|
| 34 |
+
========
|
| 35 |
+
|
| 36 |
+
* Portable!
|
| 37 |
+
* Flexible test discovery.
|
| 38 |
+
* Parallel test execution.
|
| 39 |
+
* Support for multiple test formats and test suite designs.
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
Documentation
|
| 43 |
+
=============
|
| 44 |
+
|
| 45 |
+
The official *lit* documentation is in the man page, available online at the LLVM
|
| 46 |
+
Command Guide: http://llvm.org/cmds/lit.html.
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
Source
|
| 50 |
+
======
|
| 51 |
+
|
| 52 |
+
The *lit* source is available as part of LLVM, in the LLVM source repository:
|
| 53 |
+
https://github.com/llvm/llvm-project/tree/main/llvm/utils/lit
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
Contributing to lit
|
| 57 |
+
===================
|
| 58 |
+
|
| 59 |
+
Please browse the issues labeled *tools:llvm-lit* in LLVM's issue tracker for
|
| 60 |
+
ideas on what to work on:
|
| 61 |
+
https://github.com/llvm/llvm-project/labels/tools%3Allvm-lit
|
| 62 |
+
|
| 63 |
+
Before submitting patches, run the test suite to ensure nothing has regressed::
|
| 64 |
+
|
| 65 |
+
# From within your LLVM source directory.
|
| 66 |
+
utils/lit/lit.py \
|
| 67 |
+
--path /path/to/your/llvm/build/bin \
|
| 68 |
+
utils/lit/tests
|
| 69 |
+
|
| 70 |
+
Note that lit's tests depend on ``not`` and ``FileCheck``, LLVM utilities.
|
| 71 |
+
You will need to have built LLVM tools in order to run lit's test suite
|
| 72 |
+
successfully.
|
| 73 |
+
|
| 74 |
+
You'll also want to confirm that lit continues to work when testing LLVM.
|
| 75 |
+
Follow the instructions in http://llvm.org/docs/TestingGuide.html to run the
|
| 76 |
+
regression test suite:
|
| 77 |
+
|
| 78 |
+
make check-llvm
|
| 79 |
+
|
| 80 |
+
And be sure to run the llvm-lit wrapper script as well:
|
| 81 |
+
|
| 82 |
+
/path/to/your/llvm/build/bin/llvm-lit utils/lit/tests
|
| 83 |
+
|
| 84 |
+
Finally, make sure lit works when installed via setuptools:
|
| 85 |
+
|
| 86 |
+
python utils/lit/setup.py install
|
| 87 |
+
lit --path /path/to/your/llvm/build/bin utils/lit/tests
|
| 88 |
+
|
wemm/lib/python3.10/site-packages/lit-18.1.8.dist-info/RECORD
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/lit,sha256=wAQ2RaOuoxIpQB2Vghy5cBwQs1ADzGEKdmFiYkI7uwE,217
|
| 2 |
+
lit-18.1.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
lit-18.1.8.dist-info/LICENSE.TXT,sha256=jYXBBX10Lll5hcfU5jILAVqROThc_0y64G_8Dr6Jr-4,15141
|
| 4 |
+
lit-18.1.8.dist-info/METADATA,sha256=wWTOEzjFOnByj9vUpg-7m6z4w6wOxxQm0bV8JKtErgA,2516
|
| 5 |
+
lit-18.1.8.dist-info/RECORD,,
|
| 6 |
+
lit-18.1.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 7 |
+
lit-18.1.8.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
|
| 8 |
+
lit-18.1.8.dist-info/entry_points.txt,sha256=VkD2kCAmwO0d87cx0ZmqEiJ9Pw_38l-E9bWJ_cV-eRw,38
|
| 9 |
+
lit-18.1.8.dist-info/top_level.txt,sha256=re7FJOS-i0hzk2jilipjbt2x1XAfweNrDeWsfDbiraw,4
|
| 10 |
+
lit/BooleanExpression.py,sha256=au2js-cvxFZf9IIyHsHZK86FSejNDAvP9qxIbJ3BHZ4,12039
|
| 11 |
+
lit/LitConfig.py,sha256=p5--IGnsVhbNmcOCXUGUvEC0ItnYWq44jg10GMxIszM,8431
|
| 12 |
+
lit/LitTestCase.py,sha256=X9QmPKZ43_ynMVagmqQyx9UbmOP8O4GofApCFCfTsVY,1593
|
| 13 |
+
lit/ProgressBar.py,sha256=1S0AM7Zl9A-2vOPTDr_mYzqTCYkef0ZdyZxK7DzjR7k,11087
|
| 14 |
+
lit/ShCommands.py,sha256=2C6r-4Mf-z3axTpsLtFlq3sulf1A6YEmozv9Xnv1DTk,3242
|
| 15 |
+
lit/ShUtil.py,sha256=pRRwmMkTlXdipvKXEkHnB2HNL4ssfzDkj_89ckKy8lA,8811
|
| 16 |
+
lit/Test.py,sha256=WI24gogrtizHfk_H2Hxddu291QTGBTMkn6134KSjYkU,13734
|
| 17 |
+
lit/TestRunner.py,sha256=t37BiqWKwOLnuC3rn0d0UNEe8PD84kPCAjTAq2DTlD4,84905
|
| 18 |
+
lit/TestTimes.py,sha256=L1n8NDZCZBxTv9tpIm9-kz9RdQfa-VHwsG_0vkvFXiQ,1629
|
| 19 |
+
lit/TestingConfig.py,sha256=Nb68jG6vMibHfn3oTU4CvPWVOPnzSRRStbBriP_kFRU,8690
|
| 20 |
+
lit/__init__.py,sha256=B53XbNxbmy10HEpv2uSfm0oR4JDIY1yGDba2wZA86Ug,191
|
| 21 |
+
lit/__pycache__/BooleanExpression.cpython-310.pyc,,
|
| 22 |
+
lit/__pycache__/LitConfig.cpython-310.pyc,,
|
| 23 |
+
lit/__pycache__/LitTestCase.cpython-310.pyc,,
|
| 24 |
+
lit/__pycache__/ProgressBar.cpython-310.pyc,,
|
| 25 |
+
lit/__pycache__/ShCommands.cpython-310.pyc,,
|
| 26 |
+
lit/__pycache__/ShUtil.cpython-310.pyc,,
|
| 27 |
+
lit/__pycache__/Test.cpython-310.pyc,,
|
| 28 |
+
lit/__pycache__/TestRunner.cpython-310.pyc,,
|
| 29 |
+
lit/__pycache__/TestTimes.cpython-310.pyc,,
|
| 30 |
+
lit/__pycache__/TestingConfig.cpython-310.pyc,,
|
| 31 |
+
lit/__pycache__/__init__.cpython-310.pyc,,
|
| 32 |
+
lit/__pycache__/cl_arguments.cpython-310.pyc,,
|
| 33 |
+
lit/__pycache__/discovery.cpython-310.pyc,,
|
| 34 |
+
lit/__pycache__/display.cpython-310.pyc,,
|
| 35 |
+
lit/__pycache__/main.cpython-310.pyc,,
|
| 36 |
+
lit/__pycache__/reports.cpython-310.pyc,,
|
| 37 |
+
lit/__pycache__/run.cpython-310.pyc,,
|
| 38 |
+
lit/__pycache__/util.cpython-310.pyc,,
|
| 39 |
+
lit/__pycache__/worker.cpython-310.pyc,,
|
| 40 |
+
lit/builtin_commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 41 |
+
lit/builtin_commands/__pycache__/__init__.cpython-310.pyc,,
|
| 42 |
+
lit/builtin_commands/__pycache__/cat.cpython-310.pyc,,
|
| 43 |
+
lit/builtin_commands/__pycache__/diff.cpython-310.pyc,,
|
| 44 |
+
lit/builtin_commands/cat.py,sha256=SSu5cLp93tFTxb4i6qtuF5JhGa5ce4AvUOPLklKaObM,1892
|
| 45 |
+
lit/builtin_commands/diff.py,sha256=DgPO2kTHKZwfxn1oNp-S2T1cjpQJtf4EcLeMGX5RgtE,9802
|
| 46 |
+
lit/cl_arguments.py,sha256=TP0MW8TImuPM31tX-E4Z4azhkNk-ChmcVBhhIBzJLyE,11174
|
| 47 |
+
lit/discovery.py,sha256=f_Ze7eIqLlSBew7pcvLOxRHiPqAvkQnFrD5Dc4qRjMI,9618
|
| 48 |
+
lit/display.py,sha256=iDzxhcDR-qGgOga-fYh-N8JlsBZqKNNomTj4HldWr4s,5987
|
| 49 |
+
lit/formats/__init__.py,sha256=N8-_QUMtwIKB_2sg_AxT7fTjfrvygb4xtzeAy6gUZxU,215
|
| 50 |
+
lit/formats/__pycache__/__init__.cpython-310.pyc,,
|
| 51 |
+
lit/formats/__pycache__/base.cpython-310.pyc,,
|
| 52 |
+
lit/formats/__pycache__/googletest.cpython-310.pyc,,
|
| 53 |
+
lit/formats/__pycache__/shtest.cpython-310.pyc,,
|
| 54 |
+
lit/formats/base.py,sha256=WfzVSdoMxwWEw864_nTNRP0MJFvYC483jMuFd0GEJiM,2299
|
| 55 |
+
lit/formats/googletest.py,sha256=u_YLpvDyMG9jnoD2zXNEgKe91IHlNNE97oyCnVrXqW4,14953
|
| 56 |
+
lit/formats/shtest.py,sha256=0TqWvzIgO-wAdPVAdc3ZObn5LgPWRwIZHECxQkNXR18,992
|
| 57 |
+
lit/llvm/__init__.py,sha256=dRYTMMuZZ8AbNmUbAu7tF0oGyiEGKsg08Za2V_i-7KU,176
|
| 58 |
+
lit/llvm/__pycache__/__init__.cpython-310.pyc,,
|
| 59 |
+
lit/llvm/__pycache__/config.cpython-310.pyc,,
|
| 60 |
+
lit/llvm/__pycache__/subst.cpython-310.pyc,,
|
| 61 |
+
lit/llvm/config.py,sha256=w61DQA3aEKzlaxfvS45pCk6NjlFMlsXCUVu0N8GzkG4,32023
|
| 62 |
+
lit/llvm/subst.py,sha256=tcNyKU9DnjpmJK5nPg4OY3PwTpq-yPtWYew2ihrOXVA,5430
|
| 63 |
+
lit/main.py,sha256=6-zVGJg4k8Zsew36foQ_ZyJC5spD_07_DkrimoNpC1E,11855
|
| 64 |
+
lit/reports.py,sha256=z_tW12WmBsib4utxqPUqWCeuCMxqwcLlh4619teM2sI,11056
|
| 65 |
+
lit/run.py,sha256=ElgAxiSVweS5X-O0dw4zytjdq7Uyb5B7MNHh-abneB8,4692
|
| 66 |
+
lit/util.py,sha256=BFYG2BA7keivLMrU8uFeDurBgrA2CXxQYfQcM6iRjTE,17299
|
| 67 |
+
lit/worker.py,sha256=734wH1gJsGPgioa6vHydMhEzWNsH0ceqrzCcBjjcNxI,2666
|
wemm/lib/python3.10/site-packages/networkx/classes/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (570 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/classes/__pycache__/reportviews.cpython-310.pyc
ADDED
|
Binary file (49.2 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-310.pyc
ADDED
|
Binary file (17.6 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/classes/tests/test_coreviews.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pickle
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestAtlasView:
|
| 9 |
+
# node->data
|
| 10 |
+
def setup_method(self):
|
| 11 |
+
self.d = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}}
|
| 12 |
+
self.av = nx.classes.coreviews.AtlasView(self.d)
|
| 13 |
+
|
| 14 |
+
def test_pickle(self):
|
| 15 |
+
view = self.av
|
| 16 |
+
pview = pickle.loads(pickle.dumps(view, -1))
|
| 17 |
+
assert view == pview
|
| 18 |
+
assert view.__slots__ == pview.__slots__
|
| 19 |
+
pview = pickle.loads(pickle.dumps(view))
|
| 20 |
+
assert view == pview
|
| 21 |
+
assert view.__slots__ == pview.__slots__
|
| 22 |
+
|
| 23 |
+
def test_len(self):
|
| 24 |
+
assert len(self.av) == len(self.d)
|
| 25 |
+
|
| 26 |
+
def test_iter(self):
|
| 27 |
+
assert list(self.av) == list(self.d)
|
| 28 |
+
|
| 29 |
+
def test_getitem(self):
|
| 30 |
+
assert self.av[1] is self.d[1]
|
| 31 |
+
assert self.av[2]["color"] == 1
|
| 32 |
+
pytest.raises(KeyError, self.av.__getitem__, 3)
|
| 33 |
+
|
| 34 |
+
def test_copy(self):
|
| 35 |
+
avcopy = self.av.copy()
|
| 36 |
+
assert avcopy[0] == self.av[0]
|
| 37 |
+
assert avcopy == self.av
|
| 38 |
+
assert avcopy[0] is not self.av[0]
|
| 39 |
+
assert avcopy is not self.av
|
| 40 |
+
avcopy[5] = {}
|
| 41 |
+
assert avcopy != self.av
|
| 42 |
+
|
| 43 |
+
avcopy[0]["ht"] = 4
|
| 44 |
+
assert avcopy[0] != self.av[0]
|
| 45 |
+
self.av[0]["ht"] = 4
|
| 46 |
+
assert avcopy[0] == self.av[0]
|
| 47 |
+
del self.av[0]["ht"]
|
| 48 |
+
|
| 49 |
+
assert not hasattr(self.av, "__setitem__")
|
| 50 |
+
|
| 51 |
+
def test_items(self):
|
| 52 |
+
assert sorted(self.av.items()) == sorted(self.d.items())
|
| 53 |
+
|
| 54 |
+
def test_str(self):
|
| 55 |
+
out = str(self.d)
|
| 56 |
+
assert str(self.av) == out
|
| 57 |
+
|
| 58 |
+
def test_repr(self):
|
| 59 |
+
out = "AtlasView(" + str(self.d) + ")"
|
| 60 |
+
assert repr(self.av) == out
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class TestAdjacencyView:
|
| 64 |
+
# node->nbr->data
|
| 65 |
+
def setup_method(self):
|
| 66 |
+
dd = {"color": "blue", "weight": 1.2}
|
| 67 |
+
self.nd = {0: dd, 1: {}, 2: {"color": 1}}
|
| 68 |
+
self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {"color": 1}}}
|
| 69 |
+
self.adjview = nx.classes.coreviews.AdjacencyView(self.adj)
|
| 70 |
+
|
| 71 |
+
def test_pickle(self):
|
| 72 |
+
view = self.adjview
|
| 73 |
+
pview = pickle.loads(pickle.dumps(view, -1))
|
| 74 |
+
assert view == pview
|
| 75 |
+
assert view.__slots__ == pview.__slots__
|
| 76 |
+
|
| 77 |
+
def test_len(self):
|
| 78 |
+
assert len(self.adjview) == len(self.adj)
|
| 79 |
+
|
| 80 |
+
def test_iter(self):
|
| 81 |
+
assert list(self.adjview) == list(self.adj)
|
| 82 |
+
|
| 83 |
+
def test_getitem(self):
|
| 84 |
+
assert self.adjview[1] is not self.adj[1]
|
| 85 |
+
assert self.adjview[3][0] is self.adjview[0][3]
|
| 86 |
+
assert self.adjview[2][3]["color"] == 1
|
| 87 |
+
pytest.raises(KeyError, self.adjview.__getitem__, 4)
|
| 88 |
+
|
| 89 |
+
def test_copy(self):
|
| 90 |
+
avcopy = self.adjview.copy()
|
| 91 |
+
assert avcopy[0] == self.adjview[0]
|
| 92 |
+
assert avcopy[0] is not self.adjview[0]
|
| 93 |
+
|
| 94 |
+
avcopy[2][3]["ht"] = 4
|
| 95 |
+
assert avcopy[2] != self.adjview[2]
|
| 96 |
+
self.adjview[2][3]["ht"] = 4
|
| 97 |
+
assert avcopy[2] == self.adjview[2]
|
| 98 |
+
del self.adjview[2][3]["ht"]
|
| 99 |
+
|
| 100 |
+
assert not hasattr(self.adjview, "__setitem__")
|
| 101 |
+
|
| 102 |
+
def test_items(self):
|
| 103 |
+
view_items = sorted((n, dict(d)) for n, d in self.adjview.items())
|
| 104 |
+
assert view_items == sorted(self.adj.items())
|
| 105 |
+
|
| 106 |
+
def test_str(self):
|
| 107 |
+
out = str(dict(self.adj))
|
| 108 |
+
assert str(self.adjview) == out
|
| 109 |
+
|
| 110 |
+
def test_repr(self):
|
| 111 |
+
out = self.adjview.__class__.__name__ + "(" + str(self.adj) + ")"
|
| 112 |
+
assert repr(self.adjview) == out
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestMultiAdjacencyView(TestAdjacencyView):
|
| 116 |
+
# node->nbr->key->data
|
| 117 |
+
def setup_method(self):
|
| 118 |
+
dd = {"color": "blue", "weight": 1.2}
|
| 119 |
+
self.kd = {0: dd, 1: {}, 2: {"color": 1}}
|
| 120 |
+
self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {"color": 1}}}
|
| 121 |
+
self.adj = {3: self.nd, 0: {3: {3: dd}}, 1: {}, 2: {3: {8: {}}}}
|
| 122 |
+
self.adjview = nx.classes.coreviews.MultiAdjacencyView(self.adj)
|
| 123 |
+
|
| 124 |
+
def test_getitem(self):
|
| 125 |
+
assert self.adjview[1] is not self.adj[1]
|
| 126 |
+
assert self.adjview[3][0][3] is self.adjview[0][3][3]
|
| 127 |
+
assert self.adjview[3][2][3]["color"] == 1
|
| 128 |
+
pytest.raises(KeyError, self.adjview.__getitem__, 4)
|
| 129 |
+
|
| 130 |
+
def test_copy(self):
|
| 131 |
+
avcopy = self.adjview.copy()
|
| 132 |
+
assert avcopy[0] == self.adjview[0]
|
| 133 |
+
assert avcopy[0] is not self.adjview[0]
|
| 134 |
+
|
| 135 |
+
avcopy[2][3][8]["ht"] = 4
|
| 136 |
+
assert avcopy[2] != self.adjview[2]
|
| 137 |
+
self.adjview[2][3][8]["ht"] = 4
|
| 138 |
+
assert avcopy[2] == self.adjview[2]
|
| 139 |
+
del self.adjview[2][3][8]["ht"]
|
| 140 |
+
|
| 141 |
+
assert not hasattr(self.adjview, "__setitem__")
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class TestUnionAtlas:
|
| 145 |
+
# node->data
|
| 146 |
+
def setup_method(self):
|
| 147 |
+
self.s = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}}
|
| 148 |
+
self.p = {3: {"color": "blue", "weight": 1.2}, 4: {}, 2: {"watch": 2}}
|
| 149 |
+
self.av = nx.classes.coreviews.UnionAtlas(self.s, self.p)
|
| 150 |
+
|
| 151 |
+
def test_pickle(self):
|
| 152 |
+
view = self.av
|
| 153 |
+
pview = pickle.loads(pickle.dumps(view, -1))
|
| 154 |
+
assert view == pview
|
| 155 |
+
assert view.__slots__ == pview.__slots__
|
| 156 |
+
|
| 157 |
+
def test_len(self):
|
| 158 |
+
assert len(self.av) == len(self.s.keys() | self.p.keys()) == 5
|
| 159 |
+
|
| 160 |
+
def test_iter(self):
|
| 161 |
+
assert set(self.av) == set(self.s) | set(self.p)
|
| 162 |
+
|
| 163 |
+
def test_getitem(self):
|
| 164 |
+
assert self.av[0] is self.s[0]
|
| 165 |
+
assert self.av[4] is self.p[4]
|
| 166 |
+
assert self.av[2]["color"] == 1
|
| 167 |
+
pytest.raises(KeyError, self.av[2].__getitem__, "watch")
|
| 168 |
+
pytest.raises(KeyError, self.av.__getitem__, 8)
|
| 169 |
+
|
| 170 |
+
def test_copy(self):
|
| 171 |
+
avcopy = self.av.copy()
|
| 172 |
+
assert avcopy[0] == self.av[0]
|
| 173 |
+
assert avcopy[0] is not self.av[0]
|
| 174 |
+
assert avcopy is not self.av
|
| 175 |
+
avcopy[5] = {}
|
| 176 |
+
assert avcopy != self.av
|
| 177 |
+
|
| 178 |
+
avcopy[0]["ht"] = 4
|
| 179 |
+
assert avcopy[0] != self.av[0]
|
| 180 |
+
self.av[0]["ht"] = 4
|
| 181 |
+
assert avcopy[0] == self.av[0]
|
| 182 |
+
del self.av[0]["ht"]
|
| 183 |
+
|
| 184 |
+
assert not hasattr(self.av, "__setitem__")
|
| 185 |
+
|
| 186 |
+
def test_items(self):
|
| 187 |
+
expected = dict(self.p.items())
|
| 188 |
+
expected.update(self.s)
|
| 189 |
+
assert sorted(self.av.items()) == sorted(expected.items())
|
| 190 |
+
|
| 191 |
+
def test_str(self):
|
| 192 |
+
out = str(dict(self.av))
|
| 193 |
+
assert str(self.av) == out
|
| 194 |
+
|
| 195 |
+
def test_repr(self):
|
| 196 |
+
out = f"{self.av.__class__.__name__}({self.s}, {self.p})"
|
| 197 |
+
assert repr(self.av) == out
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class TestUnionAdjacency:
|
| 201 |
+
# node->nbr->data
|
| 202 |
+
def setup_method(self):
|
| 203 |
+
dd = {"color": "blue", "weight": 1.2}
|
| 204 |
+
self.nd = {0: dd, 1: {}, 2: {"color": 1}}
|
| 205 |
+
self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {"color": 1}}}
|
| 206 |
+
self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {"color": 1}}}
|
| 207 |
+
self.adjview = nx.classes.coreviews.UnionAdjacency(self.s, self.p)
|
| 208 |
+
|
| 209 |
+
def test_pickle(self):
|
| 210 |
+
view = self.adjview
|
| 211 |
+
pview = pickle.loads(pickle.dumps(view, -1))
|
| 212 |
+
assert view == pview
|
| 213 |
+
assert view.__slots__ == pview.__slots__
|
| 214 |
+
|
| 215 |
+
def test_len(self):
|
| 216 |
+
assert len(self.adjview) == len(self.s)
|
| 217 |
+
|
| 218 |
+
def test_iter(self):
|
| 219 |
+
assert sorted(self.adjview) == sorted(self.s)
|
| 220 |
+
|
| 221 |
+
def test_getitem(self):
|
| 222 |
+
assert self.adjview[1] is not self.s[1]
|
| 223 |
+
assert self.adjview[3][0] is self.adjview[0][3]
|
| 224 |
+
assert self.adjview[2][3]["color"] == 1
|
| 225 |
+
pytest.raises(KeyError, self.adjview.__getitem__, 4)
|
| 226 |
+
|
| 227 |
+
def test_copy(self):
|
| 228 |
+
avcopy = self.adjview.copy()
|
| 229 |
+
assert avcopy[0] == self.adjview[0]
|
| 230 |
+
assert avcopy[0] is not self.adjview[0]
|
| 231 |
+
|
| 232 |
+
avcopy[2][3]["ht"] = 4
|
| 233 |
+
assert avcopy[2] != self.adjview[2]
|
| 234 |
+
self.adjview[2][3]["ht"] = 4
|
| 235 |
+
assert avcopy[2] == self.adjview[2]
|
| 236 |
+
del self.adjview[2][3]["ht"]
|
| 237 |
+
|
| 238 |
+
assert not hasattr(self.adjview, "__setitem__")
|
| 239 |
+
|
| 240 |
+
def test_str(self):
|
| 241 |
+
out = str(dict(self.adjview))
|
| 242 |
+
assert str(self.adjview) == out
|
| 243 |
+
|
| 244 |
+
def test_repr(self):
|
| 245 |
+
clsname = self.adjview.__class__.__name__
|
| 246 |
+
out = f"{clsname}({self.s}, {self.p})"
|
| 247 |
+
assert repr(self.adjview) == out
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
class TestUnionMultiInner(TestUnionAdjacency):
|
| 251 |
+
# nbr->key->data
|
| 252 |
+
def setup_method(self):
|
| 253 |
+
dd = {"color": "blue", "weight": 1.2}
|
| 254 |
+
self.kd = {7: {}, "ekey": {}, 9: {"color": 1}}
|
| 255 |
+
self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {"key": {"color": 1}}}
|
| 256 |
+
self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {"span": 2}}}
|
| 257 |
+
self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p)
|
| 258 |
+
|
| 259 |
+
def test_len(self):
|
| 260 |
+
assert len(self.adjview) == len(self.s.keys() | self.p.keys()) == 4
|
| 261 |
+
|
| 262 |
+
def test_getitem(self):
|
| 263 |
+
assert self.adjview[1] is not self.s[1]
|
| 264 |
+
assert self.adjview[0][7] is self.adjview[0][3]
|
| 265 |
+
assert self.adjview[2]["key"]["color"] == 1
|
| 266 |
+
assert self.adjview[2][1]["span"] == 2
|
| 267 |
+
pytest.raises(KeyError, self.adjview.__getitem__, 4)
|
| 268 |
+
pytest.raises(KeyError, self.adjview[1].__getitem__, "key")
|
| 269 |
+
|
| 270 |
+
def test_copy(self):
|
| 271 |
+
avcopy = self.adjview.copy()
|
| 272 |
+
assert avcopy[0] == self.adjview[0]
|
| 273 |
+
assert avcopy[0] is not self.adjview[0]
|
| 274 |
+
|
| 275 |
+
avcopy[2][1]["width"] = 8
|
| 276 |
+
assert avcopy[2] != self.adjview[2]
|
| 277 |
+
self.adjview[2][1]["width"] = 8
|
| 278 |
+
assert avcopy[2] == self.adjview[2]
|
| 279 |
+
del self.adjview[2][1]["width"]
|
| 280 |
+
|
| 281 |
+
assert not hasattr(self.adjview, "__setitem__")
|
| 282 |
+
assert hasattr(avcopy, "__setitem__")
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class TestUnionMultiAdjacency(TestUnionAdjacency):
|
| 286 |
+
# node->nbr->key->data
|
| 287 |
+
def setup_method(self):
|
| 288 |
+
dd = {"color": "blue", "weight": 1.2}
|
| 289 |
+
self.kd = {7: {}, 8: {}, 9: {"color": 1}}
|
| 290 |
+
self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {"color": 1}}}
|
| 291 |
+
self.s = {3: self.nd, 0: {3: {7: dd}}, 1: {}, 2: {3: {8: {}}}}
|
| 292 |
+
self.p = {3: {}, 0: {3: {9: dd}}, 1: {}, 2: {1: {8: {}}}}
|
| 293 |
+
self.adjview = nx.classes.coreviews.UnionMultiAdjacency(self.s, self.p)
|
| 294 |
+
|
| 295 |
+
def test_getitem(self):
|
| 296 |
+
assert self.adjview[1] is not self.s[1]
|
| 297 |
+
assert self.adjview[3][0][9] is self.adjview[0][3][9]
|
| 298 |
+
assert self.adjview[3][2][9]["color"] == 1
|
| 299 |
+
pytest.raises(KeyError, self.adjview.__getitem__, 4)
|
| 300 |
+
|
| 301 |
+
def test_copy(self):
|
| 302 |
+
avcopy = self.adjview.copy()
|
| 303 |
+
assert avcopy[0] == self.adjview[0]
|
| 304 |
+
assert avcopy[0] is not self.adjview[0]
|
| 305 |
+
|
| 306 |
+
avcopy[2][3][8]["ht"] = 4
|
| 307 |
+
assert avcopy[2] != self.adjview[2]
|
| 308 |
+
self.adjview[2][3][8]["ht"] = 4
|
| 309 |
+
assert avcopy[2] == self.adjview[2]
|
| 310 |
+
del self.adjview[2][3][8]["ht"]
|
| 311 |
+
|
| 312 |
+
assert not hasattr(self.adjview, "__setitem__")
|
| 313 |
+
assert hasattr(avcopy, "__setitem__")
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
class TestFilteredGraphs:
|
| 317 |
+
def setup_method(self):
|
| 318 |
+
self.Graphs = [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
|
| 319 |
+
|
| 320 |
+
def test_hide_show_nodes(self):
|
| 321 |
+
SubGraph = nx.subgraph_view
|
| 322 |
+
for Graph in self.Graphs:
|
| 323 |
+
G = nx.path_graph(4, Graph)
|
| 324 |
+
SG = G.subgraph([2, 3])
|
| 325 |
+
RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1]))
|
| 326 |
+
assert SG.nodes == RG.nodes
|
| 327 |
+
assert SG.edges == RG.edges
|
| 328 |
+
SGC = SG.copy()
|
| 329 |
+
RGC = RG.copy()
|
| 330 |
+
assert SGC.nodes == RGC.nodes
|
| 331 |
+
assert SGC.edges == RGC.edges
|
| 332 |
+
|
| 333 |
+
def test_str_repr(self):
|
| 334 |
+
SubGraph = nx.subgraph_view
|
| 335 |
+
for Graph in self.Graphs:
|
| 336 |
+
G = nx.path_graph(4, Graph)
|
| 337 |
+
SG = G.subgraph([2, 3])
|
| 338 |
+
RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1]))
|
| 339 |
+
str(SG.adj)
|
| 340 |
+
str(RG.adj)
|
| 341 |
+
repr(SG.adj)
|
| 342 |
+
repr(RG.adj)
|
| 343 |
+
str(SG.adj[2])
|
| 344 |
+
str(RG.adj[2])
|
| 345 |
+
repr(SG.adj[2])
|
| 346 |
+
repr(RG.adj[2])
|
| 347 |
+
|
| 348 |
+
def test_copy(self):
|
| 349 |
+
SubGraph = nx.subgraph_view
|
| 350 |
+
for Graph in self.Graphs:
|
| 351 |
+
G = nx.path_graph(4, Graph)
|
| 352 |
+
SG = G.subgraph([2, 3])
|
| 353 |
+
RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1]))
|
| 354 |
+
RsG = SubGraph(G, filter_node=nx.filters.show_nodes([2, 3]))
|
| 355 |
+
assert G.adj.copy() == G.adj
|
| 356 |
+
assert G.adj[2].copy() == G.adj[2]
|
| 357 |
+
assert SG.adj.copy() == SG.adj
|
| 358 |
+
assert SG.adj[2].copy() == SG.adj[2]
|
| 359 |
+
assert RG.adj.copy() == RG.adj
|
| 360 |
+
assert RG.adj[2].copy() == RG.adj[2]
|
| 361 |
+
assert RsG.adj.copy() == RsG.adj
|
| 362 |
+
assert RsG.adj[2].copy() == RsG.adj[2]
|
wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/METADATA
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: nvidia-cublas-cu11
|
| 3 |
+
Version: 11.10.3.66
|
| 4 |
+
Summary: CUBLAS native runtime libraries
|
| 5 |
+
Home-page: https://developer.nvidia.com/cuda-zone
|
| 6 |
+
Author: Nvidia CUDA Installer Team
|
| 7 |
+
Author-email: cuda_installer@nvidia.com
|
| 8 |
+
License: NVIDIA Proprietary Software
|
| 9 |
+
Keywords: cuda,nvidia,runtime,machine learning,deep learning
|
| 10 |
+
Classifier: Development Status :: 4 - Beta
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Science/Research
|
| 14 |
+
Classifier: License :: Other/Proprietary License
|
| 15 |
+
Classifier: Natural Language :: English
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.5
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 25 |
+
Classifier: Topic :: Scientific/Engineering
|
| 26 |
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
| 27 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 28 |
+
Classifier: Topic :: Software Development
|
| 29 |
+
Classifier: Topic :: Software Development :: Libraries
|
| 30 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 31 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 32 |
+
Requires-Python: >=3
|
| 33 |
+
License-File: License.txt
|
| 34 |
+
Requires-Dist: setuptools
|
| 35 |
+
Requires-Dist: wheel
|
| 36 |
+
|
| 37 |
+
CUBLAS native runtime libraries
|
wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/RECORD
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
nvidia/cublas/include/cublas.h,sha256=a0lLqy-k47NuwyDjuueC3W0Mpc908MTU7o5sMJqE-1w,41246
|
| 2 |
+
nvidia/cublas/include/cublasLt.h,sha256=FeRzOTFwOl0TPCMgnF6qH7RB_3gyJk6D18wodbWE2jM,69858
|
| 3 |
+
nvidia/cublas/include/cublasXt.h,sha256=CW9dyXYGSUW1wEXrVVyhU6OxBK1PUvMoYdVGlQT7L9A,37380
|
| 4 |
+
nvidia/cublas/include/cublas_api.h,sha256=NzuoObejh0gJfney-ufSeL8aZKHNF9I-V52bqtMcmHQ,220682
|
| 5 |
+
nvidia/cublas/include/cublas_v2.h,sha256=DrT-TOKePZcfL_ld1ECGv2F30_9KznXxj5WXoABe2v4,8811
|
| 6 |
+
nvidia/cublas/include/nvblas.h,sha256=dXCLR-2oUiJFzLsDtIAK09m42ct4G0HWdYzBUuDPXpc,23341
|
| 7 |
+
nvidia/cublas/lib/libcublas.so.11,sha256=8VRUT5B05j14BHbR6N9mtBAAjGurzUTHGA9rXhGwjRc,151346592
|
| 8 |
+
nvidia/cublas/lib/libcublasLt.so.11,sha256=hfFE3-ey71iNK2iKvz_dUajiahTkGgEcEtQf6K_JoEE,332762424
|
| 9 |
+
nvidia/cublas/lib/libnvblas.so.11,sha256=eX1B5mULurQr_r_5l-tYnOcm7S_P7Y55WHtkcUIyVDA,733032
|
| 10 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 11 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
|
| 12 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/METADATA,sha256=jBjkDirkcXbmo4mvN540Y-WdlX5SWynVAhl7MRzlMd4,1554
|
| 13 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/RECORD,,
|
| 14 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 15 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
|
| 16 |
+
nvidia_cublas_cu11-11.10.3.66.dist-info/top_level.txt,sha256=R64cT8LTfPfNCPStolkkVMRTsUQTajy66oibURM4Loc,14
|
wemm/lib/python3.10/site-packages/nvidia_cublas_cu11-11.10.3.66.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.37.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-manylinux1_x86_64
|
| 5 |
+
|
wemm/lib/python3.10/site-packages/qcloud_cos/__pycache__/version.cpython-310.pyc
ADDED
|
Binary file (183 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/qcloud_cos/ai_recognition.py
ADDED
|
@@ -0,0 +1,1048 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding=utf-8
|
| 2 |
+
import json
|
| 3 |
+
|
| 4 |
+
from qcloud_cos import CosS3Auth
|
| 5 |
+
from qcloud_cos.cos_client import logger, CosS3Client
|
| 6 |
+
from .cos_comm import *
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class AIRecognitionClient(CosS3Client):
|
| 10 |
+
|
| 11 |
+
def cos_create_ai_object_detect_job(self, Bucket, ObjectKey="",
|
| 12 |
+
DetectUrl=None, **kwargs):
|
| 13 |
+
""" 图像主体检测 https://cloud.tencent.com/document/product/460/97979
|
| 14 |
+
|
| 15 |
+
:param Bucket(string) 存储桶名称.
|
| 16 |
+
:param ObjectKey(string) 设置 ObjectKey.
|
| 17 |
+
:param DetectUrl(string) 您可以通过填写 detect-url 处理任意公网可访问的图片链接。不填写 detect-url 时,后台会默认处理 ObjectKey ,填写了 detect-url 时,后台会处理 detect-url 链接,无需再填写 ObjectKey。 detect-url 示例:http://www.example.com/abc.jpg ,需要进行 UrlEncode,处理后为http%25253A%25252F%25252Fwww.example.com%25252Fabc.jpg。.
|
| 18 |
+
:param kwargs:(dict) 设置上传的headers.
|
| 19 |
+
:return(dict): response header.
|
| 20 |
+
:return(dict): 请求成功返回的结果,dict类型.
|
| 21 |
+
|
| 22 |
+
.. code-block:: python
|
| 23 |
+
|
| 24 |
+
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token) # 获取配置对象
|
| 25 |
+
client = CosS3Client(config)
|
| 26 |
+
# 图像主体检测
|
| 27 |
+
response, data = client.cos_create_ai_object_detect_job(
|
| 28 |
+
Bucket='bucket',
|
| 29 |
+
ObjectKey='',
|
| 30 |
+
DetectUrl=''
|
| 31 |
+
)
|
| 32 |
+
print data
|
| 33 |
+
print response
|
| 34 |
+
"""
|
| 35 |
+
headers = mapped(kwargs)
|
| 36 |
+
final_headers = {}
|
| 37 |
+
params = {}
|
| 38 |
+
for key in headers:
|
| 39 |
+
if key.startswith("response"):
|
| 40 |
+
params[key] = headers[key]
|
| 41 |
+
else:
|
| 42 |
+
final_headers[key] = headers[key]
|
| 43 |
+
headers = final_headers
|
| 44 |
+
params["ci-process"] = "AIObjectDetect"
|
| 45 |
+
if DetectUrl is not None:
|
| 46 |
+
params["detect-url"] = DetectUrl
|
| 47 |
+
|
| 48 |
+
params = format_values(params)
|
| 49 |
+
|
| 50 |
+
path = "/" + ObjectKey
|
| 51 |
+
url = self._conf.uri(bucket=Bucket, path=path)
|
| 52 |
+
|
| 53 |
+
logger.info(
|
| 54 |
+
"cos_create_ai_object_detect_job result, url=:{url} ,headers=:{headers}, params=:{params}".format(
|
| 55 |
+
url=url,
|
| 56 |
+
headers=headers,
|
| 57 |
+
params=params))
|
| 58 |
+
rt = self.send_request(
|
| 59 |
+
method='GET',
|
| 60 |
+
url=url,
|
| 61 |
+
auth=CosS3Auth(self._conf, path, params=params),
|
| 62 |
+
params=params,
|
| 63 |
+
headers=headers,
|
| 64 |
+
ci_request=False)
|
| 65 |
+
|
| 66 |
+
data = rt.content
|
| 67 |
+
response = dict(**rt.headers)
|
| 68 |
+
if 'Content-Type' in response:
|
| 69 |
+
if response['Content-Type'] == 'application/xml':
|
| 70 |
+
data = xml_to_dict(rt.content)
|
| 71 |
+
format_dict(data, ['Response'])
|
| 72 |
+
elif response['Content-Type'].startswith('application/json'):
|
| 73 |
+
data = rt.json()
|
| 74 |
+
|
| 75 |
+
return response, data
|
| 76 |
+
|
| 77 |
+
def cos_goods_matting(self, Bucket, ObjectKey="", DetectUrl=None,
|
| 78 |
+
CenterLayout=0, PaddingLayout=None, Stream=True, **kwargs):
|
| 79 |
+
""" 商品抠图 https://cloud.tencent.com/document/product/460/79735
|
| 80 |
+
|
| 81 |
+
:param Bucket(string) 存储桶名称.
|
| 82 |
+
:param ObjectKey(string) 设置 ObjectKey.
|
| 83 |
+
:param DetectUrl(string) 您可以通过填写 detect-url 处理任意公网可访问的图片链接。不填写 detect-url 时,后台会默认处理 ObjectKey ,填写了 detect-url 时,后台会处理 detect-url 链接,无需再填写 ObjectKey.
|
| 84 |
+
:param CenterLayout(int) 抠图商品居中显示; 值为1时居中显示,值为0时不作处理,默认为0.
|
| 85 |
+
:param PaddingLayout(string) 将处理后的图片四边进行留白,形式为 padding-layout=<dx>x<dy>,左右两边各进行 dx 像素的留白,上下两边各进行 dy 像素的留白.
|
| 86 |
+
:param kwargs:(dict) 设置上传的headers.
|
| 87 |
+
:return(dict): response header.
|
| 88 |
+
:return(dict): 请求成功返回的结果,dict类型.
|
| 89 |
+
|
| 90 |
+
.. code-block:: python
|
| 91 |
+
|
| 92 |
+
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token) # 获取配置对象
|
| 93 |
+
client = CosS3Client(config)
|
| 94 |
+
# 商品抠图
|
| 95 |
+
response, data = client.cos_goods_matting(
|
| 96 |
+
Bucket='bucket',
|
| 97 |
+
ObjectKey='',
|
| 98 |
+
DetectUrl=''
|
| 99 |
+
)
|
| 100 |
+
print data
|
| 101 |
+
print response
|
| 102 |
+
"""
|
| 103 |
+
params = {}
|
| 104 |
+
if DetectUrl is not None:
|
| 105 |
+
params["detect-url"] = DetectUrl
|
| 106 |
+
if CenterLayout != 0:
|
| 107 |
+
params["center-layout"] = CenterLayout
|
| 108 |
+
if PaddingLayout is not None:
|
| 109 |
+
params["padding-layout"] = PaddingLayout
|
| 110 |
+
path = "/" + ObjectKey
|
| 111 |
+
return self.ci_process(Bucket=Bucket, Key=path,
|
| 112 |
+
CiProcess="GoodsMatting", Params=params,
|
| 113 |
+
NeedHeader=True, Stream=Stream, **kwargs)
|
| 114 |
+
|
| 115 |
+
def cos_ai_body_recognition(self, Bucket, ObjectKey='', DetectUrl=None,
|
| 116 |
+
**kwargs):
|
| 117 |
+
""" 人体识别 https://cloud.tencent.com/document/product/460/83196
|
| 118 |
+
|
| 119 |
+
:param Bucket(string) 存储桶名称.
|
| 120 |
+
:param ObjectKey(string) 设置 ObjectKey.
|
| 121 |
+
:param DetectUrl(string) 您可以通过填写 detect-url 处理任意公网可访问的图片链接。不填写 detect-url 时,后台会默认处理 ObjectKey ,填写了 detect-url 时,后台会处理 detect-url 链接,无需再填写 ObjectKey detect-url 示例:http://www.example.com/abc.jpg ,需要进行 UrlEncode,处理后为http%25253A%25252F%25252Fwww.example.com%25252Fabc.jpg.
|
| 122 |
+
:param kwargs:(dict) 设置上传的headers.
|
| 123 |
+
:return(dict): response header.
|
| 124 |
+
:return(dict): 请求成功返回的结果,dict类型.
|
| 125 |
+
|
| 126 |
+
.. code-block:: python
|
| 127 |
+
|
| 128 |
+
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token) # 获取配置对象
|
| 129 |
+
client = CosS3Client(config)
|
| 130 |
+
# 人体识别
|
| 131 |
+
response, data = client.cos_ai_body_recognition(
|
| 132 |
+
Bucket='bucket',
|
| 133 |
+
ObjectKey='',
|
| 134 |
+
DetectUrl=''
|
| 135 |
+
)
|
| 136 |
+
print data
|
| 137 |
+
print response
|
| 138 |
+
"""
|
| 139 |
+
|
| 140 |
+
params = {}
|
| 141 |
+
if DetectUrl is not None:
|
| 142 |
+
params["detect-url"] = DetectUrl
|
| 143 |
+
|
| 144 |
+
path = "/" + ObjectKey
|
| 145 |
+
return self.ci_process(Bucket=Bucket, Key=path,
|
| 146 |
+
CiProcess="AIBodyRecognition", Params=params,
|
| 147 |
+
NeedHeader=True, **kwargs)
|
| 148 |
+
|
| 149 |
+
def cos_ai_detect_face(self, Bucket, ObjectKey, MaxFaceNum=1, **kwargs):
|
| 150 |
+
""" 人脸检测 https://cloud.tencent.com/document/product/460/63223
|
| 151 |
+
|
| 152 |
+
:param Bucket(string) 存储桶名称.
|
| 153 |
+
:param ObjectKey(string) 设置 ObjectKey.
|
| 154 |
+
:param MaxFaceNum(int) 最多处理的人脸数目。默认值为1(仅检测图片中面积最大的那张人脸),最大���为120。此参数用于控制处理待检测图片中的人脸个数,值越小,处理速度越快。.
|
| 155 |
+
:param kwargs:(dict) 设置上传的headers.
|
| 156 |
+
:return(dict): response header.
|
| 157 |
+
:return(dict): 请求成功返回的结果,dict类型.
|
| 158 |
+
|
| 159 |
+
.. code-block:: python
|
| 160 |
+
|
| 161 |
+
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token) # 获取配置对象
|
| 162 |
+
client = CosS3Client(config)
|
| 163 |
+
# 人脸检测
|
| 164 |
+
response, data = client.cos_ai_detect_face(
|
| 165 |
+
Bucket='bucket',
|
| 166 |
+
ObjectKey='',
|
| 167 |
+
MaxFaceNum=''
|
| 168 |
+
)
|
| 169 |
+
print data
|
| 170 |
+
print response
|
| 171 |
+
"""
|
| 172 |
+
|
| 173 |
+
params = {}
|
| 174 |
+
params["max-face-num"] = MaxFaceNum
|
| 175 |
+
|
| 176 |
+
path = "/" + ObjectKey
|
| 177 |
+
return self.ci_process(Bucket=Bucket, Key=path, CiProcess="DetectFace",
|
| 178 |
+
Params=params, NeedHeader=True, **kwargs)
|
| 179 |
+
|
| 180 |
+
def cos_ai_detect_pet(self, Bucket, ObjectKey, **kwargs):
|
| 181 |
+
""" 宠物识别 https://cloud.tencent.com/document/product/460/95753
|
| 182 |
+
|
| 183 |
+
:param Bucket(string) 存储桶名称.
|
| 184 |
+
:param ObjectKey(string) 设置 ObjectKey.
|
| 185 |
+
:param kwargs:(dict) 设置上传的headers.
|
| 186 |
+
:return(dict): response header.
|
| 187 |
+
:return(dict): 请求成功返回的结果,dict类型.
|
| 188 |
+
|
| 189 |
+
.. code-block:: python
|
| 190 |
+
|
| 191 |
+
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token) # 获取配置对象
|
| 192 |
+
client = CosS3Client(config)
|
| 193 |
+
# 宠物识别
|
| 194 |
+
response, data = client.cos_ai_detect_pet(
|
| 195 |
+
Bucket='bucket',
|
| 196 |
+
ObjectKey=''
|
| 197 |
+
)
|
| 198 |
+
print data
|
| 199 |
+
print response
|
| 200 |
+
"""
|
| 201 |
+
|
| 202 |
+
params = {}
|
| 203 |
+
|
| 204 |
+
path = "/" + ObjectKey
|
| 205 |
+
return self.ci_process(Bucket=Bucket, Key=path, CiProcess="detect-pet",
|
| 206 |
+
Params=params, NeedHeader=True, **kwargs)
|
| 207 |
+
|
| 208 |
+
def cos_ai_enhance_image(self, Bucket, ObjectKey='', Denoise=3,
                         Sharpen=3, DetectUrl=None, IgnoreError=None, Stream=True, **kwargs):
    """Image enhancement. https://cloud.tencent.com/document/product/460/83792

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image.
    :param Denoise(int): denoise strength, integer in [0, 5]; 0 disables
        denoising; defaults to 3.
    :param Sharpen(int): sharpen strength, integer in [0, 5]; 0 disables
        sharpening; defaults to 3.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param IgnoreError(int): ignore-error flag passed through to the API.
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {}
    # insertion order mirrors the original: denoise, sharpen, detect-url, ignore-error
    for key, value in (("denoise", Denoise),
                       ("sharpen", Sharpen),
                       ("detect-url", DetectUrl),
                       ("ignore-error", IgnoreError)):
        if value is not None:
            query[key] = value

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIEnhanceImage", Params=query,
                           NeedHeader=True, Stream=Stream, **kwargs)
|
| 253 |
+
|
| 254 |
+
def cos_ai_face_effect(self, Bucket, Type, ObjectKey="", DetectUrl=None,
                       Whitening=30, Smoothing=10, FaceLifting=70, EyeEnlarging=70,
                       Gender=None, Age=None, **kwargs):
    """Face special effects. https://cloud.tencent.com/document/product/460/47197

    :param Bucket(string): bucket name.
    :param Type(string): effect type: face-beautify, face-gender-transformation,
        face-age-transformation or face-segmentation.
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param Whitening(int): face-beautify only, whitening level [0, 100], default 30.
    :param Smoothing(int): face-beautify only, skin-smoothing level [0, 100], default 10.
    :param FaceLifting(int): face-beautify only, face-slimming level [0, 100], default 70.
    :param EyeEnlarging(int): face-beautify only, eye-enlarging level [0, 100], default 70.
    :param Gender(int): face-gender-transformation only; 0 male-to-female,
        1 female-to-male; required for that type, no default.
    :param Age(int): face-age-transformation only; target age in [10, 80];
        required for that type, no default.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {"type": Type}
    # optional keys, inserted in the same order as the original implementation
    for key, value in (("detect-url", DetectUrl),
                       ("whitening", Whitening),
                       ("smoothing", Smoothing),
                       ("faceLifting", FaceLifting),
                       ("eyeEnlarging", EyeEnlarging),
                       ("gender", Gender),
                       ("age", Age)):
        if value is not None:
            query[key] = value

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="face-effect",
                           Params=query, NeedHeader=True, **kwargs)
|
| 314 |
+
|
| 315 |
+
def cos_ai_game_rec(self, Bucket, ObjectKey='', DetectUrl=None, **kwargs):
    """Game-scene recognition. https://cloud.tencent.com/document/product/460/93153

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): public image URL to analyse instead of ObjectKey;
        when set the backend processes the URL and ObjectKey is ignored,
        e.g. http://www.example.com/abc.jpg.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {}
    if DetectUrl is not None:
        query["detect-url"] = DetectUrl

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIGameRec",
                           Params=query, NeedHeader=True, **kwargs)
|
| 346 |
+
|
| 347 |
+
def cos_ai_id_card_ocr(self, Bucket, ObjectKey, CardSide=None, Config=None,
                       **kwargs):
    """ID card OCR. https://cloud.tencent.com/document/product/460/48638

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image.
    :param CardSide(string): FRONT (portrait side) or BACK (national-emblem
        side); when omitted the side is detected automatically.
    :param Config(string): optional boolean flags (all default false):
        CropIdCard, CropPortrait, CopyWarn, BorderCheckWarn, ReshootWarn,
        DetectPsWarn, TempIdWarn, InvalidDateWarn, Quality, MultiCardDetect.
        Format example: Config = {"CropIdCard":true,"CropPortrait":true}.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # ID card OCR (example fixed: the method is cos_ai_id_card_ocr)
        response, data = client.cos_ai_id_card_ocr(
            Bucket='bucket',
            ObjectKey='',
            CardSide='',
            Config=''
        )
        print data
        print response
    """
    params = {}
    if CardSide is not None:
        params["CardSide"] = CardSide
    if Config is not None:
        params["Config"] = Config

    path = "/" + ObjectKey
    return self.ci_process(Bucket=Bucket, Key=path, CiProcess="IDCardOCR",
                           Params=params, NeedHeader=True, **kwargs)
|
| 383 |
+
|
| 384 |
+
def cos_ai_image_coloring(self, Bucket, ObjectKey="", DetectUrl=None,
                          Stream=True, **kwargs):
    """Image colorization. https://cloud.tencent.com/document/product/460/83794

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image to colorize.
    :param DetectUrl(string): URL-encoded image URL to colorize; one of
        ObjectKey/DetectUrl is used — when both are present ObjectKey wins.
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {}
    if DetectUrl is not None:
        query["detect-url"] = DetectUrl

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIImageColoring", Params=query,
                           Stream=Stream, NeedHeader=True, **kwargs)
|
| 417 |
+
|
| 418 |
+
def cos_ai_image_crop(self, Bucket, Width, Height, ObjectKey="",
                      DetectUrl=None, Fixed=0, IgnoreError=None, Stream=True, **kwargs):
    """Smart image cropping. https://cloud.tencent.com/document/product/460/83791

    :param Bucket(string): bucket name.
    :param Width(int): crop-region width; together with Height it forms the
        target aspect ratio; must be > 0 and < the image width in pixels.
    :param Height(int): crop-region height; width:height is recommended to
        stay within [1, 2.5] for good results.
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param Fixed(int): 0 — reduce width:height to the simplest ratio (e.g.
        10x20 becomes 1:2); 1 — output exactly Width x Height pixels; default 0.
    :param IgnoreError(int): when 1, return the original image instead of an
        error if processing fails (e.g. file too large).
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {"width": Width, "height": Height}
    for key, value in (("detect-url", DetectUrl),
                       ("fixed", Fixed),
                       ("ignore-error", IgnoreError)):
        if value is not None:
            query[key] = value

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIImageCrop",
                           Params=query, NeedHeader=True, Stream=Stream,
                           **kwargs)
|
| 465 |
+
|
| 466 |
+
def cos_ai_license_rec(self, Bucket, CardType, ObjectKey='', DetectUrl=None,
                       **kwargs):
    """Card/licence recognition. https://cloud.tencent.com/document/product/460/96767

    :param Bucket(string): bucket name.
    :param CardType(string): recognition type, IDCard or DriverLicense
        (DriverLicense is the API default).
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    # "ci-process" is kept inside Params as in the original request layout,
    # in addition to the CiProcess keyword below.
    query = {
        "ci-process": "AILicenseRec",
        "CardType": CardType,
    }
    if DetectUrl is not None:
        query["detect-url"] = DetectUrl

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AILicenseRec", Params=query,
                           NeedHeader=True, **kwargs)
|
| 503 |
+
|
| 504 |
+
def cos_ai_pic_matting(self, Bucket, ObjectKey='', DetectUrl=None,
                       CenterLayout=0, PaddingLayout=None, Stream=True, **kwargs):
    """General image matting. https://cloud.tencent.com/document/product/460/106750

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param CenterLayout(int): 1 — center the matted subject; 0 — leave as is;
        default 0.
    :param PaddingLayout(string): pad the result as <dx>x<dy> (dx pixels on
        left/right, dy on top/bottom, each at most 1000), e.g. "20x10";
        no padding by default.
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {}
    for key, value in (("detect-url", DetectUrl),
                       ("center-layout", CenterLayout),
                       ("padding-layout", PaddingLayout)):
        if value is not None:
            query[key] = value

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIPicMatting", Params=query,
                           NeedHeader=True, Stream=Stream, **kwargs)
|
| 545 |
+
|
| 546 |
+
def cos_ai_portrait_matting(self, Bucket, ObjectKey='', DetectUrl=None,
                            CenterLayout=0, PaddingLayout=None, Stream=True, **kwargs):
    """Portrait matting. https://cloud.tencent.com/document/product/460/106751

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param CenterLayout(int): 1 — center the matted subject; 0 — leave as is;
        default 0.
    :param PaddingLayout(string): pad the result as <dx>x<dy> (dx pixels on
        left/right, dy on top/bottom, each at most 1000), e.g. "20x10";
        no padding by default.
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {}
    for key, value in (("detect-url", DetectUrl),
                       ("center-layout", CenterLayout),
                       ("padding-layout", PaddingLayout)):
        if value is not None:
            query[key] = value

    return self.ci_process(Bucket=Bucket, Key="/" + ObjectKey,
                           CiProcess="AIPortraitMatting", Params=query,
                           NeedHeader=True, Stream=Stream, **kwargs)
|
| 587 |
+
|
| 588 |
+
def cos_auto_translation_block(self, Bucket, InputText, SourceLang,
                               TargetLang, TextDomain='general', TextStyle='sentence', **kwargs):
    """Real-time text translation. https://cloud.tencent.com/document/product/460/83547

    :param Bucket(string): bucket name.
    :param InputText(string): text to translate.
    :param SourceLang(string): source language code, e.g. "zh".
    :param TargetLang(string): target language code, e.g. "en".
    :param TextDomain(string): business domain of the text, e.g. "ecommerce";
        defaults to "general".
    :param TextStyle(string): text type, e.g. "title"; defaults to "sentence".
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    query = {
        "InputText": InputText,
        "SourceLang": SourceLang,
        "TargetLang": TargetLang,
    }
    if TextDomain is not None:
        query["TextDomain"] = TextDomain
    if TextStyle is not None:
        query["TextStyle"] = TextStyle

    # this endpoint operates on the bucket, not on an object key
    return self.ci_process(Bucket=Bucket, Key="/",
                           CiProcess="AutoTranslationBlock", Params=query,
                           NeedHeader=True, **kwargs)
|
| 632 |
+
|
| 633 |
+
def cos_get_action_sequence(self, Bucket, **kwargs):
    """Fetch the action sequence used by action-mode liveness detection.
    https://cloud.tencent.com/document/product/460/48648

    :param Bucket(string): bucket name.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    return self.ci_process(Bucket=Bucket, Key="/",
                           CiProcess="GetActionSequence", Params={},
                           NeedHeader=True, **kwargs)
|
| 659 |
+
|
| 660 |
+
def cos_get_live_code(self, Bucket, **kwargs):
    """Fetch the digit code used by lip-mode liveness detection.
    https://cloud.tencent.com/document/product/460/48647

    :param Bucket(string): bucket name.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.
    """
    return self.ci_process(Bucket=Bucket, Key="/",
                           CiProcess="GetLiveCode",
                           Params={}, NeedHeader=True, **kwargs)
|
| 684 |
+
|
| 685 |
+
def cos_image_repair(self, Bucket, ObjectKey="", DetectUrl=None,
                     MaskPic=None, MaskPoly=None, Stream=True, **kwargs):
    """Image inpainting / repair. https://cloud.tencent.com/document/product/460/79042

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image to repair.
    :param DetectUrl(string): URL-encoded public image URL; when set the
        backend processes this URL instead of ObjectKey.
    :param MaskPic(string): mask input for the repair region — presumably a
        URL to a mask image; verify against the ImageRepair API doc.
    :param MaskPoly(string): mask input for the repair region — presumably
        polygon coordinates; verify against the ImageRepair API doc.
    :param Stream(bool): whether the response body is returned as a stream.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # image repair
        response, data = client.cos_image_repair(
            Bucket='bucket',
            ObjectKey=''
        )
        print data
        print response
    """
    params = {}
    if DetectUrl is not None:
        params['detect-url'] = DetectUrl
    if MaskPic is not None:
        params['MaskPic'] = MaskPic
    if MaskPoly is not None:
        params['MaskPoly'] = MaskPoly

    path = "/" + ObjectKey
    return self.ci_process(Bucket=Bucket, Key=path, CiProcess="ImageRepair",
                           Params=params, NeedHeader=True, Stream=Stream,
                           **kwargs)
|
| 719 |
+
|
| 720 |
+
def cos_liveness_recognition(self, Bucket, ObjectKey, IdCard, Name,
                             LivenessType, ValidateData=None, BestFrameNum=None, **kwargs):
    """Liveness face verification. https://cloud.tencent.com/document/product/460/48641

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the video to verify.
    :param IdCard(string): ID card number.
    :param Name(string): name; use UTF-8 encoding for Chinese.
    :param LivenessType(string): liveness detection mode, one of LIP
        (digit mode), ACTION (action mode) or SILENT (silent mode).
    :param ValidateData(string): LIP mode — the digit code fetched via
        cos_get_live_code (e.g. "1234"); ACTION mode — the action sequence
        fetched via cos_get_action_sequence (e.g. "2,1"); SILENT mode — empty.
    :param BestFrameNum(int): number of best-frame snapshots to return,
        in [1, 10]; one snapshot by default.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # liveness face verification
        response, data = client.cos_liveness_recognition(
            Bucket='bucket',
            ObjectKey='',
            IdCard='',
            Name='',
            LivenessType='',
            ValidateData='',
            BestFrameNum=''
        )
        print data
        print response
    """
    params = {}
    params["IdCard"] = IdCard
    params["Name"] = Name
    params["LivenessType"] = LivenessType
    if ValidateData is not None:
        params["ValidateData"] = ValidateData
    if BestFrameNum is not None:
        params["BestFrameNum"] = BestFrameNum

    path = "/" + ObjectKey
    return self.ci_process(Bucket=Bucket, Key=path,
                           CiProcess="LivenessRecognition",
                           Params=params, NeedHeader=True, **kwargs)
|
| 767 |
+
|
| 768 |
+
def ci_image_search_bucket(self, Bucket, Body, **kwargs):
    """Enable image search on a bucket. https://cloud.tencent.com/document/product/460/63899

    :param Bucket(string): bucket name.
    :param Body(dict): image-search opening configuration, serialized as the
        <Request> XML body.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # enable image search
        response, data = client.ci_image_search_bucket(
            Bucket='bucket',
            Body={}
        )
        print data
        print response
    """
    headers = mapped(kwargs)
    final_headers = {}
    params = {}
    # "response*" keys go into the query string; the rest stay as headers
    for key in headers:
        if key.startswith("response"):
            params[key] = headers[key]
        else:
            final_headers[key] = headers[key]
    headers = final_headers

    params = format_values(params)
    xml_config = format_xml(data=Body, root='Request')
    path = "/" + "ImageSearchBucket"
    url = self._conf.uri(bucket=Bucket, path=path,
                         endpoint=self._conf._endpoint_ci)

    logger.info(
        "ci_image_search_bucket result, url=:{url} ,headers=:{headers}, params=:{params},xml_config=:{xml_config}".format(
            url=url,
            headers=headers,
            params=params,
            xml_config=xml_config))
    rt = self.send_request(
        method='POST',
        url=url,
        data=xml_config,
        auth=CosS3Auth(self._conf, path, params=params),
        params=params,
        headers=headers,
        ci_request=True)

    data = rt.content
    response = dict(**rt.headers)
    if 'Content-Type' in response:
        # HTTP header values are strings, so compare against '0'; the previous
        # comparison to the int 0 was always true and could feed an empty body
        # to the XML parser.
        if response[
            'Content-Type'] == 'application/xml' and 'Content-Length' in response and \
                response['Content-Length'] != '0':
            data = xml_to_dict(rt.content)
            format_dict(data, ['Response'])
        elif response['Content-Type'].startswith('application/json'):
            data = rt.json()

    return response, data
|
| 832 |
+
|
| 833 |
+
def cos_add_image_search(self, Bucket, ObjectKey, Body, **kwargs):
    """Add an image to the search gallery. https://cloud.tencent.com/document/product/460/63900

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the image to add.
    :param Body(dict): add-image configuration, serialized as the <Request>
        XML body.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # add gallery image
        response, data = client.cos_add_image_search(
            Bucket='bucket',
            ObjectKey='',
            Body={}
        )
        print data
        print response
    """
    headers = mapped(kwargs)
    final_headers = {}
    params = {}
    # "response*" keys go into the query string; the rest stay as headers
    for key in headers:
        if key.startswith("response"):
            params[key] = headers[key]
        else:
            final_headers[key] = headers[key]
    headers = final_headers
    params["ci-process"] = "ImageSearch"
    params["action"] = "AddImage"
    params = format_values(params)

    xml_config = format_xml(data=Body, root='Request')

    path = "/" + ObjectKey
    url = self._conf.uri(bucket=Bucket, path=path)

    logger.info(
        "cos_add_image_search result, url=:{url} ,headers=:{headers}, params=:{params},xml_config=:{xml_config}".format(
            url=url,
            headers=headers,
            params=params,
            xml_config=xml_config))
    rt = self.send_request(
        method='POST',
        url=url,
        data=xml_config,
        auth=CosS3Auth(self._conf, path, params=params),
        params=params,
        headers=headers,
        ci_request=False)

    data = rt.content
    response = dict(**rt.headers)
    if 'Content-Type' in response:
        # HTTP header values are strings, so compare against '0'; the previous
        # comparison to the int 0 was always true and could feed an empty body
        # to the XML parser.
        if response[
            'Content-Type'] == 'application/xml' and 'Content-Length' in response and \
                response['Content-Length'] != '0':
            data = xml_to_dict(rt.content)
            format_dict(data, ['Response'])
        elif response['Content-Type'].startswith('application/json'):
            data = rt.json()

    return response, data
|
| 901 |
+
|
| 902 |
+
def cos_get_search_image(self, Bucket, ObjectKey, MatchThreshold=0,
                         Offset=0, Limit=10, Filter=None, **kwargs):
    """Search gallery images similar to an object. https://cloud.tencent.com/document/product/460/63901

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key of the query image.
    :param MatchThreshold(int): only results whose Score exceeds this value
        are returned; defaults to 0.
    :param Offset(int): start index, defaults to 0.
    :param Limit(int): number of results, defaults to 10, at most 100.
    :param Filter(string): condition filter over the Tags submitted at add
        time; supports >, >=, <, <=, =, != and AND/OR between conditions.
    :param kwargs(dict): extra request headers.
    :return(dict): response header.
    :return(dict): result of the request, dict type.

    .. code-block:: python

        config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token)
        client = CosS3Client(config)
        # image search
        response, data = client.cos_get_search_image(
            Bucket='bucket',
            ObjectKey='',
            MatchThreshold='',
            Offset='',
            Limit='',
            Filter=''
        )
        print data
        print response
    """
    headers = mapped(kwargs)
    final_headers = {}
    params = {}
    # "response*" keys go into the query string; the rest stay as headers
    for key in headers:
        if key.startswith("response"):
            params[key] = headers[key]
        else:
            final_headers[key] = headers[key]
    headers = final_headers
    params["ci-process"] = "ImageSearch"
    params["action"] = "SearchImage"
    if MatchThreshold is not None:
        params["MatchThreshold"] = MatchThreshold
    if Offset is not None:
        params["Offset"] = Offset
    if Limit is not None:
        params["Limit"] = Limit
    if Filter is not None:
        params["Filter"] = Filter

    params = format_values(params)

    path = "/" + ObjectKey
    url = self._conf.uri(bucket=Bucket, path=path)

    logger.info(
        "cos_get_search_image result, url=:{url} ,headers=:{headers}, params=:{params}".format(
            url=url,
            headers=headers,
            params=params))
    rt = self.send_request(
        method='GET',
        url=url,
        auth=CosS3Auth(self._conf, path, params=params),
        params=params,
        headers=headers,
        ci_request=False)

    data = rt.content
    response = dict(**rt.headers)
    if 'Content-Type' in response:
        # HTTP header values are strings, so compare against '0'; the previous
        # comparison to the int 0 was always true and could feed an empty body
        # to the XML parser.
        if response[
            'Content-Type'] == 'application/xml' and 'Content-Length' in response and \
                response['Content-Length'] != '0':
            data = xml_to_dict(rt.content)
            format_dict(data, ['Response'])
        elif response['Content-Type'].startswith('application/json'):
            data = rt.json()

    return response, data
|
| 982 |
+
|
| 983 |
+
def cos_delete_image_search(self, Bucket, ObjectKey, Body, **kwargs):
    """Remove images from an image-search gallery.

    API reference: https://cloud.tencent.com/document/product/460/63902

    :param Bucket(string): bucket name.
    :param ObjectKey(string): object key.
    :param Body(dict): delete-image request configuration.
    :param kwargs(dict): extra request headers.
    :return(dict): response headers.
    :return(dict): parsed response body.
    """
    mapped_headers = mapped(kwargs)
    params = {}
    headers = {}
    # "response*" entries are query parameters, everything else is a header
    for name in mapped_headers:
        if name.startswith("response"):
            params[name] = mapped_headers[name]
        else:
            headers[name] = mapped_headers[name]

    params["ci-process"] = "ImageSearch"
    params["action"] = "DeleteImage"
    params = format_values(params)
    body = format_xml(data=Body, root='Request')
    path = "/" + ObjectKey
    url = self._conf.uri(bucket=Bucket, path=path)

    logger.info(
        "cos_delete_image_search result, url=:{url} ,headers=:{headers}, params=:{params},body=:{body}".format(
            url=url,
            headers=headers,
            params=params,
            body=body))
    rt = self.send_request(
        method='POST',
        url=url,
        data=body,
        auth=CosS3Auth(self._conf, path, params=params),
        params=params,
        headers=headers,
        ci_request=False)

    data = rt.content
    response = dict(**rt.headers)
    # decode the body according to the advertised content type
    content_type = response.get('Content-Type')
    if content_type is not None:
        if content_type == 'application/xml' and 'Content-Length' in response and \
                response['Content-Length'] != 0:
            data = xml_to_dict(rt.content)
            format_dict(data, ['Response'])
        elif content_type.startswith('application/json'):
            data = rt.json()

    return response, data
|
wemm/lib/python3.10/site-packages/qcloud_cos/cos_comm.py
ADDED
|
@@ -0,0 +1,594 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding=utf-8
|
| 2 |
+
|
| 3 |
+
from six import text_type, binary_type, string_types
|
| 4 |
+
from six.moves.urllib.parse import quote, unquote, urlparse
|
| 5 |
+
import hashlib
|
| 6 |
+
import base64
|
| 7 |
+
import os
|
| 8 |
+
import io
|
| 9 |
+
import re
|
| 10 |
+
import sys
|
| 11 |
+
import threading
|
| 12 |
+
import xml.dom.minidom
|
| 13 |
+
import xml.etree.ElementTree
|
| 14 |
+
from datetime import datetime
|
| 15 |
+
from xmltodict import unparse
|
| 16 |
+
from .xml2dict import Xml2Dict
|
| 17 |
+
from .cos_exception import CosClientError
|
| 18 |
+
from .cos_exception import CosServiceError
|
| 19 |
+
|
| 20 |
+
SINGLE_UPLOAD_LENGTH = 5 * 1024 * 1024 * 1024  # max size for a single upload (put_object/upload_part): 5GB
DEFAULT_CHUNK_SIZE = 1024 * 1024  # block size for each read while computing a file MD5: 1MB
# Mapping from kwargs parameter names (S3-style) to the HTTP headers actually sent
maplist = {
    'ContentLength': 'Content-Length',
    'ContentMD5': 'Content-MD5',
    'ContentType': 'Content-Type',
    'CacheControl': 'Cache-Control',
    'ContentDisposition': 'Content-Disposition',
    'ContentEncoding': 'Content-Encoding',
    'ContentLanguage': 'Content-Language',
    'Expires': 'Expires',
    'ResponseContentType': 'response-content-type',
    'ResponseContentLanguage': 'response-content-language',
    'ResponseExpires': 'response-expires',
    'ResponseCacheControl': 'response-cache-control',
    'ResponseContentDisposition': 'response-content-disposition',
    'ResponseContentEncoding': 'response-content-encoding',
    'Metadata': 'Metadata',
    'ACL': 'x-cos-acl',
    'GrantFullControl': 'x-cos-grant-full-control',
    'GrantWrite': 'x-cos-grant-write',
    'GrantRead': 'x-cos-grant-read',
    'StorageClass': 'x-cos-storage-class',
    'Range': 'Range',
    'IfMatch': 'If-Match',
    'IfNoneMatch': 'If-None-Match',
    'IfModifiedSince': 'If-Modified-Since',
    'IfUnmodifiedSince': 'If-Unmodified-Since',
    'CopySourceIfMatch': 'x-cos-copy-source-If-Match',
    'CopySourceIfNoneMatch': 'x-cos-copy-source-If-None-Match',
    'CopySourceIfModifiedSince': 'x-cos-copy-source-If-Modified-Since',
    'CopySourceIfUnmodifiedSince': 'x-cos-copy-source-If-Unmodified-Since',
    'VersionId': 'versionId',
    'ServerSideEncryption': 'x-cos-server-side-encryption',
    'SSEKMSKeyId': 'x-cos-server-side-encryption-cos-kms-key-id',
    'SSEKMSContext': 'x-cos-server-side-encryption-context',
    'SSECustomerAlgorithm': 'x-cos-server-side-encryption-customer-algorithm',
    'SSECustomerKey': 'x-cos-server-side-encryption-customer-key',
    'SSECustomerKeyMD5': 'x-cos-server-side-encryption-customer-key-MD5',
    'CopySourceSSECustomerAlgorithm': 'x-cos-copy-source-server-side-encryption-customer-algorithm',
    'CopySourceSSECustomerKey': 'x-cos-copy-source-server-side-encryption-customer-key',
    'CopySourceSSECustomerKeyMD5': 'x-cos-copy-source-server-side-encryption-customer-key-MD5',
    'Referer': 'Referer',
    'PicOperations': 'Pic-Operations',
    'TrafficLimit': 'x-cos-traffic-limit',
    'Accept': 'Accept'
}
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def to_str(s):
    """Return s unchanged when it is already text or bytes; otherwise stringify it."""
    if isinstance(s, (text_type, binary_type)):
        return s
    return str(s)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def to_unicode(s):
    """Decode UTF-8 bytes to a unicode string; text passes through unchanged.

    :raises CosClientError: the bytes are not valid UTF-8.
    """
    if not isinstance(s, binary_type):
        return s
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError:
        raise CosClientError('your bytes strings can not be decoded in utf8, utf8 support only!')
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def to_bytes(s):
    """Encode a unicode string as UTF-8 bytes; bytes pass through unchanged.

    :raises CosClientError: the string cannot be encoded as UTF-8.
    """
    if not isinstance(s, text_type):
        return s
    try:
        return s.encode('utf-8')
    except UnicodeEncodeError:
        raise CosClientError('your unicode strings can not encoded in utf8, utf8 support only!')
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def get_raw_md5(data):
    """Return the hex MD5 of data wrapped in double quotes (ETag form)."""
    digest = hashlib.md5(to_bytes(data)).hexdigest()
    return '"' + str(digest) + '"'
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def get_md5(data):
    """Return the base64-encoded MD5 digest of data (bytes in, bytes out)."""
    checksum = hashlib.md5(to_bytes(data))
    return base64.standard_b64encode(checksum.digest())
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def get_content_md5(body):
    """Compute the base64 MD5 of an arbitrary request body.

    Accepts text/bytes directly, or any readable+seekable file-like object.
    Streams are read in DEFAULT_CHUNK_SIZE blocks to avoid loading the whole
    content into memory, and the stream position is restored afterwards.

    :raises CosClientError: unsupported body type, or the stream cannot seek back.
    """
    if isinstance(body, (text_type, binary_type)):
        return get_md5(body)
    if hasattr(body, 'tell') and hasattr(body, 'seek') and hasattr(body, 'read'):
        file_position = body.tell()  # remember where the caller left the stream
        md5 = hashlib.md5()
        chunk = body.read(DEFAULT_CHUNK_SIZE)  # chunked read: avoid OOM on large bodies
        while chunk:
            md5.update(to_bytes(chunk))
            chunk = body.read(DEFAULT_CHUNK_SIZE)
        md5_str = base64.standard_b64encode(md5.digest())
        try:
            body.seek(file_position)  # restore the caller's stream position
        except Exception:
            raise CosClientError('seek unsupported to calculate md5!')
        return md5_str
    # fix: the original had an unreachable `return None` after this raise
    raise CosClientError('unsupported body type to calculate md5!')
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def dict_to_xml(data):
    """Build the CompleteMultipartUpload XML body (bytes) from a parts dict.

    :param data: dict with a 'Part' list; each part needs 'PartNumber' and 'ETag'.
    :raises CosClientError: a required field is missing.
    """
    doc = xml.dom.minidom.Document()
    root = doc.createElement('CompleteMultipartUpload')
    doc.appendChild(root)

    if 'Part' not in data:
        raise CosClientError("Invalid Parameter, Part Is Required!")

    for part in data['Part']:
        if 'PartNumber' not in part:
            raise CosClientError("Invalid Parameter, PartNumber Is Required!")
        if 'ETag' not in part:
            raise CosClientError("Invalid Parameter, ETag Is Required!")

        part_node = doc.createElement('Part')
        number_node = doc.createElement('PartNumber')
        number_node.appendChild(doc.createTextNode(str(part['PartNumber'])))
        etag_node = doc.createElement('ETag')
        etag_node.appendChild(doc.createTextNode(str(part['ETag'])))
        part_node.appendChild(number_node)
        part_node.appendChild(etag_node)
        root.appendChild(part_node)
    return doc.toxml('utf-8')
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def xml_to_dict(data, origin_str="", replace_str=""):
    """Parse a V5 XML response body into a dict.

    The known COS/S3 namespace prefixes that ElementTree bakes into tag names
    are stripped from the dict repr before it is parsed back into a dict;
    origin_str/replace_str allow one extra caller-supplied substitution.
    """
    import ast
    root = xml.etree.ElementTree.fromstring(data)
    xmldict = Xml2Dict(root)
    xmlstr = str(xmldict)
    xmlstr = xmlstr.replace("{http://www.qcloud.com/document/product/436/7751}", "")
    xmlstr = xmlstr.replace("{https://cloud.tencent.com/document/product/436}", "")
    xmlstr = xmlstr.replace("{http://doc.s3.amazonaws.com/2006-03-01}", "")
    xmlstr = xmlstr.replace("{http://s3.amazonaws.com/doc/2006-03-01/}", "")
    xmlstr = xmlstr.replace("{http://www.w3.org/2001/XMLSchema-instance}", "")
    if origin_str:
        xmlstr = xmlstr.replace(origin_str, replace_str)
    # security fix: literal_eval instead of eval — the string is derived from a
    # network response and must never be executed as arbitrary Python
    xmldict = ast.literal_eval(xmlstr)
    return xmldict
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# def get_id_from_xml(data, name):
|
| 183 |
+
# """解析xml中的特定字段"""
|
| 184 |
+
# tree = xml.dom.minidom.parseString(data)
|
| 185 |
+
# root = tree.documentElement
|
| 186 |
+
# result = root.getElementsByTagName(name)
|
| 187 |
+
# # use childNodes to get a list, if has no child get itself
|
| 188 |
+
# return result[0].childNodes[0].nodeValue
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def mapped(headers):
    """Translate S3-style kwarg names into COS HTTP header names via maplist.

    'Metadata' is special-cased: its sub-dict is flattened into the headers.

    :raises CosClientError: a kwarg name is not in maplist.
    """
    _headers = dict()
    for name in headers:
        if name not in maplist:
            raise CosClientError('No Parameter Named ' + name + ' Please Check It')
        if name == 'Metadata':
            for meta_key in headers[name]:
                _headers[meta_key] = headers[name][meta_key]
        else:
            _headers[maplist[name]] = headers[name]
    return _headers
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
# def format_xml(data, root, lst=list(), parent_child=False):
|
| 207 |
+
# """将dict转换为xml, xml_config是一个bytes"""
|
| 208 |
+
# if parent_child:
|
| 209 |
+
# xml_config = dicttoxml(data, item_func=lambda x: x[:-2], custom_root=root, attr_type=False)
|
| 210 |
+
# else:
|
| 211 |
+
# xml_config = dicttoxml(data, item_func=lambda x: x, custom_root=root, attr_type=False)
|
| 212 |
+
# for i in lst:
|
| 213 |
+
# xml_config = xml_config.replace(to_bytes(i + i), to_bytes(i))
|
| 214 |
+
# return xml_config
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def format_xml(data, root):
    """Serialize dict data as XML bytes under the given root tag."""
    return unparse(input_dict={root: data}).encode('utf-8')
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def format_values(data):
    """Coerce every value in a headers/params mapping to bytes, in place."""
    for key in data:
        data[key] = to_bytes(data[key])
    return data
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def format_endpoint(endpoint, region, module, EnableOldDomain, EnableInternalDomain):
    """Build the endpoint domain from an explicit endpoint or a region name."""
    # Global-acceleration callers pass endpoint without region; for the COS
    # module, having neither is an error.
    if not endpoint and not region and module == u'cos.':
        raise CosClientError("Region or Endpoint is required not empty!")
    if endpoint:
        return to_unicode(endpoint)
    if not region:
        return None
    formatted_region = format_region(region, module, EnableOldDomain, EnableInternalDomain)
    if EnableOldDomain:
        return formatted_region + u".myqcloud.com"
    return formatted_region + u".tencentcos.cn"
|
| 247 |
+
|
| 248 |
+
def switch_hostname(host):
    """Rewrite *.cos.<region>.myqcloud.com hosts to the tencentcos.cn domain."""
    if not host:
        raise CosClientError("Host is required not empty!")
    # only hosts of the shape *.cos.*-*.myqcloud.com are switched
    if re.match(r'^.*\.cos\..*\-.*\.myqcloud\.com$', host):
        return host[:-len(".myqcloud.com")] + ".tencentcos.cn"
    return host
|
| 257 |
+
|
| 258 |
+
def switch_hostname_for_url(url):
    """Apply switch_hostname to the host portion of a full URL."""
    if not url:
        raise CosClientError("Url is required not empty!")
    host = urlparse(url).hostname
    if host is not None:
        replacement = switch_hostname(host)
        if replacement != host:
            return url.replace(host, replacement)
    return url
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
def format_region(region, module, EnableOldDomain, EnableInternalDomain):
    """Normalize a region name and prefix it with the service module (e.g. 'cos.').

    :raises CosClientError: region is missing, not a string, or malformed.
    """
    if not isinstance(region, string_types):
        raise CosClientError("region is not string type")
    if not region:
        raise CosClientError("region is required not empty!")
    region = to_unicode(region)
    if not re.match(r'^[A-Za-z0-9][A-Za-z0-9.\-]*[A-Za-z0-9]$', region):
        raise CosClientError("region format is illegal, only digit, letter and - is allowed!")
    if module in region:
        return region  # already carries an explicit module prefix like 'cos.'
    legacy_regions = (u'cn-north', u'cn-south', u'cn-east', u'cn-south-2', u'cn-southwest', u'sg')
    if region in legacy_regions:
        return region  # legacy domains never get the module prefix
    # only new domains support the internal variant
    if not EnableOldDomain and EnableInternalDomain and module == u'cos.':
        module = u'cos-internal.'
    # map v4 region aliases onto their v5 names
    v4_to_v5 = {
        u'cossh': u'ap-shanghai',
        u'cosgz': u'ap-guangzhou',
        u'cosbj': u'ap-beijing',
        u'costj': u'ap-beijing-1',
        u'coscd': u'ap-chengdu',
        u'cossgp': u'ap-singapore',
        u'coshk': u'ap-hongkong',
        u'cosca': u'na-toronto',
        u'cosger': u'eu-frankfurt',
    }
    return module + v4_to_v5.get(region, region)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def format_bucket(bucket, appid):
    """Return the long bucket name ('<bucket>-<appid>').

    An empty appid means the bucket is assumed to already be in long form; a
    non-empty appid is appended unless the bucket already ends with it.

    :raises CosClientError: bucket/appid missing, not strings, or malformed.
    """
    if not isinstance(bucket, string_types):
        raise CosClientError("bucket is not string")
    if not bucket:
        raise CosClientError("bucket is required not empty")
    valid = re.match(r'^[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9]$', bucket) or re.match('^[A-Za-z0-9]$', bucket)
    if not valid:
        raise CosClientError("bucket format is illegal, only digit, letter and - is allowed!")
    if not appid:
        return to_unicode(bucket)
    if not isinstance(appid, string_types):
        raise CosClientError("appid is not string")
    bucket = to_unicode(bucket)
    suffix = u"-" + to_unicode(appid)
    if bucket.endswith(suffix):
        return bucket
    return bucket + suffix
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
def path_simplify_check(path):
    """Raise if the POSIX-simplified form of path collapses to '' or '/'.

    Guards GetObject against keys like 'a/../' that resolve to the bucket root.
    """
    segments = []
    for token in to_unicode(path).split(u'/'):
        if token == u'..':
            if segments:
                segments.pop()
        elif token and token != u'.':
            segments.append(token)
    if u'/' + u'/'.join(segments) == u'/':
        raise CosClientError("GetObject Key is invalid")
|
| 349 |
+
|
| 350 |
+
|
| 351 |
+
def format_path(path):
    """Validate an object key and return it URL-encoded without a leading '/'.

    :raises CosClientError: key missing or not a string.
    """
    if not isinstance(path, string_types):
        raise CosClientError("key is not string")
    if not path:
        raise CosClientError("Key is required not empty")
    path = to_unicode(path)
    if path.startswith(u'/'):
        path = path[1:]  # drop exactly one leading slash
    # encode up front; '/-_.~' stay literal for URL signing
    return quote(to_bytes(path), b'/-_.~')
|
| 363 |
+
|
| 364 |
+
|
| 365 |
+
def get_copy_source_info(CopySource, EnableOldDomain, EnableInternalDomain):
    """Extract (bucket, path, endpoint, versionid) from a CopySource dict.

    :raises CosClientError: 'Bucket' or 'Key' is missing.
    """
    if 'Bucket' not in CopySource:
        raise CosClientError('CopySource Need Parameter Bucket')
    appid = CopySource.get('Appid', u"")
    bucket = format_bucket(CopySource['Bucket'], appid)
    region = CopySource.get('Region', u"")
    endpoint = format_endpoint(CopySource.get('Endpoint', u""), region, u'cos.',
                               EnableOldDomain, EnableInternalDomain)
    if 'Key' not in CopySource:
        raise CosClientError('CopySource Need Parameter Key')
    path = to_unicode(CopySource['Key'])
    if path and path[0] == '/':
        path = path[1:]  # drop exactly one leading slash
    versionid = u""
    if 'VersionId' in CopySource:
        versionid = to_unicode(CopySource['VersionId'])
    return bucket, path, endpoint, versionid
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def gen_copy_source_url(CopySource, EnableOldDomain, EnableInternalDomain):
    """Assemble the copy-source URL '<bucket>.<endpoint>/<path>[?versionId=...]'."""
    bucket, path, endpoint, versionid = get_copy_source_info(
        CopySource, EnableOldDomain, EnableInternalDomain)
    path = format_path(path)
    if versionid != u'':
        path += u'?versionId=' + versionid
    return u"{0}.{1}/{2}".format(bucket, endpoint, path)
|
| 406 |
+
|
| 407 |
+
|
| 408 |
+
def gen_copy_source_range(begin_range, end_range):
    """Build a 'bytes=begin-end' HTTP range string."""
    return u"bytes={0}-{1}".format(to_unicode(begin_range), to_unicode(end_range))
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def get_file_like_object_length(data):
    """Return the number of bytes left to read from a file-like object."""
    try:
        total_length = os.fstat(data.fileno()).st_size  # real files: ask the OS
    except IOError:
        # in-memory objects: len() when available, else BytesIO-style getvalue()
        if hasattr(data, '__len__'):
            total_length = len(data)
        else:
            total_length = len(data.getvalue())
    try:
        current_position = data.tell()
    except IOError:
        current_position = 0
    return total_length - current_position
|
| 432 |
+
|
| 433 |
+
|
| 434 |
+
def check_object_content_length(data):
    """Reject bodies larger than 5GB (put_object/upload_part single-shot limit).

    :raises CosClientError: body exceeds SINGLE_UPLOAD_LENGTH.
    """
    if isinstance(data, (text_type, binary_type)):
        content_len = len(to_bytes(data))
    elif hasattr(data, 'fileno') and hasattr(data, 'tell'):
        content_len = get_file_like_object_length(data)
    else:
        content_len = 0  # unknown length: the caller falls back to chunked upload
    if content_len > SINGLE_UPLOAD_LENGTH:
        raise CosClientError('The object size you upload can not be larger than 5GB in put_object or upload_part')
    return None
|
| 447 |
+
|
| 448 |
+
|
| 449 |
+
def format_dict(data, key_lst):
    """Coerce repeatable response fields into lists so callers see a uniform shape.

    A single dict/string value becomes a one-element list; None becomes [].
    Non-dict data or a non-list key_lst is returned untouched.
    """
    if not (isinstance(data, dict) and isinstance(key_lst, list)):
        return data
    for key in key_lst:
        if key not in data:
            continue
        value = data[key]
        if isinstance(value, dict) or isinstance(value, string_types):
            data[key] = [value]
        if data[key] is None:
            data[key] = []
    return data
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def format_dict_or_list(data, key_lst):
    """Apply format_dict to a dict, or to every element of a list of dicts."""
    if not isinstance(key_lst, list):
        return data
    if isinstance(data, dict):
        return format_dict(data, key_lst)
    if isinstance(data, list):
        for entry in data:
            format_dict(entry, key_lst)
        return data
    return data
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
def decode_result(data, key_lst, multi_key_list):
    """URL-decode selected fields of a parsed response, in place.

    key_lst names top-level keys; multi_key_list holds (outer, inner) pairs
    where data[outer] is a list of dicts whose inner field is decoded.
    """
    for key in key_lst:
        if data.get(key):
            data[key] = unquote(data[key])
    for multi_key in multi_key_list:
        outer, inner = multi_key[0], multi_key[1]
        if outer not in data:
            continue
        for item in data[outer]:
            if item.get(inner):
                item[inner] = unquote(item[inner])
    return data
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
def get_date(yy, mm, dd):
    """Return the lifecycle Date string: midnight ISO timestamp at fixed +08:00."""
    return datetime(yy, mm, dd).isoformat() + '+08:00'
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
def parse_object_canned_acl(result_acl, rsp_headers):
    """Derive an object's canned ACL from the ACL body and the default header."""
    if rsp_headers.get("x-cos-acl") == "default":
        return "default"
    public_read = {'Grantee': {'Type': 'Group', 'URI': 'http://cam.qcloud.com/groups/global/AllUsers'},
                   'Permission': 'READ'}
    acl_list = result_acl.get('AccessControlList')
    if acl_list is not None and 'Grant' in acl_list and public_read in acl_list['Grant']:
        return "public-read"
    return "private"
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def parse_bucket_canned_acl(result_acl):
    """Derive a bucket's canned ACL from the ACL body."""
    all_users = {'Type': 'Group', 'URI': 'http://cam.qcloud.com/groups/global/AllUsers'}
    public_read = {'Grantee': all_users, 'Permission': 'READ'}
    public_write = {'Grantee': all_users, 'Permission': 'WRITE'}
    acl_list = result_acl.get('AccessControlList')
    if acl_list is not None and 'Grant' in acl_list:
        grants = acl_list['Grant']
        if public_read in grants:
            return "public-read-write" if public_write in grants else "public-read"
    return "private"
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def client_can_retry(file_position, **kwargs):
    """Decide whether a failed request may be retried (its body must be replayable)."""
    if 'data' not in kwargs:
        return True  # no body: always safe to retry
    body = kwargs['data']
    if isinstance(body, (text_type, binary_type)):
        return True  # in-memory body: replayable as-is
    seekable = hasattr(body, 'tell') and hasattr(body, 'seek') and hasattr(body, 'read')
    if file_position is not None and seekable:
        try:
            body.seek(file_position)  # rewind so the body can be re-sent
            return True
        except Exception:
            return False
    return False
|
| 538 |
+
|
| 539 |
+
|
| 540 |
+
class CiDetectType():
    """Bit flags for CI content auditing; OR several together to audit multiple types."""
    PORN = 1
    TERRORIST = 2
    POLITICS = 4
    ADS = 8
    ILLEGAL = 16
    ABUSE = 32
    TEENAGER = 64

    @staticmethod
    def get_detect_type_str(DetectType):
        """Render a flag set as the comma-separated string the CI endpoints expect.

        Note: this spelling is for the CI domain only; the COS endpoints differ.
        """
        labels = (
            (CiDetectType.PORN, 'Porn'),
            (CiDetectType.TERRORIST, 'Terrorism'),
            (CiDetectType.POLITICS, 'Politics'),
            (CiDetectType.ADS, 'Ads'),
            (CiDetectType.ILLEGAL, 'Illegal'),
            (CiDetectType.ABUSE, 'Abuse'),
            (CiDetectType.TEENAGER, 'Teenager'),
        )
        return ','.join(name for flag, name in labels if DetectType & flag > 0)
|
| 582 |
+
|
| 583 |
+
|
| 584 |
+
class ProgressCallback():
    """Thread-safe accumulator that forwards cumulative progress to a user callback."""

    def __init__(self, file_size, progress_callback):
        self.__lock = threading.Lock()  # guards __finished_size across uploader threads
        self.__finished_size = 0        # bytes reported so far
        self.__file_size = file_size    # total bytes expected
        self.__progress_callback = progress_callback

    def report(self, size):
        """Add size bytes to the running total and invoke the callback under the lock."""
        with self.__lock:
            self.__finished_size += size
            self.__progress_callback(self.__finished_size, self.__file_size)
|
wemm/lib/python3.10/site-packages/simplejson-3.19.3.dist-info/LICENSE.txt
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
simplejson is dual-licensed software. It is available under the terms
|
| 2 |
+
of the MIT license, or the Academic Free License version 2.1. The full
|
| 3 |
+
text of each license agreement is included below. This code is also
|
| 4 |
+
licensed to the Python Software Foundation (PSF) under a Contributor
|
| 5 |
+
Agreement.
|
| 6 |
+
|
| 7 |
+
MIT License
|
| 8 |
+
===========
|
| 9 |
+
|
| 10 |
+
Copyright (c) 2006 Bob Ippolito
|
| 11 |
+
|
| 12 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 13 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 14 |
+
the Software without restriction, including without limitation the rights to
|
| 15 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 16 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 17 |
+
so, subject to the following conditions:
|
| 18 |
+
|
| 19 |
+
The above copyright notice and this permission notice shall be included in all
|
| 20 |
+
copies or substantial portions of the Software.
|
| 21 |
+
|
| 22 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 23 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 24 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 25 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 26 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 27 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 28 |
+
SOFTWARE.
|
| 29 |
+
|
| 30 |
+
Academic Free License v. 2.1
|
| 31 |
+
============================
|
| 32 |
+
|
| 33 |
+
Copyright (c) 2006 Bob Ippolito. All rights reserved.
|
| 34 |
+
|
| 35 |
+
This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work:
|
| 36 |
+
|
| 37 |
+
Licensed under the Academic Free License version 2.1
|
| 38 |
+
|
| 39 |
+
1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following:
|
| 40 |
+
|
| 41 |
+
a) to reproduce the Original Work in copies;
|
| 42 |
+
|
| 43 |
+
b) to prepare derivative works ("Derivative Works") based upon the Original Work;
|
| 44 |
+
|
| 45 |
+
c) to distribute copies of the Original Work and Derivative Works to the public;
|
| 46 |
+
|
| 47 |
+
d) to perform the Original Work publicly; and
|
| 48 |
+
|
| 49 |
+
e) to display the Original Work publicly.
|
| 50 |
+
|
| 51 |
+
2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works.
|
| 52 |
+
|
| 53 |
+
3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work.
|
| 54 |
+
|
| 55 |
+
4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license.
|
| 56 |
+
|
| 57 |
+
5) This section intentionally omitted.
|
| 58 |
+
|
| 59 |
+
6) Attribution Rights. You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work.
|
| 60 |
+
|
| 61 |
+
7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer.
|
| 62 |
+
|
| 63 |
+
8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You.
|
| 64 |
+
|
| 65 |
+
9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions.
|
| 66 |
+
|
| 67 |
+
10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware.
|
| 68 |
+
|
| 69 |
+
11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License.
|
| 70 |
+
|
| 71 |
+
12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License.
|
| 72 |
+
|
| 73 |
+
13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable.
|
| 74 |
+
|
| 75 |
+
14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
| 76 |
+
|
| 77 |
+
15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You.
|
| 78 |
+
|
| 79 |
+
This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner.
|
wemm/lib/python3.10/site-packages/simplejson-3.19.3.dist-info/REQUESTED
ADDED
|
File without changes
|
wemm/lib/python3.10/site-packages/torchgen/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (514 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/__pycache__/code_template.cpython-310.pyc
ADDED
|
Binary file (3.04 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/__pycache__/gen_executorch.cpython-310.pyc
ADDED
|
Binary file (21.6 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/__pycache__/gen_vmap_plumbing.cpython-310.pyc
ADDED
|
Binary file (8.74 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (15.9 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/dispatcher.cpython-310.pyc
ADDED
|
Binary file (2.67 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/functionalization.cpython-310.pyc
ADDED
|
Binary file (3.26 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/structured.cpython-310.pyc
ADDED
|
Binary file (3.66 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/ufunc.cpython-310.pyc
ADDED
|
Binary file (4.58 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/__pycache__/unboxing.cpython-310.pyc
ADDED
|
Binary file (4.34 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/lazy.py
ADDED
|
@@ -0,0 +1,470 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Dict, List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from torchgen.api.types import (
|
| 4 |
+
BaseCppType,
|
| 5 |
+
BaseCType,
|
| 6 |
+
boolT,
|
| 7 |
+
CType,
|
| 8 |
+
deviceT,
|
| 9 |
+
doubleT,
|
| 10 |
+
layoutT,
|
| 11 |
+
ListCType,
|
| 12 |
+
longT,
|
| 13 |
+
memoryFormatT,
|
| 14 |
+
NamedCType,
|
| 15 |
+
OptionalCType,
|
| 16 |
+
scalarT,
|
| 17 |
+
scalarTypeT,
|
| 18 |
+
stringT,
|
| 19 |
+
SymIntT,
|
| 20 |
+
VectorCType,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from torchgen.model import (
|
| 24 |
+
Argument,
|
| 25 |
+
BaseTy,
|
| 26 |
+
BaseType,
|
| 27 |
+
FunctionSchema,
|
| 28 |
+
ListType,
|
| 29 |
+
OperatorName,
|
| 30 |
+
OptionalType,
|
| 31 |
+
Return,
|
| 32 |
+
TensorOptionsArguments,
|
| 33 |
+
Type,
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Backend-specific IR value type; configured once at codegen startup via
# setValueT() and read back through getValueT().
_valueT = None


# A ValueT is an IR type representing the computation of a Tensor: a PyTorch
# user performs operations on lazy tensors, and each output lazy tensor
# internally tracks a ValueT for the IR node that would have actually
# produced its value.
#
# This is configurable because different lazy tensor backends (LTC vs XLA)
# have different IR representations. (Though, arguably, after unification
# they shouldn't!)
def getValueT() -> BaseCppType:
    """Return the configured IR value type, raising if it was never set."""
    global _valueT
    if _valueT is None:
        raise NotImplementedError(
            "The value type needs to be set with setValueT() in run_gen_lazy_tensor()"
        )
    return _valueT
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def setValueT(val: BaseCppType) -> None:
    """Install ``val`` as the backend's IR value type (see ``getValueT``)."""
    global _valueT
    _valueT = val
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# this is a bad hack. I need to refactor the data model to represent each arg in the schema as an object,
# making it easier to represent special properties of an arg.
# NOTE: a TensorList argument is represented as a single torch::lazy::Value
# rather than a vector of Values (see the ListType branch below).
tensorListValueT = BaseCppType("torch::lazy", "Value")


def process_ir_type(
    typ: Type, properties: "LazyIrProperties", *, symint: bool
) -> Union[BaseCType, VectorCType, OptionalCType, ListCType]:
    """
    This function takes a type from NativeFunctions and converts it for use with
    lazy tensor codegen.

    Type conversion for lazy currently consists of
      (1) changing at::Tensors into lazy::Values
      (2) wrapping everything in a BaseCType
      (3) making cpp-reference types into cpp-value types (e.g. vector instead of IntArrayRef)

    (1) converts at::Tensors to lazy::Values (which wrap lazy::Nodes, with which Lazy IR represents tensors.)
    There is special handling for Optional[Tensor] or List[Tensor], etc- hence 'tensor-like'

    This is incomplete- there are assertions in places that it's expected to need to add
    more types as the codegen is used with more operators.
    """
    if isinstance(typ, BaseType):
        if typ.name == BaseTy.Tensor:
            # Tensors become IR values: the core of the lazy transformation.
            return BaseCType(getValueT())
        elif typ.name == BaseTy.Scalar:
            if properties.TreatScalarsAsConstants:
                return BaseCType(scalarT)
            # at::scalar has special handling,
            # and is wrapped in an lazy::Value just like at::tensor
            return BaseCType(getValueT())
        elif typ.name == BaseTy.ScalarType:
            return BaseCType(scalarTypeT)
        elif typ.name == BaseTy.int:
            return BaseCType(longT)
        elif typ.name == BaseTy.SymInt:
            # Under symint codegen a SymInt is traced as an IR value;
            # otherwise it degrades to a plain int64_t.
            if symint:
                return BaseCType(getValueT())
            else:
                return BaseCType(longT)
        elif typ.name == BaseTy.bool:
            return BaseCType(boolT)
        elif typ.name == BaseTy.float:
            return BaseCType(doubleT)
        elif typ.name == BaseTy.str:
            return BaseCType(stringT)
        elif typ.name == BaseTy.Device:
            return BaseCType(deviceT)
        elif typ.name == BaseTy.Layout:
            return BaseCType(layoutT)
        elif typ.name == BaseTy.MemoryFormat:
            return BaseCType(memoryFormatT)
        else:
            raise AssertionError(f"TODO add support for type {repr(typ)}")
    elif isinstance(typ, OptionalType):
        # Recurse on the element and re-wrap in an optional.
        return OptionalCType(process_ir_type(typ.elem, properties, symint=symint))
    elif isinstance(typ, ListType):
        if str(typ.elem) == "Tensor?":
            # TODO(whc) is this actually correct? or should it use a Vector like above
            return ListCType(OptionalCType(BaseCType(getValueT())))
        elif str(typ.elem) == "Tensor":
            # this is a TensorList which comes in from GetTensorList as a Value
            return BaseCType(tensorListValueT)
        elif typ.elem == BaseType(BaseTy.SymInt):
            # TODO: return a value type. The problem here is analogous to
            # the problem with tensorListValueT: if you have SymInt[] you
            # cannot conveniently save the list of Value directly, as nodes
            # expect to save values as a vector for ALL arguments. So you
            # need a separate IR node that represents all of the size nodes
            # assembled into a list. I'm not an LTC dev so I don't want to
            # figure it out right now. Y'all figure it out...
            return VectorCType(BaseCType(longT))

        else:
            # General lists become owning vectors of the converted element.
            return VectorCType(process_ir_type(typ.elem, properties, symint=symint))
    else:
        raise AssertionError(f"unrecognized type {repr(typ)}")
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# TODO: Determining this based off of CType is bad; this should be computed
# from Type directly; then the same logic as process_ir_type can be used
#
# Invariant: passed typ should be an *owning* CType (e.g., we will report
# that ArrayRef<Value> is NOT a value type)
def isValueType(typ: CType, properties: "Optional[LazyIrProperties]" = None) -> bool:
    """
    Given a type, determine if it is a Value-like type. This is equivalent to
    being Tensor-like, but assumes the type has already been transformed.
    """
    if isinstance(typ, BaseCType):
        # I am regretting my naming conventions, but now we are wrapping at::scalar in
        # lazy value, while preserving other 'scalar' types as scalars in the IR
        treat_scalars_as_constants = properties and properties.TreatScalarsAsConstants
        # Value-like: the backend's IR value type, a non-constant Scalar,
        # or a (scalar) SymInt.
        return (
            typ.type == getValueT()
            or (typ.type == scalarT and not treat_scalars_as_constants)
            or typ.type == SymIntT
        )
    elif typ == VectorCType(BaseCType(SymIntT)):
        # TODO: report True for this
        return False
    elif isinstance(typ, (OptionalCType, ListCType, VectorCType)):
        # Containers are value-like iff their element type is.
        return isValueType(typ.elem, properties)
    return False
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def isSymIntType(typ: Type) -> bool:
    """Return True iff ``typ`` is exactly the base ``SymInt`` type."""
    if not isinstance(typ, BaseType):
        return False
    return typ.name == BaseTy.SymInt
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def isWrappedScalarType(typ: Type) -> bool:
    """
    Given a type, determine if it is a c10::scalar which we will wrap in a lazy Value.
    Since we literally change the type from scalarT to valueT, information is lost.
    This function helps build a list of wrapped scalars to save that information
    """
    # Strip any Optional/List wrappers and test the innermost element;
    # equivalent to the recursive formulation but iterative.
    while isinstance(typ, (OptionalType, ListType)):
        typ = typ.elem
    # I am regretting my naming conventions, but now we are wrapping at::scalar in
    # lazy value, while preserving other 'scalar' types as scalars in the IR
    return isinstance(typ, BaseType) and typ.name == BaseTy.Scalar
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
# TODO: dedupe with Type.is_generator_like
def isGeneratorType(typ: Type) -> bool:
    """Return True iff ``typ`` is a (possibly Optional-wrapped) Generator."""
    # Unwrap any Optional layers, then check the base type.
    while isinstance(typ, OptionalType):
        typ = typ.elem
    return isinstance(typ, BaseType) and typ.name == BaseTy.Generator
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
# This class caches a few derived properties computed from an Argument
# and LazyIrProperties
class LazyArgument:
    """A single schema argument plus lazy-IR-specific derived properties.

    Wraps a torchgen ``Argument`` and precomputes, at construction time,
    how the argument should be treated by lazy tensor codegen (its
    converted CType, whether it is a wrapped scalar, a generator, etc.).
    """

    # Original argument name from the native function schema.
    name: str
    # Original (un-converted) schema type of the argument.
    orig_type: Type
    # Lazy-converted CType, or None for generator args (which lazy IR skips).
    lazy_type_: Optional[CType]
    is_wrapped_scalar: bool
    is_generator: bool
    # TODO: this is lies, it is false for symint list
    is_symint_or_list: bool

    # Whether or not we are treating this as symint or not
    symint: bool

    # true if this argument is or contains a lazy IR value
    is_lazy_value: bool

    def __init__(self, arg: Argument, properties: "LazyIrProperties", *, symint: bool):
        self.name = arg.name
        self.orig_type = arg.type
        self.symint = symint
        self.is_optional = isinstance(arg.type, OptionalType)
        self.is_generator = isGeneratorType(arg.type)
        if self.is_generator:
            assert (
                self.is_optional
            ), "We expect all generators are optional since currently they are"
            # there is no handling for generators in TorchScript IR (or XLA)
            # so we fall back to eager if the (optional)generator has value, and otherwise
            # its null and safe to exclude from lazy IR
            self.lazy_type_ = None
        else:
            # Non-generator args get a lazy-converted CType.
            self.lazy_type_ = process_ir_type(arg.type, properties, symint=symint)
        self.is_wrapped_scalar = isWrappedScalarType(arg.type)
        # SymInt (or Optional[SymInt]) args are value-like only under symint codegen.
        self.is_symint_or_list = symint and (
            isSymIntType(arg.type)
            or (isinstance(arg.type, OptionalType) and isSymIntType(arg.type.elem))
            # TODO: lists of symints are not currently treated as value types
            # or (isinstance(arg.type, ListType) and isSymIntType(arg.type.elem))
        )

        # Safe to access self.lazy_type here: the generator branch above set
        # lazy_type_ to None AND is_generator to True, short-circuiting `not`.
        self.is_lazy_value = not self.is_generator and isValueType(
            self.lazy_type, properties
        )

    @property
    def lazy_type(self) -> CType:
        """The converted CType; asserts the argument is not a generator."""
        assert (
            self.lazy_type_ is not None
        ), f"Attempted to access lazy_type for invalid argument {self.name}"
        return self.lazy_type_
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class LazyIrProperties:
    """Collection of properties for an IR node

    The property groups are listed below. Each group is mutually
    exclusive, meaning that only one property from each group can be True
    at any one time. The properties can be accessed as if they were normal
    attributes. The mutual exclusivity is automatically handled.

    Implementation: each group maps to at most one "active" property name;
    reading an attribute compares against the active name for its group,
    and setting an attribute to a truthy value makes it the active one
    (clearing the rest of its group).
    """

    # Groups of mutually exclusive property names.
    Properties: Tuple[Tuple[str, ...], ...] = (
        (
            "ShapePrecompute",  # Assume shape has been precomputed
            "ShapeCompute",  # Need to compute the shape on construction
            "ShapeCache",  # Utilize the shape cache to defer computation
        ),
        (
            "Lower",  # Codegen full lower function
            "LowerDeclOnly",  # Codegen only lower function declaration
        ),
        (
            "CanBeReused",  # Codegen full reuse function
            "CanBeReusedDeclOnly",  # Codegen only reuse function declaration
        ),
        (
            "CreateFn",  # Codegen full create function
            "CreateFnDeclOnly",  # Codegen only create function declaration
        ),
        (
            "TreatScalarsAsConstants",  # Treat Scalars as constants instead of handling like values
        ),
    )

    def __init__(self, *default_properties: str):
        # Map each group (tuple of names) to its currently-active property,
        # or None when no property in the group is set.
        properties: Dict[Tuple[str, ...], Optional[str]] = {
            p: None for p in LazyIrProperties.Properties
        }
        # Write through __dict__ directly to bypass our own __setattr__.
        self.__dict__["properties"] = properties
        for p in default_properties:
            setattr(self, p, True)

    def __getattr__(self, key: str) -> Any:
        # Only called for attributes NOT found normally; resolve property
        # names against their group's active entry.
        properties = self.__dict__["properties"]
        for values in LazyIrProperties.Properties:
            if key in values:
                return properties[values] == key

        # Not a known property: fall back to default lookup (raises
        # AttributeError for genuinely missing attributes).
        return self.__getattribute__(key)

    def __setattr__(self, key: str, value: Any) -> Any:
        properties = self.__dict__["properties"]
        for values in LazyIrProperties.Properties:
            if key in values:
                # Truthy value activates `key` for its group (displacing any
                # sibling); falsy clears the group entirely.
                properties[values] = key if value else None
                return value

        # Unlike normal attribute assignment, unknown names are rejected.
        raise KeyError(f"Invalid property: {key}")
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
# Inspired by a FunctionSchema object, a LazyIrSchema holds the schema of a Lazy IR node.
|
| 310 |
+
# Unlike a FunctionSchema, it has no round-trippable string form (relating to the YAML),
|
| 311 |
+
# but carries type information from a native FunctionSchema modified for use with IR nodes,
|
| 312 |
+
# and preserving original argument names.
|
| 313 |
+
#
|
| 314 |
+
# TODO: This is not idiomatic with how other torchgen APIs transform on schema.
|
| 315 |
+
class LazyIrSchema:
|
| 316 |
+
# The name of the operator this function schema describes.
|
| 317 |
+
name: "OperatorName"
|
| 318 |
+
|
| 319 |
+
positional_args: Tuple[LazyArgument, ...]
|
| 320 |
+
keyword_args: Tuple[LazyArgument, ...]
|
| 321 |
+
|
| 322 |
+
# TODO: Need to handle collisions with argument names at some point
|
| 323 |
+
returns: Tuple["Return", ...]
|
| 324 |
+
|
| 325 |
+
# if this schema has a Generator arg, list its orig ctype/name but don't
|
| 326 |
+
# build a LazyArgument since lazy IR doesn't support it
|
| 327 |
+
generator_arg: Optional[NamedCType] = None
|
| 328 |
+
|
| 329 |
+
# original function schema
|
| 330 |
+
func: FunctionSchema
|
| 331 |
+
|
| 332 |
+
# Whether or not we are code-genning for SymInt or not
|
| 333 |
+
symint: bool
|
| 334 |
+
|
| 335 |
+
properties: LazyIrProperties = LazyIrProperties(
|
| 336 |
+
# default properties
|
| 337 |
+
"ShapePrecompute",
|
| 338 |
+
"Lower",
|
| 339 |
+
"CanBeReused",
|
| 340 |
+
)
|
| 341 |
+
opkind: Optional[str] = None
|
| 342 |
+
|
| 343 |
+
def __init__(
|
| 344 |
+
self,
|
| 345 |
+
func: FunctionSchema,
|
| 346 |
+
properties: Optional[LazyIrProperties] = None,
|
| 347 |
+
*,
|
| 348 |
+
symint: bool,
|
| 349 |
+
):
|
| 350 |
+
if properties:
|
| 351 |
+
self.properties = properties
|
| 352 |
+
|
| 353 |
+
self.func = func
|
| 354 |
+
self.symint = symint
|
| 355 |
+
positional_args: List[LazyArgument] = []
|
| 356 |
+
for arg_field in ["pre_self_positional", "self_arg", "post_self_positional"]:
|
| 357 |
+
if arg_field == "self_arg" and func.arguments.self_arg is not None:
|
| 358 |
+
arg = getattr(func.arguments, "self_arg").argument
|
| 359 |
+
positional_args.append(
|
| 360 |
+
LazyArgument(arg, self.properties, symint=symint)
|
| 361 |
+
)
|
| 362 |
+
elif getattr(func.arguments, arg_field) is not None:
|
| 363 |
+
positional_args.extend(
|
| 364 |
+
LazyArgument(arg, self.properties, symint=symint)
|
| 365 |
+
for arg in getattr(func.arguments, arg_field)
|
| 366 |
+
)
|
| 367 |
+
self.positional_args = tuple(positional_args)
|
| 368 |
+
|
| 369 |
+
keyword_args: List[LazyArgument] = []
|
| 370 |
+
for arg_field in [
|
| 371 |
+
"pre_tensor_options_kwarg_only",
|
| 372 |
+
"tensor_options",
|
| 373 |
+
"post_tensor_options_kwarg_only",
|
| 374 |
+
"out",
|
| 375 |
+
]:
|
| 376 |
+
curr_args = getattr(func.arguments, arg_field)
|
| 377 |
+
if curr_args is not None:
|
| 378 |
+
if isinstance(curr_args, TensorOptionsArguments):
|
| 379 |
+
curr_args = curr_args.all()
|
| 380 |
+
for arg in curr_args:
|
| 381 |
+
if isGeneratorType(arg.type):
|
| 382 |
+
assert (
|
| 383 |
+
self.generator_arg is None
|
| 384 |
+
), "We expect there is only one generator arg"
|
| 385 |
+
self.generator_arg = NamedCType(arg.name, arg.type)
|
| 386 |
+
keyword_args.extend(
|
| 387 |
+
LazyArgument(arg, self.properties, symint=symint)
|
| 388 |
+
for arg in curr_args
|
| 389 |
+
)
|
| 390 |
+
self.keyword_args = tuple(keyword_args)
|
| 391 |
+
self.name = func.name
|
| 392 |
+
self.returns = func.returns
|
| 393 |
+
|
| 394 |
+
@property
|
| 395 |
+
def node_name(self) -> str:
|
| 396 |
+
"""
|
| 397 |
+
Return camel-case version of op in node.
|
| 398 |
+
|
| 399 |
+
Note: This function also appends any `overload_name` in the operation.
|
| 400 |
+
For example, if the op is `bitwise_and.Tensor`, the returned name
|
| 401 |
+
will be `BitwiseAndTensor`.
|
| 402 |
+
"""
|
| 403 |
+
op_name = f"{self.name.name}_{self.name.overload_name}".lower()
|
| 404 |
+
return "".join(word.capitalize() or "" for word in op_name.split("_"))
|
| 405 |
+
|
| 406 |
+
@property
|
| 407 |
+
def aten_name(self) -> str:
|
| 408 |
+
return str(self.name.name)
|
| 409 |
+
|
| 410 |
+
@property
|
| 411 |
+
def base_name(self) -> str:
|
| 412 |
+
return f"{self.name.name.base}"
|
| 413 |
+
|
| 414 |
+
def filtered_args(
|
| 415 |
+
self,
|
| 416 |
+
positional: bool = True,
|
| 417 |
+
keyword: bool = True,
|
| 418 |
+
values: bool = True,
|
| 419 |
+
scalars: bool = True,
|
| 420 |
+
generator: bool = False,
|
| 421 |
+
) -> List[LazyArgument]:
|
| 422 |
+
# This function maintains the sorted order of arguments but provides different filtered views.
|
| 423 |
+
# Some parts of the code care about kwargs vs args (TS lowerings),
|
| 424 |
+
# other parts care about whether they need to wrap the arg in a lazy value or leave it alone.
|
| 425 |
+
# Generators are special cased, as they are needed for fallback/shape-inference but not supported
|
| 426 |
+
# in TS lowerings and therefore also omitted from lazy IR.
|
| 427 |
+
args: List[LazyArgument] = []
|
| 428 |
+
if positional:
|
| 429 |
+
args.extend(self.positional_args)
|
| 430 |
+
if keyword:
|
| 431 |
+
args.extend(self.keyword_args)
|
| 432 |
+
|
| 433 |
+
if values and scalars and generator:
|
| 434 |
+
return args
|
| 435 |
+
elif values and scalars:
|
| 436 |
+
return [a for a in args if not a.is_generator]
|
| 437 |
+
elif values:
|
| 438 |
+
return [a for a in args if a.is_lazy_value]
|
| 439 |
+
elif scalars:
|
| 440 |
+
return [
|
| 441 |
+
a
|
| 442 |
+
for a in args
|
| 443 |
+
if not a.is_lazy_value and (generator or not a.is_generator)
|
| 444 |
+
]
|
| 445 |
+
|
| 446 |
+
return []
|
| 447 |
+
|
| 448 |
+
@property
|
| 449 |
+
def positional_values(self) -> List[LazyArgument]:
|
| 450 |
+
return self.filtered_args(
|
| 451 |
+
positional=True, keyword=False, values=True, scalars=False
|
| 452 |
+
)
|
| 453 |
+
|
| 454 |
+
@property
|
| 455 |
+
def positional_scalars(self) -> List[LazyArgument]:
|
| 456 |
+
return self.filtered_args(
|
| 457 |
+
positional=True, keyword=False, values=False, scalars=True
|
| 458 |
+
)
|
| 459 |
+
|
| 460 |
+
@property
|
| 461 |
+
def keyword_values(self) -> List[LazyArgument]:
|
| 462 |
+
return self.filtered_args(
|
| 463 |
+
positional=False, keyword=True, values=True, scalars=False
|
| 464 |
+
)
|
| 465 |
+
|
| 466 |
+
@property
|
| 467 |
+
def keyword_scalars(self) -> List[LazyArgument]:
|
| 468 |
+
return self.filtered_args(
|
| 469 |
+
positional=False, keyword=True, values=False, scalars=True
|
| 470 |
+
)
|
wemm/lib/python3.10/site-packages/torchgen/api/types/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (234 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/types/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (5.86 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/api/types/signatures.py
ADDED
|
@@ -0,0 +1,422 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
|
| 3 |
+
from typing import Iterator, List, Optional, Sequence, Set, Tuple, Union
|
| 4 |
+
|
| 5 |
+
from torchgen.model import (
|
| 6 |
+
BackendIndex,
|
| 7 |
+
FunctionSchema,
|
| 8 |
+
NativeFunction,
|
| 9 |
+
NativeFunctionsGroup,
|
| 10 |
+
NativeFunctionsViewGroup,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
from .types_base import Binding, CType, Expr
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@dataclass(frozen=True)
|
| 17 |
+
class CppSignature:
|
| 18 |
+
"""
|
| 19 |
+
A CppSignature represents a single overload in the C++ API. For
|
| 20 |
+
any given function schema, there may be multiple CppSignatures
|
| 21 |
+
corresponding to it, based on how we desugar to C++. See also
|
| 22 |
+
CppSignatureGroup.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
# The schema this signature is derived from
|
| 26 |
+
func: FunctionSchema
|
| 27 |
+
|
| 28 |
+
# Is this a C++ signature for a method, i.e. Tensor::my_op(...)?
|
| 29 |
+
method: bool
|
| 30 |
+
|
| 31 |
+
# Is this a faithful C++ signature (i.e. following the JIT schema) or a convenience API
|
| 32 |
+
# (i.e. with a potential TensorOptions argument and out arguments in the front)
|
| 33 |
+
faithful: bool
|
| 34 |
+
|
| 35 |
+
# Is this a symint C++ signature. For BC reasons, functions that take
|
| 36 |
+
# SymInts still present as int64_t in C++, and the SymInt variant is
|
| 37 |
+
# offered at a different overload name
|
| 38 |
+
symint: bool
|
| 39 |
+
|
| 40 |
+
# The set of C++ arguments which should not have defaults applied to them
|
| 41 |
+
cpp_no_default_args: Set[str]
|
| 42 |
+
|
| 43 |
+
# Is this a fallback C++ binding? Fallback bindings are enabled by
|
| 44 |
+
# manual_cpp_binding: True and are alternate, non-public API that
|
| 45 |
+
# lets manual C++ binding implementors access the binding that would
|
| 46 |
+
# have been automatically generated
|
| 47 |
+
fallback_binding: bool = False
|
| 48 |
+
|
| 49 |
+
# Return the unpacked argument structure of this signature,
|
| 50 |
+
# discarding information about which arguments are semantically
|
| 51 |
+
# related to each other.
|
| 52 |
+
def arguments(self) -> Sequence[Binding]:
|
| 53 |
+
return cpp.arguments(
|
| 54 |
+
self.func.arguments,
|
| 55 |
+
faithful=self.faithful,
|
| 56 |
+
symint=self.symint,
|
| 57 |
+
method=self.method,
|
| 58 |
+
cpp_no_default_args=self.cpp_no_default_args,
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
def name(self, *, suppress_symint_suffix: bool = False) -> str:
|
| 62 |
+
n = cpp.name(
|
| 63 |
+
self.func,
|
| 64 |
+
faithful_name_for_out_overloads=self.faithful,
|
| 65 |
+
symint_overload=False if suppress_symint_suffix else self.symint,
|
| 66 |
+
)
|
| 67 |
+
if self.fallback_binding:
|
| 68 |
+
n = f"__dispatch_{n}"
|
| 69 |
+
return n
|
| 70 |
+
|
| 71 |
+
# Render the C++ declaration for this signature
|
| 72 |
+
def decl(
|
| 73 |
+
self,
|
| 74 |
+
*,
|
| 75 |
+
name: Optional[str] = None,
|
| 76 |
+
prefix: str = "",
|
| 77 |
+
is_redispatching_fn: bool = False,
|
| 78 |
+
suppress_symint_suffix: bool = False,
|
| 79 |
+
) -> str:
|
| 80 |
+
returns_type = cpp.returns_type(
|
| 81 |
+
self.func.returns, symint=self.symint
|
| 82 |
+
).cpp_type()
|
| 83 |
+
cpp_args = [a.decl() for a in self.arguments()]
|
| 84 |
+
if is_redispatching_fn:
|
| 85 |
+
cpp_args = ["c10::DispatchKeySet dispatchKeySet"] + cpp_args
|
| 86 |
+
cpp_args_str = ", ".join(cpp_args)
|
| 87 |
+
if name is None:
|
| 88 |
+
name = prefix + self.name(suppress_symint_suffix=suppress_symint_suffix)
|
| 89 |
+
return f"{returns_type} {name}({cpp_args_str})"
|
| 90 |
+
|
| 91 |
+
# Render the C++ definition for this signature, not including
|
| 92 |
+
# the body (with curly braces)
|
| 93 |
+
def defn(
|
| 94 |
+
self,
|
| 95 |
+
*,
|
| 96 |
+
name: Optional[str] = None,
|
| 97 |
+
prefix: str = "",
|
| 98 |
+
is_redispatching_fn: bool = False,
|
| 99 |
+
) -> str:
|
| 100 |
+
returns_type = cpp.returns_type(
|
| 101 |
+
self.func.returns, symint=self.symint
|
| 102 |
+
).cpp_type()
|
| 103 |
+
cpp_args = [a.defn() for a in self.arguments()]
|
| 104 |
+
if is_redispatching_fn:
|
| 105 |
+
cpp_args = ["c10::DispatchKeySet dispatchKeySet"] + cpp_args
|
| 106 |
+
cpp_args_str = ", ".join(cpp_args)
|
| 107 |
+
if name is None:
|
| 108 |
+
name = prefix + self.name()
|
| 109 |
+
return f"{returns_type} {name}({cpp_args_str})"
|
| 110 |
+
|
| 111 |
+
def ptr_type(self) -> str:
|
| 112 |
+
args_types_str = ", ".join(a.type for a in self.arguments())
|
| 113 |
+
return f"{cpp.returns_type(self.func.returns, symint=self.symint).cpp_type()} (*)({args_types_str})"
|
| 114 |
+
|
| 115 |
+
# Return the C++ function type, e.g., something like int(bool)
|
| 116 |
+
def type(self) -> str:
|
| 117 |
+
args_types_str = ", ".join(a.type for a in self.arguments())
|
| 118 |
+
return f"{cpp.returns_type(self.func.returns, symint=self.symint).cpp_type()} ({args_types_str})"
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
# Represents group of all CppSignatures associated with a
|
| 122 |
+
# FunctionSchema. Right now, that's the regular, user-visible
|
| 123 |
+
# signature, as well as a "faithful" signature which doesn't
|
| 124 |
+
# have grouping.
|
| 125 |
+
@dataclass(frozen=True)
|
| 126 |
+
class CppSignatureGroup:
|
| 127 |
+
func: FunctionSchema
|
| 128 |
+
signature: CppSignature
|
| 129 |
+
faithful_signature: Optional[CppSignature]
|
| 130 |
+
symint_signature: Optional[CppSignature]
|
| 131 |
+
symint_faithful_signature: Optional[CppSignature]
|
| 132 |
+
|
| 133 |
+
def most_faithful_signature(self) -> CppSignature:
|
| 134 |
+
if self.faithful_signature:
|
| 135 |
+
return self.faithful_signature
|
| 136 |
+
else:
|
| 137 |
+
return self.signature
|
| 138 |
+
|
| 139 |
+
def signatures(self, *, symint: bool = True) -> Iterator[CppSignature]:
|
| 140 |
+
yield self.signature
|
| 141 |
+
if self.faithful_signature:
|
| 142 |
+
yield self.faithful_signature
|
| 143 |
+
if symint:
|
| 144 |
+
if self.symint_signature:
|
| 145 |
+
yield self.symint_signature
|
| 146 |
+
if self.symint_faithful_signature:
|
| 147 |
+
yield self.symint_faithful_signature
|
| 148 |
+
|
| 149 |
+
@staticmethod
|
| 150 |
+
def from_native_function(
|
| 151 |
+
f: NativeFunction, *, method: bool, fallback_binding: bool = False
|
| 152 |
+
) -> "CppSignatureGroup":
|
| 153 |
+
func = f.func
|
| 154 |
+
|
| 155 |
+
def make_sig(*, faithful: bool, symint: bool) -> CppSignature:
|
| 156 |
+
return CppSignature(
|
| 157 |
+
func=func,
|
| 158 |
+
faithful=faithful,
|
| 159 |
+
symint=symint,
|
| 160 |
+
method=method,
|
| 161 |
+
fallback_binding=fallback_binding,
|
| 162 |
+
cpp_no_default_args=f.cpp_no_default_args,
|
| 163 |
+
)
|
| 164 |
+
|
| 165 |
+
def make_sigs(*, symint: bool) -> Tuple[CppSignature, Optional[CppSignature]]:
|
| 166 |
+
faithful_signature: Optional[CppSignature] = None
|
| 167 |
+
if func.arguments.tensor_options is not None or len(func.arguments.out) > 0:
|
| 168 |
+
faithful_signature = make_sig(faithful=True, symint=symint)
|
| 169 |
+
signature = make_sig(faithful=False, symint=symint)
|
| 170 |
+
return signature, faithful_signature
|
| 171 |
+
|
| 172 |
+
signature, faithful_signature = make_sigs(symint=False)
|
| 173 |
+
symint_signature: Optional[CppSignature] = None
|
| 174 |
+
symint_faithful_signature: Optional[CppSignature] = None
|
| 175 |
+
if func.has_symint():
|
| 176 |
+
symint_signature, symint_faithful_signature = make_sigs(symint=True)
|
| 177 |
+
|
| 178 |
+
return CppSignatureGroup(
|
| 179 |
+
func=func,
|
| 180 |
+
signature=signature,
|
| 181 |
+
faithful_signature=faithful_signature,
|
| 182 |
+
symint_signature=symint_signature,
|
| 183 |
+
symint_faithful_signature=symint_faithful_signature,
|
| 184 |
+
)
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
@dataclass(frozen=True)
|
| 188 |
+
class DispatcherSignature:
|
| 189 |
+
# The schema this signature is derived from
|
| 190 |
+
func: FunctionSchema
|
| 191 |
+
|
| 192 |
+
# Allows you to prepend an arbitrary prefix to the signature name.
|
| 193 |
+
# This is useful for parts of the codegen that generate wrappers around kernels,
|
| 194 |
+
# and need to avoid naming collisions.
|
| 195 |
+
prefix: str = ""
|
| 196 |
+
|
| 197 |
+
symint: bool = True
|
| 198 |
+
|
| 199 |
+
def arguments(self) -> List[Binding]:
|
| 200 |
+
return dispatcher.arguments(self.func, symint=self.symint)
|
| 201 |
+
|
| 202 |
+
def name(self) -> str:
|
| 203 |
+
return self.prefix + dispatcher.name(self.func)
|
| 204 |
+
|
| 205 |
+
def decl(self, name: Optional[str] = None) -> str:
|
| 206 |
+
args_str = ", ".join(a.decl() for a in self.arguments())
|
| 207 |
+
if name is None:
|
| 208 |
+
name = self.name()
|
| 209 |
+
return f"{self.returns_type().cpp_type()} {name}({args_str})"
|
| 210 |
+
|
| 211 |
+
def defn(
|
| 212 |
+
self, name: Optional[str] = None, *, is_redispatching_fn: bool = False
|
| 213 |
+
) -> str:
|
| 214 |
+
args = [a.defn() for a in self.arguments()]
|
| 215 |
+
if is_redispatching_fn:
|
| 216 |
+
args = ["c10::DispatchKeySet dispatchKeySet"] + args
|
| 217 |
+
args_str = ", ".join(args)
|
| 218 |
+
if name is None:
|
| 219 |
+
name = self.name()
|
| 220 |
+
return f"{self.returns_type().cpp_type()} {name}({args_str})"
|
| 221 |
+
|
| 222 |
+
def exprs(self) -> List[Expr]:
|
| 223 |
+
return [Expr(a.name, a.nctype) for a in self.arguments()]
|
| 224 |
+
|
| 225 |
+
def returns_type(self) -> CType:
|
| 226 |
+
return dispatcher.returns_type(self.func.returns, symint=self.symint)
|
| 227 |
+
|
| 228 |
+
def ptr_type(self) -> str:
|
| 229 |
+
dispatcher_args_types_str = ", ".join(a.type for a in self.arguments())
|
| 230 |
+
return f"{self.returns_type().cpp_type()} (*)({dispatcher_args_types_str})"
|
| 231 |
+
|
| 232 |
+
# Return the C++ function type, e.g., something like int(bool)
|
| 233 |
+
def type(self) -> str:
|
| 234 |
+
dispatcher_args_types_str = ", ".join(a.type for a in self.arguments())
|
| 235 |
+
return f"{self.returns_type().cpp_type()} ({dispatcher_args_types_str})"
|
| 236 |
+
|
| 237 |
+
@staticmethod
|
| 238 |
+
def from_schema(
|
| 239 |
+
func: FunctionSchema, *, prefix: str = "", symint: bool = True
|
| 240 |
+
) -> "DispatcherSignature":
|
| 241 |
+
return DispatcherSignature(func, prefix, symint)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
@dataclass(frozen=True)
|
| 245 |
+
class NativeSignature:
|
| 246 |
+
# The schema this signature is derived from
|
| 247 |
+
func: FunctionSchema
|
| 248 |
+
|
| 249 |
+
symint: bool
|
| 250 |
+
|
| 251 |
+
prefix: str = ""
|
| 252 |
+
|
| 253 |
+
def name(self) -> str:
|
| 254 |
+
return self.prefix + native.name(self.func)
|
| 255 |
+
|
| 256 |
+
def decl(self, name: Optional[str] = None) -> str:
|
| 257 |
+
args_str = ", ".join(a.decl() for a in self.arguments())
|
| 258 |
+
if name is None:
|
| 259 |
+
name = self.name()
|
| 260 |
+
return f"{native.returns_type(self.func.returns, symint=self.symint).cpp_type()} {name}({args_str})"
|
| 261 |
+
|
| 262 |
+
def defn(self, name: Optional[str] = None) -> str:
|
| 263 |
+
args_str = ", ".join(a.defn() for a in self.arguments())
|
| 264 |
+
if name is None:
|
| 265 |
+
name = self.name()
|
| 266 |
+
return f"{native.returns_type(self.func.returns, symint=self.symint).cpp_type()} {name}({args_str})"
|
| 267 |
+
|
| 268 |
+
def ptr_type(self) -> str:
|
| 269 |
+
# don't include defaults in type signature!
|
| 270 |
+
args_str = ", ".join(a.defn() for a in self.arguments())
|
| 271 |
+
return f"{native.returns_type(self.func.returns, symint=self.symint).cpp_type()} (*)({args_str})"
|
| 272 |
+
|
| 273 |
+
def arguments(self) -> List[Binding]:
|
| 274 |
+
return native.arguments(self.func, symint=self.symint)
|
| 275 |
+
|
| 276 |
+
def returns_type(self) -> CType:
|
| 277 |
+
return native.returns_type(self.func.returns, symint=self.symint)
|
| 278 |
+
|
| 279 |
+
def dispatcher_exprs(self) -> List[Expr]:
|
| 280 |
+
return translate.translate(
|
| 281 |
+
self.arguments(), dispatcher.arguments(self.func), method=False
|
| 282 |
+
)
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
@dataclass(frozen=True)
|
| 286 |
+
class ViewInverseSignature:
|
| 287 |
+
g: NativeFunctionsViewGroup
|
| 288 |
+
|
| 289 |
+
def name(self) -> str:
|
| 290 |
+
assert self.g.view_copy is not None
|
| 291 |
+
return functionalization.name(self.g, is_reverse=True, include_namespace=False)
|
| 292 |
+
|
| 293 |
+
def decl(self) -> str:
|
| 294 |
+
assert self.g.view_copy is not None
|
| 295 |
+
return_type = functionalization.returns_type(self.g.view_copy.func)
|
| 296 |
+
decls = [
|
| 297 |
+
a.decl()
|
| 298 |
+
for a in functionalization.inner_arguments(
|
| 299 |
+
self.g.view_copy.func, is_reverse=True
|
| 300 |
+
)
|
| 301 |
+
]
|
| 302 |
+
return f"static {return_type.cpp_type()} {self.name()}({', '.join(decls)});"
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
@dataclass(frozen=True)
|
| 306 |
+
class FunctionalizationLambda:
|
| 307 |
+
g: NativeFunctionsViewGroup
|
| 308 |
+
|
| 309 |
+
# are we generating the forward lambda or the reverse lambda?
|
| 310 |
+
is_reverse: bool
|
| 311 |
+
|
| 312 |
+
def captures(self) -> List[Expr]:
|
| 313 |
+
# The lambda lives inside of a kernel following the dispatcher API, so its outer context is the dispatcher arguments
|
| 314 |
+
# We also need to read the "reapply views" TLS at the time that the functionalization kernel was executed,
|
| 315 |
+
# and plumb it into the lambda.
|
| 316 |
+
outer_ctx = dispatcher.arguments(self.g.view.func) + [
|
| 317 |
+
functionalization.reapply_views_binding
|
| 318 |
+
]
|
| 319 |
+
capture_bindings = functionalization.capture_arguments(
|
| 320 |
+
self.g.view.func, is_reverse=self.is_reverse
|
| 321 |
+
)
|
| 322 |
+
# allow_expensive_conversions is set because we want to convert
|
| 323 |
+
# some reference types (IntArrayRef) to value types (vector<int64_t>).
|
| 324 |
+
capture_exprs = translate.translate(
|
| 325 |
+
outer_ctx, capture_bindings, method=False, allow_expensive_conversions=True
|
| 326 |
+
)
|
| 327 |
+
return capture_exprs
|
| 328 |
+
|
| 329 |
+
def decl(self) -> str:
|
| 330 |
+
return_type = functionalization.returns_type(self.g.view.func)
|
| 331 |
+
capture_str = ", ".join(
|
| 332 |
+
f"{val.type.name} = {val.expr}" for val in self.captures()
|
| 333 |
+
)
|
| 334 |
+
decls = [
|
| 335 |
+
a.decl()
|
| 336 |
+
for a in functionalization.outer_arguments(is_reverse=self.is_reverse)
|
| 337 |
+
]
|
| 338 |
+
return f"[{capture_str}]({', '.join(decls)}) -> {return_type.cpp_type()}"
|
| 339 |
+
|
| 340 |
+
def inner_call(self, *, reapply_views: Optional[bool] = None) -> str:
|
| 341 |
+
inner_call_name = functionalization.name(
|
| 342 |
+
self.g,
|
| 343 |
+
is_reverse=self.is_reverse,
|
| 344 |
+
include_namespace=True,
|
| 345 |
+
reapply_views=reapply_views,
|
| 346 |
+
)
|
| 347 |
+
|
| 348 |
+
arg_ctx = functionalization.outer_arguments(is_reverse=self.is_reverse)
|
| 349 |
+
capture_ctx = functionalization.capture_arguments(
|
| 350 |
+
self.g.view.func, is_reverse=self.is_reverse
|
| 351 |
+
)
|
| 352 |
+
full_ctx = arg_ctx + capture_ctx
|
| 353 |
+
|
| 354 |
+
assert self.g.view_copy is not None
|
| 355 |
+
call_bindings = functionalization.inner_arguments(
|
| 356 |
+
self.g.view_copy.func, is_reverse=self.is_reverse
|
| 357 |
+
)
|
| 358 |
+
maybe_index = functionalization.inner_call_index(self.g.view_copy.func)
|
| 359 |
+
call_exprs = [
|
| 360 |
+
e.expr for e in translate.translate(full_ctx, call_bindings, method=False)
|
| 361 |
+
]
|
| 362 |
+
if not self.is_reverse and maybe_index is not None:
|
| 363 |
+
return f'{inner_call_name}({", ".join(call_exprs)})[{maybe_index.name}];'
|
| 364 |
+
else:
|
| 365 |
+
return f'{inner_call_name}({", ".join(call_exprs)});'
|
| 366 |
+
|
| 367 |
+
@staticmethod
|
| 368 |
+
def from_func(
|
| 369 |
+
g: NativeFunctionsViewGroup, *, is_reverse: bool
|
| 370 |
+
) -> "FunctionalizationLambda":
|
| 371 |
+
return FunctionalizationLambda(g, is_reverse)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
@dataclass(frozen=True)
|
| 375 |
+
class StructuredImplSignature:
|
| 376 |
+
g: NativeFunctionsGroup
|
| 377 |
+
name: str
|
| 378 |
+
|
| 379 |
+
def defn(self, name: Optional[str] = None) -> str:
|
| 380 |
+
args_str = ", ".join(a.defn() for a in self.arguments())
|
| 381 |
+
return f"TORCH_IMPL_FUNC({self.name})({args_str})"
|
| 382 |
+
|
| 383 |
+
def arguments(self) -> List[Binding]:
|
| 384 |
+
return structured.impl_arguments(self.g)
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
# Helper functions
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
def kernel_signature(
|
| 391 |
+
f: NativeFunction, backend_index: BackendIndex, *, prefix: str = ""
|
| 392 |
+
) -> Union["NativeSignature", "DispatcherSignature"]:
|
| 393 |
+
# Note [External Backends Follow Dispatcher API]
|
| 394 |
+
# Kernel signatures for in-tree backends follow the "native" API,
|
| 395 |
+
# while kernels for out-of-tree backends follow the dispatcher API.
|
| 396 |
+
# See the comments in `native.py` for details, but historically there have been
|
| 397 |
+
# some small differences in schema convention between them and the Dispatcher API.
|
| 398 |
+
# Any differences that require translating between the two will results in a runtime cost,
|
| 399 |
+
# so we'd like to keep the differences as small as possible.
|
| 400 |
+
# With external backends, we'd like to enforce that they write their kernels with schemas
|
| 401 |
+
# that match the Dispatcher API directly, if they can.
|
| 402 |
+
meta = backend_index.get_kernel(f)
|
| 403 |
+
symint = meta is not None and meta.supports_symint()
|
| 404 |
+
if symint:
|
| 405 |
+
assert (
|
| 406 |
+
f.func.has_symint()
|
| 407 |
+
), f"attempted to define symint kernel for {backend_index.dispatch_key} without SymInt in schema"
|
| 408 |
+
if backend_index.external:
|
| 409 |
+
return DispatcherSignature.from_schema(f.func, prefix=prefix, symint=symint)
|
| 410 |
+
else:
|
| 411 |
+
return NativeSignature(f.func, prefix=prefix, symint=symint)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
# Functions only, no types
|
| 415 |
+
from torchgen.api import (
|
| 416 |
+
cpp,
|
| 417 |
+
dispatcher,
|
| 418 |
+
functionalization,
|
| 419 |
+
native,
|
| 420 |
+
structured,
|
| 421 |
+
translate,
|
| 422 |
+
)
|
wemm/lib/python3.10/site-packages/torchgen/api/unboxing.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Tuple
|
| 2 |
+
|
| 3 |
+
from torchgen.api import cpp
|
| 4 |
+
from torchgen.api.types import Binding, CppSignatureGroup, CType
|
| 5 |
+
from torchgen.model import (
|
| 6 |
+
Argument,
|
| 7 |
+
BaseTy,
|
| 8 |
+
BaseType,
|
| 9 |
+
ListType,
|
| 10 |
+
NativeFunction,
|
| 11 |
+
OptionalType,
|
| 12 |
+
Type,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
# This file generates the code for unboxing wrappers, i.e., the glue logic to unbox a boxed operator and convert the
|
| 16 |
+
# ivalues from stack to correct arguments to the unboxed kernel, based on corresponding JIT schema. This codegen is
|
| 17 |
+
# an alternative way to generate unboxing wrappers similar to the existing C++ metaprogramming approach but gets the
|
| 18 |
+
# job done statically. These generated unboxing wrappers will be useful under the scenario where we need to register
|
| 19 |
+
# a fixed set of operators known at compile time and thus can save some time in runtime initialization phase.
|
| 20 |
+
#
|
| 21 |
+
# Here's an example on how the codegen works:
|
| 22 |
+
#
|
| 23 |
+
# - Function Schema (source of truth)
|
| 24 |
+
#
|
| 25 |
+
# aten::empty.names(int[] size, *, Dimname[]? names,
|
| 26 |
+
# ScalarType? dtype=None, Layout? layout=None,
|
| 27 |
+
# Device? device=None, bool? pin_memory=None,
|
| 28 |
+
# MemoryFormat? memory_format=None) -> Tensor
|
| 29 |
+
# - Argument Conversion
|
| 30 |
+
# Generates C++ code to convert an ivalue (from stack) to its underlying C++ type.
|
| 31 |
+
# - int[] size
|
| 32 |
+
# ```cpp
|
| 33 |
+
# const c10::List<c10::IValue> size_list_in = (std::move(peek(stack, 0, 7))).toList();
|
| 34 |
+
#
|
| 35 |
+
# std::vector<int64_t> size_vec;
|
| 36 |
+
# for (c10::IValue size_elem: size_list_in) {
|
| 37 |
+
# int64_t size_base = size_elem.to<int64_t>();
|
| 38 |
+
# size_vec.push_back(size_base);
|
| 39 |
+
# }
|
| 40 |
+
# at::ArrayRef<int64_t> size_list_out(size_vec);
|
| 41 |
+
# ~~~~~~~~~~~~~ <-- The converted argument from ivalues in the stack.
|
| 42 |
+
# Will be passed to unboxed kernel.
|
| 43 |
+
# ```
|
| 44 |
+
# - Dimname[]? names
|
| 45 |
+
# ```cpp
|
| 46 |
+
# c10::optional<c10::IValue> names_opt = (std::move(peek(stack, 1, 7))).toOptional<c10::IValue>();
|
| 47 |
+
# c10::optional<at::ArrayRef<at::Dimname>> names_opt_out;
|
| 48 |
+
# if (names_opt.has_value()) {
|
| 49 |
+
# ~~~~~~~~~~~ <-- Unwrapping optional shell
|
| 50 |
+
# const c10::IValue names_opt_in = names_opt.value();
|
| 51 |
+
# const c10::List<c10::IValue> names_list_in = names_opt_in.toList();
|
| 52 |
+
#
|
| 53 |
+
# std::vector<at::Dimname> names_vec;
|
| 54 |
+
# for (c10::IValue names_elem: names_list_in) {
|
| 55 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~ <-- Unrolling list, then convert elements one by one.
|
| 56 |
+
# at::Dimname names_base = names_elem.to<at::Dimname>();
|
| 57 |
+
# names_vec.push_back(names_base);
|
| 58 |
+
# }
|
| 59 |
+
# at::ArrayRef<at::Dimname> names_list_out(names_vec);
|
| 60 |
+
#
|
| 61 |
+
# names_opt_out = c10::optional<at::ArrayRef<at::Dimname>>(names_list_out);
|
| 62 |
+
# } else {
|
| 63 |
+
# names_opt_out = c10::optional<at::ArrayRef<at::Dimname>>();
|
| 64 |
+
# }
|
| 65 |
+
# ```
|
| 66 |
+
# - ScalarType? dtype (similarly for the rest of the arguments)
|
| 67 |
+
# ```cpp
|
| 68 |
+
# c10::optional<c10::IValue> dtype_opt = (std::move(peek(stack, 2, 7))).toOptional<c10::IValue>();
|
| 69 |
+
# c10::optional<at::ScalarType> dtype_opt_out;
|
| 70 |
+
# if (dtype_opt.has_value()) {
|
| 71 |
+
# const c10::IValue dtype_opt_in = dtype_opt.value();
|
| 72 |
+
# at::ScalarType dtype_base = dtype_opt_in.to<at::ScalarType>();
|
| 73 |
+
# ~~~~~~~~~~~~~~~~~~~~ <-- For base types, convert ivalue to it
|
| 74 |
+
# directly using ".to<T>()" API.
|
| 75 |
+
# dtype_opt_out = c10::optional<at::ScalarType>(dtype_base);
|
| 76 |
+
# } else {
|
| 77 |
+
# dtype_opt_out = c10::optional<at::ScalarType>();
|
| 78 |
+
# }
|
| 79 |
+
# ```
|
| 80 |
+
#
|
| 81 |
+
# - Unboxed Kernel Call
|
| 82 |
+
# ```cpp
|
| 83 |
+
# auto result_ = torch::empty(
|
| 84 |
+
# size_list_out,
|
| 85 |
+
# names_opt_out,
|
| 86 |
+
# options,
|
| 87 |
+
# memory_format_opt_out
|
| 88 |
+
# );
|
| 89 |
+
# ```
|
| 90 |
+
#
|
| 91 |
+
# - Push Result Back to Stack
|
| 92 |
+
# ```cpp
|
| 93 |
+
# drop(stack, 7);
|
| 94 |
+
# pack(stack, std::move(result_));
|
| 95 |
+
# ```
|
| 96 |
+
connector = "\n\t"
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
# Return unboxing function name for a NativeFunction
|
| 100 |
+
def name(f: NativeFunction) -> str:
|
| 101 |
+
return f.func.name.unambiguous_name()
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# Convert all the arguments in a NativeFunction to C++ code
|
| 105 |
+
def convert_arguments(f: NativeFunction) -> Tuple[List[Binding], List[str]]:
|
| 106 |
+
# we need the 'self' argument so method needs to be False
|
| 107 |
+
args = (
|
| 108 |
+
CppSignatureGroup.from_native_function(f, method=False)
|
| 109 |
+
.most_faithful_signature()
|
| 110 |
+
.arguments()
|
| 111 |
+
)
|
| 112 |
+
code_list = [
|
| 113 |
+
f"c10::IValue {args[i].name} = std::move(peek(stack, {i}, {len(args)}));"
|
| 114 |
+
for i in range(len(args))
|
| 115 |
+
] + [""]
|
| 116 |
+
binding_list = []
|
| 117 |
+
for i, arg in enumerate(args):
|
| 118 |
+
# expecting only Argument
|
| 119 |
+
if not isinstance(arg.argument, Argument):
|
| 120 |
+
raise Exception(
|
| 121 |
+
f"Unexpected argument type, expecting `Argument` but got {arg}"
|
| 122 |
+
)
|
| 123 |
+
argument: Argument = arg.argument
|
| 124 |
+
unboxed_name, _, code, decl = argumenttype_ivalue_convert(
|
| 125 |
+
argument.type,
|
| 126 |
+
argument.name,
|
| 127 |
+
mutable=argument.is_write,
|
| 128 |
+
)
|
| 129 |
+
code_list.extend(decl)
|
| 130 |
+
code_list.extend(code)
|
| 131 |
+
binding_list.append(arg.with_name(unboxed_name))
|
| 132 |
+
return binding_list, code_list
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
# Takes in the type, name and mutability corresponding to an argument, and generates a tuple of:
|
| 136 |
+
# (1) the C++ code necessary to unbox the argument
|
| 137 |
+
# (2) A Binding corresponding to the newly created unboxed variable, including variable name and its CType
|
| 138 |
+
def argumenttype_ivalue_convert(
|
| 139 |
+
t: Type, arg_name: str, *, mutable: bool = False
|
| 140 |
+
) -> Tuple[str, CType, List[str], List[str]]:
|
| 141 |
+
# Unboxing is for mobile, which doesn't care about SymInts
|
| 142 |
+
ctype = cpp.argumenttype_type(
|
| 143 |
+
t=t, mutable=mutable, binds=arg_name, symint=False
|
| 144 |
+
).type
|
| 145 |
+
|
| 146 |
+
if isinstance(t, BaseType):
|
| 147 |
+
out_name = f"{arg_name}_base"
|
| 148 |
+
code, decl = _gen_code_base_type(
|
| 149 |
+
arg_name=arg_name, out_name=out_name, ctype=ctype
|
| 150 |
+
)
|
| 151 |
+
elif isinstance(t, OptionalType):
|
| 152 |
+
out_name = f"{arg_name}_opt_out"
|
| 153 |
+
code, decl = _gen_code_optional_type(
|
| 154 |
+
arg_name=arg_name,
|
| 155 |
+
out_name=out_name,
|
| 156 |
+
t=t,
|
| 157 |
+
ctype=ctype,
|
| 158 |
+
)
|
| 159 |
+
elif isinstance(t, ListType):
|
| 160 |
+
out_name = f"{arg_name}_list_out"
|
| 161 |
+
code, decl = _gen_code_list_type(
|
| 162 |
+
arg_name=arg_name,
|
| 163 |
+
out_name=out_name,
|
| 164 |
+
t=t,
|
| 165 |
+
ctype=ctype,
|
| 166 |
+
)
|
| 167 |
+
else:
|
| 168 |
+
raise Exception(f"Cannot handle type {t}. arg_name: {arg_name}")
|
| 169 |
+
return out_name, ctype, code, decl
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def _gen_code_base_type(
|
| 173 |
+
arg_name: str, out_name: str, ctype: CType
|
| 174 |
+
) -> Tuple[List[str], List[str]]:
|
| 175 |
+
return [
|
| 176 |
+
f"{ctype.cpp_type(strip_ref=True)} {out_name} = {arg_name}.to<{ctype.cpp_type(strip_ref=True)}>();"
|
| 177 |
+
], []
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def _gen_code_optional_type(
|
| 181 |
+
arg_name: str, out_name: str, t: OptionalType, ctype: CType
|
| 182 |
+
) -> Tuple[List[str], List[str]]:
|
| 183 |
+
in_name = f"{arg_name}_opt_in"
|
| 184 |
+
res_name, _, res_code, decl = argumenttype_ivalue_convert(t.elem, in_name)
|
| 185 |
+
return (
|
| 186 |
+
f"""
|
| 187 |
+
c10::optional<c10::IValue> {arg_name}_opt = {arg_name}.toOptional<c10::IValue>();
|
| 188 |
+
{ctype.cpp_type(strip_ref=True)} {out_name};
|
| 189 |
+
if ({arg_name}_opt.has_value()) {{
|
| 190 |
+
const c10::IValue {in_name} = {arg_name}_opt.value();
|
| 191 |
+
{connector.join(res_code)}
|
| 192 |
+
{out_name} = {ctype.cpp_type(strip_ref=True)}({res_name});
|
| 193 |
+
}} else {{
|
| 194 |
+
{out_name} = {ctype.cpp_type(strip_ref=True)}();
|
| 195 |
+
}}
|
| 196 |
+
""".split(
|
| 197 |
+
"\n"
|
| 198 |
+
),
|
| 199 |
+
decl,
|
| 200 |
+
)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def _gen_code_list_type(
|
| 204 |
+
arg_name: str, out_name: str, t: ListType, ctype: CType
|
| 205 |
+
) -> Tuple[List[str], List[str]]:
|
| 206 |
+
in_name = f"{arg_name}_list_in"
|
| 207 |
+
elem_name = f"{arg_name}_elem"
|
| 208 |
+
code = [f"const c10::List<c10::IValue> {in_name} = {arg_name}.toList();"]
|
| 209 |
+
res_name, res_ctype, res_code, decl = argumenttype_ivalue_convert(t.elem, elem_name)
|
| 210 |
+
# handle list type with size, e.g., bool[4]
|
| 211 |
+
if isinstance(t.elem, BaseType) and t.elem.name == BaseTy.bool and t.size:
|
| 212 |
+
code.extend(
|
| 213 |
+
f"""
|
| 214 |
+
{ctype.cpp_type(strip_ref=True)} {out_name} = as_array<{res_ctype.cpp_type(strip_ref=True)}, {t.size}>({in_name});
|
| 215 |
+
""".split(
|
| 216 |
+
"\n"
|
| 217 |
+
)
|
| 218 |
+
)
|
| 219 |
+
# we have to use c10::List for optional element. e.g., Tensor?[] -> c10::List<c10::optional<at::Tensor>>
|
| 220 |
+
elif isinstance(t.elem, OptionalType):
|
| 221 |
+
code.extend(
|
| 222 |
+
f"""
|
| 223 |
+
{ctype.cpp_type(strip_ref=True)} {out_name};
|
| 224 |
+
for (c10::IValue {elem_name}: {in_name}) {{
|
| 225 |
+
{connector.join(res_code)}
|
| 226 |
+
{out_name}.push_back({res_name});
|
| 227 |
+
}}
|
| 228 |
+
""".split(
|
| 229 |
+
"\n"
|
| 230 |
+
)
|
| 231 |
+
)
|
| 232 |
+
else:
|
| 233 |
+
# use ArrayRef as default.
|
| 234 |
+
vec_name = arg_name + "_vec"
|
| 235 |
+
# need to bring vector instantiation out of scope so that ArrayRef has valid data
|
| 236 |
+
decl.append(f"std::vector<{res_ctype.cpp_type(strip_ref=True)}> {vec_name};")
|
| 237 |
+
code.extend(
|
| 238 |
+
f"""
|
| 239 |
+
for (c10::IValue {elem_name}: {in_name}) {{
|
| 240 |
+
{connector.join(res_code)}
|
| 241 |
+
{vec_name}.push_back({res_name});
|
| 242 |
+
}}
|
| 243 |
+
{ctype.cpp_type(strip_ref=True)} {out_name}({vec_name});
|
| 244 |
+
""".split(
|
| 245 |
+
"\n"
|
| 246 |
+
)
|
| 247 |
+
)
|
| 248 |
+
return code, decl
|
wemm/lib/python3.10/site-packages/torchgen/dest/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .lazy_ir import (
|
| 2 |
+
generate_non_native_lazy_ir_nodes as generate_non_native_lazy_ir_nodes,
|
| 3 |
+
GenLazyIR as GenLazyIR,
|
| 4 |
+
GenLazyNativeFuncDefinition as GenLazyNativeFuncDefinition,
|
| 5 |
+
GenLazyShapeInferenceDefinition as GenLazyShapeInferenceDefinition,
|
| 6 |
+
)
|
| 7 |
+
from .native_functions import (
|
| 8 |
+
compute_native_function_declaration as compute_native_function_declaration,
|
| 9 |
+
)
|
| 10 |
+
from .register_dispatch_key import (
|
| 11 |
+
gen_registration_headers as gen_registration_headers,
|
| 12 |
+
gen_registration_helpers as gen_registration_helpers,
|
| 13 |
+
RegisterDispatchKey as RegisterDispatchKey,
|
| 14 |
+
)
|
| 15 |
+
from .ufunc import (
|
| 16 |
+
compute_ufunc_cpu as compute_ufunc_cpu,
|
| 17 |
+
compute_ufunc_cpu_kernel as compute_ufunc_cpu_kernel,
|
| 18 |
+
compute_ufunc_cuda as compute_ufunc_cuda,
|
| 19 |
+
)
|
wemm/lib/python3.10/site-packages/torchgen/dest/__pycache__/lazy_ts_lowering.cpython-310.pyc
ADDED
|
Binary file (2.02 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/torchgen/dest/ufunc.py
ADDED
|
@@ -0,0 +1,545 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
from typing import Dict, List, Optional, Sequence, Tuple, Union
|
| 3 |
+
|
| 4 |
+
import torchgen.api.ufunc as ufunc
|
| 5 |
+
from torchgen.api.translate import translate
|
| 6 |
+
from torchgen.api.types import (
|
| 7 |
+
BaseCType,
|
| 8 |
+
Binding,
|
| 9 |
+
CType,
|
| 10 |
+
Expr,
|
| 11 |
+
NamedCType,
|
| 12 |
+
opmath_t,
|
| 13 |
+
scalar_t,
|
| 14 |
+
StructuredImplSignature,
|
| 15 |
+
VectorizedCType,
|
| 16 |
+
)
|
| 17 |
+
from torchgen.api.ufunc import UfunctorBindings
|
| 18 |
+
from torchgen.context import with_native_function
|
| 19 |
+
from torchgen.model import (
|
| 20 |
+
Argument,
|
| 21 |
+
BaseTy,
|
| 22 |
+
BaseType,
|
| 23 |
+
DispatchKey,
|
| 24 |
+
NativeFunctionsGroup,
|
| 25 |
+
ScalarType,
|
| 26 |
+
UfuncKey,
|
| 27 |
+
)
|
| 28 |
+
from torchgen.utils import OrderedSet
|
| 29 |
+
|
| 30 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
|
| 31 |
+
#
|
| 32 |
+
# CUDA STUFF
|
| 33 |
+
#
|
| 34 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
|
| 35 |
+
|
| 36 |
+
# NB: not bothering to generate dispatch stub forward declaration in header,
|
| 37 |
+
# we can just paste it whereever necessary
|
| 38 |
+
|
| 39 |
+
# TODO: use BackendIndex
|
| 40 |
+
# dispatch_key: DispatchKey # only CPU/CUDA right now
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
# Represents functors for implementing CUDA ufuncs.
|
| 44 |
+
# Functors are templated by scalar_t because when USERS instantiate functors
|
| 45 |
+
# they are templated. A functor looks something like this:
|
| 46 |
+
#
|
| 47 |
+
# template <typename scalar_t>
|
| 48 |
+
# struct CUDAFunctorOnSelf_add {
|
| 49 |
+
# using opmath_t = at::opmath_type<scalar_t>;
|
| 50 |
+
# opmath_t other_;
|
| 51 |
+
# opmath_t alpha_;
|
| 52 |
+
# CUDAFunctorOnSelf_add(opmath_t other, opmath_t alpha)
|
| 53 |
+
# : other_(other), alpha_(alpha) {}
|
| 54 |
+
# __device__ scalar_t operator()(scalar_t self) {
|
| 55 |
+
# return ufunc::add(static_cast<opmath_t>(self), other_, alpha_);
|
| 56 |
+
# }
|
| 57 |
+
# };
|
| 58 |
+
#
|
| 59 |
+
@dataclass(frozen=True)
|
| 60 |
+
class UfunctorSignature:
|
| 61 |
+
g: NativeFunctionsGroup
|
| 62 |
+
scalar_tensor_idx: Optional[int]
|
| 63 |
+
name: str
|
| 64 |
+
|
| 65 |
+
def arguments(self) -> UfunctorBindings:
|
| 66 |
+
return ufunc.ufunctor_arguments(
|
| 67 |
+
self.g, scalar_tensor_idx=self.scalar_tensor_idx, scalar_t=scalar_t
|
| 68 |
+
)
|
| 69 |
+
|
| 70 |
+
def fields(self) -> List[Binding]:
|
| 71 |
+
# fields are renamed to have a trailing underscore, as is conventional
|
| 72 |
+
return [b.rename(f"{b.name}_") for b in self.arguments().ctor]
|
| 73 |
+
|
| 74 |
+
def returns_type(self) -> CType:
|
| 75 |
+
# TODO: don't hardcode; return type will be inferred based on tags on
|
| 76 |
+
# the native function
|
| 77 |
+
return BaseCType(scalar_t)
|
| 78 |
+
|
| 79 |
+
def decl_fields(self) -> str:
|
| 80 |
+
return "\n".join(f"{f.type} {f.name};" for f in self.fields())
|
| 81 |
+
|
| 82 |
+
def inline_defn_ctor(self) -> str:
|
| 83 |
+
args_str = ", ".join(a.decl() for a in self.arguments().ctor)
|
| 84 |
+
# NB: hypothetically could do this with translate but the
|
| 85 |
+
# transition here is very regular
|
| 86 |
+
init_str = ", ".join(f"{a.name}_({a.name})" for a in self.arguments().ctor)
|
| 87 |
+
return f"{self.name}({args_str}) : {init_str} {{}}"
|
| 88 |
+
|
| 89 |
+
def decl_apply(self) -> str:
|
| 90 |
+
args_str = ", ".join(a.decl() for a in self.arguments().apply)
|
| 91 |
+
return f"{self.returns_type().cpp_type()} operator()({args_str}) const"
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@dataclass(frozen=True)
|
| 95 |
+
class UfuncSignature:
|
| 96 |
+
g: NativeFunctionsGroup
|
| 97 |
+
name: str
|
| 98 |
+
compute_t: CType
|
| 99 |
+
|
| 100 |
+
def arguments(self) -> List[Binding]:
|
| 101 |
+
return ufunc.ufunc_arguments(self.g, compute_t=self.compute_t)
|
| 102 |
+
|
| 103 |
+
def call(self, ctx: Sequence[Union[Binding, Expr]]) -> str:
|
| 104 |
+
return f"{self.name}({', '.join(a.expr for a in translate(ctx, self.arguments()))})"
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
# steps:
|
| 108 |
+
# 1. take the functional signature
|
| 109 |
+
# 2. use api.ufunc to convert it to template signature. this establishes
|
| 110 |
+
# the type of the template function
|
| 111 |
+
# 3. use api.ufunc (II) to generate a split struct / operator() signature.
|
| 112 |
+
# this establish context in which we call the template signature
|
| 113 |
+
#
|
| 114 |
+
# StructuredImplSignature context
|
| 115 |
+
# ~> functor constructor sig
|
| 116 |
+
#
|
| 117 |
+
# Functor constructor context
|
| 118 |
+
# ~> functor fields sig
|
| 119 |
+
#
|
| 120 |
+
# Functor apply context (functor fields + functor apply sig)
|
| 121 |
+
# ~> template sig
|
| 122 |
+
#
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def eligible_for_binary_scalar_specialization(g: NativeFunctionsGroup) -> bool:
|
| 126 |
+
num_tensors = sum(
|
| 127 |
+
1 for a in g.functional.func.arguments.flat_non_out if a.type.is_tensor_like()
|
| 128 |
+
)
|
| 129 |
+
return num_tensors == 2
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def compute_ufunc_cuda_functors(
|
| 133 |
+
g: NativeFunctionsGroup,
|
| 134 |
+
) -> Tuple[Dict[ScalarType, Dict[UfuncKey, UfunctorSignature]], str]:
|
| 135 |
+
# First, build the functors.
|
| 136 |
+
ufunctor_sigs: Dict[ScalarType, Dict[UfuncKey, UfunctorSignature]] = {}
|
| 137 |
+
ufunctors: List[str] = []
|
| 138 |
+
loops = g.out.ufunc_inner_loop
|
| 139 |
+
scalar_tensor_idx_lookup = {
|
| 140 |
+
UfuncKey.CUDAFunctorOnSelf: 1,
|
| 141 |
+
UfuncKey.CUDAFunctorOnOther: 0,
|
| 142 |
+
UfuncKey.CUDAFunctor: None,
|
| 143 |
+
}
|
| 144 |
+
if eligible_for_binary_scalar_specialization(g):
|
| 145 |
+
keys = [
|
| 146 |
+
UfuncKey.CUDAFunctorOnSelf,
|
| 147 |
+
UfuncKey.CUDAFunctorOnOther,
|
| 148 |
+
UfuncKey.CUDAFunctor,
|
| 149 |
+
]
|
| 150 |
+
else:
|
| 151 |
+
keys = [UfuncKey.CUDAFunctor]
|
| 152 |
+
for k in [UfuncKey.CUDAFunctorOnSelf, UfuncKey.CUDAFunctorOnOther]:
|
| 153 |
+
assert k not in loops, f"cannot use {k} on non-binary function"
|
| 154 |
+
for k in keys:
|
| 155 |
+
# If the key was directly defined, skip functor codegen; we assume the
|
| 156 |
+
# user already done it for us
|
| 157 |
+
if k in loops:
|
| 158 |
+
ufunctor_sig = UfunctorSignature(
|
| 159 |
+
g, scalar_tensor_idx=scalar_tensor_idx_lookup[k], name=loops[k].name
|
| 160 |
+
)
|
| 161 |
+
for dtype in loops[k].supported_dtypes:
|
| 162 |
+
ufunctor_sigs.setdefault(dtype, {})[k] = ufunctor_sig
|
| 163 |
+
continue
|
| 164 |
+
|
| 165 |
+
# Note [ScalarOnly and Generic must match names for CUDA]
|
| 166 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 167 |
+
# Otherwise, look in ANY of the generic entries. For simplicity of
|
| 168 |
+
# codegen, both ScalarOnly and Generic are defined, the ufunc name
|
| 169 |
+
# must match (if they didn't match, we'd have to generate distinct
|
| 170 |
+
# functors per dtype, which is awful, so we're not going to do it unless
|
| 171 |
+
# someone really forces us to)
|
| 172 |
+
ufunc_name = None
|
| 173 |
+
supported_dtypes: OrderedSet[ScalarType] = OrderedSet()
|
| 174 |
+
for lk in [UfuncKey.ScalarOnly, UfuncKey.Generic]:
|
| 175 |
+
if lk not in loops:
|
| 176 |
+
continue
|
| 177 |
+
if ufunc_name is None:
|
| 178 |
+
ufunc_name = loops[lk].name
|
| 179 |
+
else:
|
| 180 |
+
# See Note [ScalarOnly and Generic must match names for CUDA]
|
| 181 |
+
assert (
|
| 182 |
+
ufunc_name == loops[lk].name
|
| 183 |
+
), "ScalarOnly and Generic must have same ufunc name"
|
| 184 |
+
supported_dtypes |= loops[lk].supported_dtypes
|
| 185 |
+
assert ufunc_name is not None
|
| 186 |
+
|
| 187 |
+
name = f"{k}_{ufunc_name}"
|
| 188 |
+
ufunctor_sig = UfunctorSignature(
|
| 189 |
+
g, scalar_tensor_idx=scalar_tensor_idx_lookup[k], name=name
|
| 190 |
+
)
|
| 191 |
+
for dtype in supported_dtypes:
|
| 192 |
+
ufunctor_sigs.setdefault(dtype, {})[k] = ufunctor_sig
|
| 193 |
+
|
| 194 |
+
ufunc_sig = UfuncSignature(
|
| 195 |
+
g, name=f"ufunc::{ufunc_name}", compute_t=BaseCType(opmath_t)
|
| 196 |
+
)
|
| 197 |
+
apply_ctx = ufunctor_sig.fields() + ufunctor_sig.arguments().apply
|
| 198 |
+
ufunctors.append(
|
| 199 |
+
f"""
|
| 200 |
+
template <typename scalar_t>
|
| 201 |
+
struct {ufunctor_sig.name} {{
|
| 202 |
+
using opmath_t = at::opmath_type<scalar_t>;
|
| 203 |
+
{ufunctor_sig.decl_fields()}
|
| 204 |
+
{ufunctor_sig.inline_defn_ctor()}
|
| 205 |
+
__device__ {ufunctor_sig.decl_apply()} {{
|
| 206 |
+
return {ufunc_sig.call(apply_ctx)};
|
| 207 |
+
}}
|
| 208 |
+
}};
|
| 209 |
+
"""
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
+
return ufunctor_sigs, "\n".join(ufunctors)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
@dataclass(frozen=True)
|
| 216 |
+
class BinaryScalarSpecializationConfig:
|
| 217 |
+
scalar_idx: int
|
| 218 |
+
ctor_tensor: str
|
| 219 |
+
ufunc_key: UfuncKey
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
BinaryScalarSpecializationConfigs = [
|
| 223 |
+
BinaryScalarSpecializationConfig(
|
| 224 |
+
scalar_idx=0,
|
| 225 |
+
ctor_tensor="self",
|
| 226 |
+
ufunc_key=UfuncKey.CUDAFunctorOnOther,
|
| 227 |
+
),
|
| 228 |
+
BinaryScalarSpecializationConfig(
|
| 229 |
+
scalar_idx=1,
|
| 230 |
+
ctor_tensor="other",
|
| 231 |
+
ufunc_key=UfuncKey.CUDAFunctorOnSelf,
|
| 232 |
+
),
|
| 233 |
+
]
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def compute_ufunc_cuda_dtype_body(
|
| 237 |
+
g: NativeFunctionsGroup,
|
| 238 |
+
dtype: ScalarType,
|
| 239 |
+
inner_loops: Dict[UfuncKey, UfunctorSignature],
|
| 240 |
+
parent_ctx: Sequence[Binding],
|
| 241 |
+
) -> str:
|
| 242 |
+
body = "using opmath_t = at::opmath_type<scalar_t>;"
|
| 243 |
+
body += "if (false) {}\n" # for ease of codegen
|
| 244 |
+
for config in BinaryScalarSpecializationConfigs:
|
| 245 |
+
if config.ufunc_key not in inner_loops:
|
| 246 |
+
continue
|
| 247 |
+
ufunctor_sig = inner_loops[config.ufunc_key]
|
| 248 |
+
scalar_idx = config.scalar_idx + 1
|
| 249 |
+
# Make a copy and at the same time widen the type (not permissible
|
| 250 |
+
# without copy; we don't want to mutate the input argument anyway)
|
| 251 |
+
ctx: List[Union[Expr, Binding]] = list(parent_ctx)
|
| 252 |
+
ctx.append(
|
| 253 |
+
Expr(
|
| 254 |
+
expr=f"iter.scalar_value<opmath_t>({scalar_idx})",
|
| 255 |
+
type=NamedCType(config.ctor_tensor, BaseCType(opmath_t)),
|
| 256 |
+
)
|
| 257 |
+
)
|
| 258 |
+
ufunctor_ctor_exprs_str = ", ".join(
|
| 259 |
+
a.expr for a in translate(ctx, ufunctor_sig.arguments().ctor)
|
| 260 |
+
)
|
| 261 |
+
|
| 262 |
+
# NB: ufunctor must be allocated before iter.remove_operand is called,
|
| 263 |
+
# as it relies on iter
|
| 264 |
+
body += f"""\
|
| 265 |
+
else if (iter.is_cpu_scalar({scalar_idx})) {{
|
| 266 |
+
{ufunctor_sig.name}<scalar_t> ufunctor({ufunctor_ctor_exprs_str});
|
| 267 |
+
iter.remove_operand({scalar_idx});
|
| 268 |
+
gpu_kernel(iter, ufunctor);
|
| 269 |
+
}}"""
|
| 270 |
+
|
| 271 |
+
ufunctor_sig = inner_loops[UfuncKey.CUDAFunctor]
|
| 272 |
+
ufunctor_ctor_exprs_str = ", ".join(
|
| 273 |
+
a.expr for a in translate(parent_ctx, ufunctor_sig.arguments().ctor)
|
| 274 |
+
)
|
| 275 |
+
body += f"""
|
| 276 |
+
else {{
|
| 277 |
+
gpu_kernel(iter, {ufunctor_sig.name}<scalar_t>({ufunctor_ctor_exprs_str}));
|
| 278 |
+
}}
|
| 279 |
+
"""
|
| 280 |
+
return body
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
@with_native_function
|
| 284 |
+
def compute_ufunc_cuda(g: NativeFunctionsGroup) -> str:
|
| 285 |
+
# First, build the functors, indexing them by dtype
|
| 286 |
+
ufunctor_sigs, ufunctors = compute_ufunc_cuda_functors(g)
|
| 287 |
+
|
| 288 |
+
# Next, build the conditionals
|
| 289 |
+
sig = StructuredImplSignature(g, ufunc.kernel_name(g, DispatchKey.CUDA))
|
| 290 |
+
dtype_cases = []
|
| 291 |
+
for dtype, inner_ufunc_sigs in ufunctor_sigs.items():
|
| 292 |
+
dtype_cases.append(
|
| 293 |
+
f"""
|
| 294 |
+
AT_DISPATCH_CASE(at::ScalarType::{dtype},
|
| 295 |
+
[&]() {{
|
| 296 |
+
{compute_ufunc_cuda_dtype_body(g, dtype, inner_ufunc_sigs, sig.arguments())}
|
| 297 |
+
}}
|
| 298 |
+
)
|
| 299 |
+
"""
|
| 300 |
+
)
|
| 301 |
+
|
| 302 |
+
dtype_cases_str = "\n".join(dtype_cases)
|
| 303 |
+
|
| 304 |
+
stub_sig = StubSignature(g)
|
| 305 |
+
|
| 306 |
+
return f"""
|
| 307 |
+
{ufunctors}
|
| 308 |
+
|
| 309 |
+
{stub_sig.type_defn()};
|
| 310 |
+
{stub_sig.dispatch_decl()};
|
| 311 |
+
|
| 312 |
+
{stub_sig.kernel_defn()} {{
|
| 313 |
+
AT_DISPATCH_SWITCH(iter.common_dtype(), "{sig.name}",
|
| 314 |
+
{dtype_cases_str}
|
| 315 |
+
);
|
| 316 |
+
}}
|
| 317 |
+
REGISTER_DISPATCH({stub_sig.name}, &{stub_sig.kernel_name});
|
| 318 |
+
|
| 319 |
+
{sig.defn()} {{
|
| 320 |
+
{stub_sig.direct_call(sig.arguments())};
|
| 321 |
+
}}
|
| 322 |
+
"""
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
|
| 326 |
+
#
|
| 327 |
+
# CPU STUFF
|
| 328 |
+
#
|
| 329 |
+
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
@dataclass(frozen=True)
|
| 333 |
+
class StubSignature:
|
| 334 |
+
g: NativeFunctionsGroup
|
| 335 |
+
|
| 336 |
+
@property
|
| 337 |
+
def name(self) -> str:
|
| 338 |
+
return f"{str(self.g.functional.func.name.name)}_stub"
|
| 339 |
+
|
| 340 |
+
@property
|
| 341 |
+
def kernel_name(self) -> str:
|
| 342 |
+
return f"{str(self.g.functional.func.name.name)}_kernel"
|
| 343 |
+
|
| 344 |
+
@property
|
| 345 |
+
def type_name(self) -> str:
|
| 346 |
+
return f"{str(self.g.functional.func.name.name)}_fn"
|
| 347 |
+
|
| 348 |
+
def arguments(self) -> List[Binding]:
|
| 349 |
+
return ufunc.stub_arguments(self.g)
|
| 350 |
+
|
| 351 |
+
def type(self) -> str:
|
| 352 |
+
cpp_args = self.arguments()
|
| 353 |
+
return f"void(*)(TensorIteratorBase&, {', '.join(a.type for a in cpp_args)})"
|
| 354 |
+
|
| 355 |
+
def dispatch_decl(self) -> str:
|
| 356 |
+
return f"DECLARE_DISPATCH({self.type_name}, {self.name})"
|
| 357 |
+
|
| 358 |
+
def dispatch_defn(self) -> str:
|
| 359 |
+
return f"DEFINE_DISPATCH({self.name})"
|
| 360 |
+
|
| 361 |
+
def kernel_defn(self) -> str:
|
| 362 |
+
return f"void {self.kernel_name}(TensorIteratorBase& iter, {', '.join(a.defn() for a in self.arguments())})"
|
| 363 |
+
|
| 364 |
+
def type_defn(self) -> str:
|
| 365 |
+
return f"using {self.type_name} = {self.type()}"
|
| 366 |
+
|
| 367 |
+
# must be called from context where this is TensorIteratorBase*
|
| 368 |
+
def call(self, ctx: Sequence[Binding]) -> str:
|
| 369 |
+
return f"{self.name}(device_type(), *this, {', '.join(a.expr for a in translate(ctx, self.arguments()))})"
|
| 370 |
+
|
| 371 |
+
# used in CUDA to skip the unnecessary dynamic dispatch
|
| 372 |
+
def direct_call(self, ctx: Sequence[Binding]) -> str:
|
| 373 |
+
return f"{self.kernel_name}(*this, {', '.join(a.expr for a in translate(ctx, self.arguments()))})"
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
@with_native_function
|
| 377 |
+
def compute_ufunc_cpu(g: NativeFunctionsGroup) -> str:
|
| 378 |
+
stub_sig = StubSignature(g)
|
| 379 |
+
sig = StructuredImplSignature(g, ufunc.kernel_name(g, DispatchKey.CPU))
|
| 380 |
+
|
| 381 |
+
return f"""
|
| 382 |
+
{stub_sig.type_defn()};
|
| 383 |
+
{stub_sig.dispatch_decl()};
|
| 384 |
+
{stub_sig.dispatch_defn()};
|
| 385 |
+
|
| 386 |
+
{sig.defn()} {{
|
| 387 |
+
{stub_sig.call(sig.arguments())};
|
| 388 |
+
}}
|
| 389 |
+
"""
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
def compute_ufunc_cpu_dtype_body(
|
| 393 |
+
g: NativeFunctionsGroup,
|
| 394 |
+
dtype: ScalarType,
|
| 395 |
+
inner_loops: Dict[UfuncKey, UfuncSignature],
|
| 396 |
+
parent_ctx: Sequence[Binding],
|
| 397 |
+
) -> str:
|
| 398 |
+
assert UfuncKey.CPUScalar in inner_loops, f"{dtype}, {inner_loops.keys()}"
|
| 399 |
+
assert inner_loops.keys() <= {UfuncKey.CPUScalar, UfuncKey.CPUVector}
|
| 400 |
+
scalar_loop = inner_loops[UfuncKey.CPUScalar]
|
| 401 |
+
vec_loop = None
|
| 402 |
+
if UfuncKey.CPUVector in inner_loops:
|
| 403 |
+
vec_loop = inner_loops[UfuncKey.CPUVector]
|
| 404 |
+
|
| 405 |
+
# NB: We DON'T use translate here, because translate is
|
| 406 |
+
# incapable of CSE'ing the scalar accesses in case it is also
|
| 407 |
+
# used by Vectorized; also, the unpacking here is very simple
|
| 408 |
+
# and only affects Scalar; everything else is implicitly captured
|
| 409 |
+
# by the lambda
|
| 410 |
+
|
| 411 |
+
# Setup scalar in scope
|
| 412 |
+
body = []
|
| 413 |
+
ctx = []
|
| 414 |
+
for b in parent_ctx:
|
| 415 |
+
if isinstance(b.argument, Argument) and b.argument.type != BaseType(
|
| 416 |
+
BaseTy.Scalar
|
| 417 |
+
):
|
| 418 |
+
continue
|
| 419 |
+
body.append(f"auto _s_{b.name} = {b.name}.to<scalar_t>();")
|
| 420 |
+
ctx.append(Expr(f"_s_{b.name}", NamedCType(b.nctype.name, BaseCType(scalar_t))))
|
| 421 |
+
if vec_loop is not None:
|
| 422 |
+
for b in parent_ctx:
|
| 423 |
+
if isinstance(b.argument, Argument) and b.argument.type != BaseType(
|
| 424 |
+
BaseTy.Scalar
|
| 425 |
+
):
|
| 426 |
+
continue
|
| 427 |
+
body.append(
|
| 428 |
+
f"auto _v_{b.name} = at::vec::Vectorized<scalar_t>(_s_{b.name});"
|
| 429 |
+
)
|
| 430 |
+
ctx.append(
|
| 431 |
+
Expr(
|
| 432 |
+
f"_v_{b.name}",
|
| 433 |
+
NamedCType(b.nctype.name, VectorizedCType(BaseCType(scalar_t))),
|
| 434 |
+
)
|
| 435 |
+
)
|
| 436 |
+
|
| 437 |
+
# Setup lambda signature
|
| 438 |
+
# NB: simplified version of ufunctor_arguments
|
| 439 |
+
scalar_bindings = []
|
| 440 |
+
vec_bindings = []
|
| 441 |
+
for a in g.functional.func.arguments.flat_non_out:
|
| 442 |
+
if not a.type.is_tensor_like():
|
| 443 |
+
continue
|
| 444 |
+
assert a.type == BaseType(BaseTy.Tensor)
|
| 445 |
+
scalar_bindings.append(
|
| 446 |
+
Binding(
|
| 447 |
+
name=a.name,
|
| 448 |
+
nctype=NamedCType(a.name, BaseCType(scalar_t)),
|
| 449 |
+
argument=a,
|
| 450 |
+
)
|
| 451 |
+
)
|
| 452 |
+
if vec_loop is not None:
|
| 453 |
+
vec_bindings.append(
|
| 454 |
+
Binding(
|
| 455 |
+
name=a.name,
|
| 456 |
+
nctype=NamedCType(a.name, VectorizedCType(BaseCType(scalar_t))),
|
| 457 |
+
argument=a,
|
| 458 |
+
)
|
| 459 |
+
)
|
| 460 |
+
|
| 461 |
+
def with_ctx(b: Sequence[Binding]) -> List[Union[Expr, Binding]]:
|
| 462 |
+
r: List[Union[Expr, Binding]] = []
|
| 463 |
+
r.extend(ctx)
|
| 464 |
+
r.extend(b)
|
| 465 |
+
return r
|
| 466 |
+
|
| 467 |
+
body_str = "\n".join(body)
|
| 468 |
+
if vec_loop is not None:
|
| 469 |
+
return f"""
|
| 470 |
+
{body_str}
|
| 471 |
+
cpu_kernel_vec(iter,
|
| 472 |
+
[=]({', '.join(b.decl() for b in scalar_bindings)}) {{ return {scalar_loop.call(with_ctx(scalar_bindings))}; }},
|
| 473 |
+
[=]({', '.join(b.decl() for b in vec_bindings)}) {{ return {vec_loop.call(with_ctx(vec_bindings))}; }}
|
| 474 |
+
);
|
| 475 |
+
"""
|
| 476 |
+
else:
|
| 477 |
+
return f"""
|
| 478 |
+
{body_str}
|
| 479 |
+
cpu_kernel(iter,
|
| 480 |
+
[=]({', '.join(b.decl() for b in scalar_bindings)}) {{ return {scalar_loop.call(with_ctx(scalar_bindings))}; }}
|
| 481 |
+
);
|
| 482 |
+
"""
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
@with_native_function
|
| 486 |
+
def compute_ufunc_cpu_kernel(g: NativeFunctionsGroup) -> str:
|
| 487 |
+
stub_sig = StubSignature(g)
|
| 488 |
+
|
| 489 |
+
# Reindex the ufunc by dtypes; processing generic/scalaronly as well
|
| 490 |
+
loops = g.out.ufunc_inner_loop
|
| 491 |
+
ufunc_sigs: Dict[ScalarType, Dict[UfuncKey, UfuncSignature]] = {}
|
| 492 |
+
for k in [UfuncKey.CPUScalar, UfuncKey.CPUVector]:
|
| 493 |
+
lks = []
|
| 494 |
+
# ORDER MATTERS: this specifies overriding precedence
|
| 495 |
+
if k in loops: # should happen rarely
|
| 496 |
+
lks.append(k)
|
| 497 |
+
if UfuncKey.ScalarOnly in loops and k is UfuncKey.CPUScalar:
|
| 498 |
+
lks.append(UfuncKey.ScalarOnly)
|
| 499 |
+
if UfuncKey.Generic in loops:
|
| 500 |
+
lks.append(UfuncKey.Generic)
|
| 501 |
+
# TODO: don't hardcode ufunc:: namespace here, should be centralized smh
|
| 502 |
+
for lk in lks:
|
| 503 |
+
for dtype in loops[lk].supported_dtypes:
|
| 504 |
+
compute_t: CType
|
| 505 |
+
if k is UfuncKey.CPUScalar:
|
| 506 |
+
compute_t = BaseCType(scalar_t)
|
| 507 |
+
elif k is UfuncKey.CPUVector:
|
| 508 |
+
compute_t = VectorizedCType(BaseCType(scalar_t))
|
| 509 |
+
else:
|
| 510 |
+
raise AssertionError()
|
| 511 |
+
inner_ufunc_sigs = ufunc_sigs.setdefault(dtype, {})
|
| 512 |
+
if k not in inner_ufunc_sigs:
|
| 513 |
+
inner_ufunc_sigs[k] = UfuncSignature(
|
| 514 |
+
g, name=f"ufunc::{loops[lk].name}", compute_t=compute_t
|
| 515 |
+
)
|
| 516 |
+
|
| 517 |
+
# Build the conditionals
|
| 518 |
+
dtype_cases = []
|
| 519 |
+
for dtype, inner_ufunc_sigs in ufunc_sigs.items():
|
| 520 |
+
dtype_cases.append(
|
| 521 |
+
f"""
|
| 522 |
+
AT_DISPATCH_CASE(at::ScalarType::{dtype},
|
| 523 |
+
[&]() {{
|
| 524 |
+
{compute_ufunc_cpu_dtype_body(g, dtype, inner_ufunc_sigs, stub_sig.arguments())}
|
| 525 |
+
}}
|
| 526 |
+
)
|
| 527 |
+
"""
|
| 528 |
+
)
|
| 529 |
+
|
| 530 |
+
dtype_cases_str = "\n".join(dtype_cases)
|
| 531 |
+
return f"""
|
| 532 |
+
namespace {{
|
| 533 |
+
|
| 534 |
+
{stub_sig.kernel_defn()} {{
|
| 535 |
+
AT_DISPATCH_SWITCH(iter.common_dtype(), "{stub_sig.name}",
|
| 536 |
+
{dtype_cases_str}
|
| 537 |
+
);
|
| 538 |
+
}}
|
| 539 |
+
|
| 540 |
+
}} // anonymous namespace
|
| 541 |
+
|
| 542 |
+
{stub_sig.type_defn()};
|
| 543 |
+
{stub_sig.dispatch_decl()};
|
| 544 |
+
REGISTER_DISPATCH({stub_sig.name}, &{stub_sig.kernel_name});
|
| 545 |
+
"""
|
wemm/lib/python3.10/site-packages/torchgen/gen_executorch.py
ADDED
|
@@ -0,0 +1,779 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
import os
|
| 3 |
+
import pathlib
|
| 4 |
+
from collections import defaultdict
|
| 5 |
+
from dataclasses import dataclass
|
| 6 |
+
from typing import Callable, Dict, List, Optional, Sequence, TextIO, Tuple, Union
|
| 7 |
+
|
| 8 |
+
import yaml
|
| 9 |
+
|
| 10 |
+
# Parse native_functions.yaml into a sequence of NativeFunctions and Backend Indices.
|
| 11 |
+
from torchgen import dest
|
| 12 |
+
from torchgen.api import cpp as aten_cpp
|
| 13 |
+
from torchgen.api.types import CppSignature, CppSignatureGroup, CType, NamedCType
|
| 14 |
+
from torchgen.context import method_with_native_function, with_native_function_and_index
|
| 15 |
+
from torchgen.executorch.api import et_cpp
|
| 16 |
+
from torchgen.executorch.api.custom_ops import (
|
| 17 |
+
ComputeNativeFunctionStub,
|
| 18 |
+
gen_custom_ops_registration,
|
| 19 |
+
)
|
| 20 |
+
from torchgen.executorch.api.types import ExecutorchCppSignature
|
| 21 |
+
from torchgen.executorch.api.unboxing import Unboxing
|
| 22 |
+
from torchgen.gen import (
|
| 23 |
+
get_custom_build_selector,
|
| 24 |
+
get_native_function_declarations,
|
| 25 |
+
get_native_function_schema_registrations,
|
| 26 |
+
LineLoader,
|
| 27 |
+
parse_native_yaml,
|
| 28 |
+
ParsedYaml,
|
| 29 |
+
)
|
| 30 |
+
from torchgen.model import (
|
| 31 |
+
BackendIndex,
|
| 32 |
+
BackendMetadata,
|
| 33 |
+
DispatchKey,
|
| 34 |
+
is_cuda_dispatch_key,
|
| 35 |
+
Location,
|
| 36 |
+
NativeFunction,
|
| 37 |
+
NativeFunctionsGroup,
|
| 38 |
+
OperatorName,
|
| 39 |
+
Variant,
|
| 40 |
+
)
|
| 41 |
+
from torchgen.selective_build.selector import SelectiveBuilder
|
| 42 |
+
from torchgen.utils import (
|
| 43 |
+
context,
|
| 44 |
+
FileManager,
|
| 45 |
+
make_file_manager,
|
| 46 |
+
mapMaybe,
|
| 47 |
+
NamespaceHelper,
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def static_dispatch(
    sig: Union[CppSignature, ExecutorchCppSignature],
    f: NativeFunction,
    backend_indices: List[BackendIndex],
) -> str:
    """
    For a given `NativeFunction`, find out the corresponding native function and dispatch to it. If zero or more than one
    native function exists, error out. A simplified version of register_dispatch_key.py
    Arguments:
        sig: A CppSignature for this native function we want to use.
        f: NativeFunction to generate static dispatch.
        backend_indices: All available backends.
    Return:
        C++ code to call backend-specific functions, e.g., "return at::native::add(self, other, scale);"
    """
    # Nothing to dispatch to if no backends exist or the kernel is registered
    # manually elsewhere.
    if len(backend_indices) == 0 or f.manual_kernel_registration:
        return ""

    backends = [b for b in backend_indices if b.has_kernel(f)]
    static_block = None
    if len(backends) == 1:
        # Exactly one backend provides a kernel: emit a direct call to it.
        backend_metadata = backends[0].get_kernel(f)
        if backend_metadata:
            args = ", ".join(a.name for a in sig.arguments())
            # Here we are assuming there's no difference between CppSignature and NativeSignature for Executorch.
            static_block = f"return ::{backend_metadata.cpp_namespace}::{backend_metadata.kernel}({args});"
    else:
        # Zero or multiple kernels: emit a runtime assertion instead of a call.
        static_block = f"""
ET_ASSERT_UNREACHABLE_MSG("The number of native function(s) binding to {f.func.name} is {len(backends)}.");
"""
    # Wrap the dispatch body in an inline function with the requested signature.
    return f"""
// {f.namespace}::{f.func}
TORCH_API inline {sig.decl()} {{
{static_block}
}}
"""
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# Generates Functions.h, which provides the functional public C++ API,
# and the scaffolding to call into the dispatcher from these functions.
@dataclass(frozen=True)
class ComputeFunction:
    """Per-operator generator for the public C++ API in Functions.h.

    Called once per NativeFunction; returns the generated C++ snippet, or
    None when the operator should be skipped.
    """

    # Backends considered when statically dispatching (non-ATen path).
    static_dispatch_backend_indices: List[BackendIndex]

    # Selective-build filter: only root operators get a public API.
    selector: SelectiveBuilder

    # When True, generated code forwards to at::* instead of static dispatch.
    use_aten_lib: bool

    # Predicate telling whether a function is a custom (non-ATen) op.
    is_custom_op: Callable[[NativeFunction], bool]

    @method_with_native_function
    def __call__(self, f: NativeFunction) -> Optional[str]:
        # Skip operators not selected by the custom build.
        if not self.selector.is_root_operator(f"{f.namespace}::{f.func.name}"):
            return None
        # Only function-variant operators get a free-function API.
        if Variant.function not in f.variants:
            return None
        # ATen mode uses the ATen C++ signature; otherwise the Executorch one.
        sig: Union[CppSignature, ExecutorchCppSignature] = (
            CppSignatureGroup.from_native_function(
                f, method=False, fallback_binding=f.manual_cpp_binding
            ).most_faithful_signature()
            if self.use_aten_lib
            else ExecutorchCppSignature.from_native_function(f)
        )
        if self.use_aten_lib and not self.is_custom_op(f):
            # ATen mode, non-custom op: forward straight to the at:: function.
            comma = ", "

            return f"""
// {f.namespace}::{f.func}
TORCH_API inline {sig.decl()} {{
    return at::{sig.name()}({comma.join(e.name for e in sig.arguments())});
}}
"""

        else:
            # Custom op or lean mode: statically dispatch to the backend kernel.
            return static_dispatch(
                sig,
                f,
                backend_indices=self.static_dispatch_backend_indices,
            )
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# Generates RegisterCodegenUnboxedKernels.cpp.
@dataclass(frozen=True)
class ComputeCodegenUnboxedKernels:
    """Per-operator generator of unboxing wrappers for the Executorch runtime.

    Each emitted `Operator(...)` entry pops arguments off the EValue stack,
    converts them to C++ types, calls the kernel, and writes the result back.
    """

    # Selective-build filter: only root operators get an unboxing entry.
    selector: SelectiveBuilder

    # When True, unbox to ATen types/signatures; otherwise Executorch ones.
    use_aten_lib: bool

    @method_with_native_function
    def __call__(self, f: NativeFunction) -> str:
        # Skip operators not selected by the custom build.
        if not self.selector.is_root_operator(f"{f.namespace}::{f.func.name}"):
            return ""
        sig: Union[CppSignature, ExecutorchCppSignature]
        argument_type_gen: Callable[..., NamedCType]
        return_type_gen: Callable[..., CType]
        # Pick the signature and the C++ type mappers for the selected mode.
        if self.use_aten_lib:
            sig = CppSignatureGroup.from_native_function(
                f, method=False, fallback_binding=f.manual_cpp_binding
            ).most_faithful_signature()
            argument_type_gen = aten_cpp.argumenttype_type
            return_type_gen = aten_cpp.returns_type
        else:
            sig = ExecutorchCppSignature.from_native_function(f)
            argument_type_gen = et_cpp.argumenttype_type
            return_type_gen = et_cpp.returns_type
        # parse arguments into C++ code
        binding_list, code_list = Unboxing(
            argument_type_gen=argument_type_gen
        ).convert_arguments(sig.arguments())

        # for each C++ argument, generate the conversion code
        code_connector = "\n\t"
        arg_connector = ", "

        args_str = f"{arg_connector.join(e.name for e in binding_list)}"

        # Decide how the kernel's result flows back onto the EValue stack.
        if len(f.func.returns) == 0:
            if len(f.func.arguments.out) == 0:
                raise Exception(
                    f"Can't handle native function {f.func} with no returns and no out yet."
                )
            # void kernel with out= argument: publish the out tensor's address.
            out = f.func.arguments.out[0]
            return_assignment = f"""stack[{len(binding_list)}] = &{out.name};"""
            ret_prefix = ""
        else:
            if len(f.func.arguments.out) == 0:
                # Functional kernel: capture the result and box it as an EValue.
                return_assignment = (
                    f"""*stack[{len(binding_list)}] = EValue(result_);"""
                )
                ret_prefix = return_type_gen(f.func.returns).cpp_type() + " result_ = "
            else:
                # out= kernel that also returns: nothing extra to write back.
                return_assignment = ""
                ret_prefix = ""

        return f"""
Operator(
    "{f.namespace}::{f.func.name}",
    [](EValue** stack) {{
        {code_connector.join(code_list)}

        EXECUTORCH_SCOPE_PROF("native_call_{f.func.name}");
        {ret_prefix}torch::executor::{f.namespace}::{sig.name()}({args_str});

        {return_assignment}
    }}
),
"""
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def gen_unboxing(
    *,
    native_functions: Sequence[NativeFunction],
    cpu_fm: FileManager,
    selector: SelectiveBuilder,
    use_aten_lib: bool,
) -> None:
    """Writes RegisterCodegenUnboxedKernels.cpp.

    One unboxing entry is produced per selected operator; all entries land
    in a single shard of the sharded output file.
    """
    # The generator is a frozen dataclass, so one instance serves every op.
    kernel_gen = ComputeCodegenUnboxedKernels(selector, use_aten_lib)

    def shard_key(fn: Union[NativeFunction, NativeFunctionsGroup]) -> str:
        # Shard on the operator's base name.
        return fn.root_name

    def make_env(fn: NativeFunction) -> Dict[str, List[str]]:
        # Template environment for a single operator.
        return {"unboxed_ops": [kernel_gen(fn)]}

    cpu_fm.write_sharded(
        "RegisterCodegenUnboxedKernels.cpp",
        native_functions,
        key_fn=shard_key,
        env_callable=make_env,
        num_shards=1,
        sharded_keys={"unboxed_ops"},
    )
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
@with_native_function_and_index
def compute_native_function_declaration(
    g: Union[NativeFunctionsGroup, NativeFunction], backend_index: BackendIndex
) -> List[str]:
    """Returns the NativeFunctions.h declaration line(s) for one operator.

    Yields an empty list when the backend registers no kernel for it.
    """
    assert isinstance(g, NativeFunction)
    kernel_meta = backend_index.get_kernel(g)
    if kernel_meta is None:
        # No kernel registered for this backend: nothing to declare.
        return []
    signature = ExecutorchCppSignature.from_native_function(f=g)
    # External backends get internal linkage; in-tree kernels are exported.
    linkage = "static" if backend_index.external else "TORCH_API"
    return [f"{linkage} {signature.decl(name=kernel_meta.kernel)};"]
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def gen_functions_declarations(
    *,
    native_functions: Sequence[NativeFunction],
    static_dispatch_idx: List[BackendIndex],
    selector: SelectiveBuilder,
    use_aten_lib: bool,
    custom_ops_native_functions: Optional[Sequence[NativeFunction]] = None,
) -> str:
    """
    Generates namespace separated C++ function API inline declaration/definitions.
    Native functions are grouped by namespaces and the generated code is wrapped inside
    namespace blocks.

    E.g., for `custom_1::foo.out` in yaml file we will generate a C++ API as a symbol
    in `torch::executor::custom_1::foo_out`. This way we avoid symbol conflict when
    the other `custom_2::foo.out` is available.
    """
    # Bucket functions by their declared namespace.
    ns_grouped_functions = defaultdict(list)
    for native_function in native_functions:
        ns_grouped_functions[native_function.namespace].append(native_function)
    functions_declarations = ""
    newline = "\n"
    for namespace in ns_grouped_functions:
        # Emits the nested namespace open/close lines (up to 3 levels deep).
        ns_helper = NamespaceHelper(
            namespace_str=namespace,
            entity_name="",
            max_level=3,
        )
        # Generate one declaration per operator; skipped ops yield None and
        # are dropped by mapMaybe.
        declarations = list(
            mapMaybe(
                ComputeFunction(
                    static_dispatch_backend_indices=static_dispatch_idx,
                    selector=selector,
                    use_aten_lib=use_aten_lib,
                    # An op is "custom" iff it appears in the custom-ops yaml.
                    is_custom_op=lambda f: custom_ops_native_functions is not None
                    and f in custom_ops_native_functions,
                ),
                ns_grouped_functions[namespace],
            )
        )
        functions_declarations += f"""
{ns_helper.prologue}
{newline.join(declarations)}
{ns_helper.epilogue}
"""
    return functions_declarations
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
def gen_headers(
    *,
    native_functions: Sequence[NativeFunction],
    custom_ops_native_functions: Sequence[NativeFunction],
    static_dispatch_idx: List[BackendIndex],
    selector: SelectiveBuilder,
    backend_indices: Dict[DispatchKey, BackendIndex],
    cpu_fm: FileManager,
    use_aten_lib: bool,
) -> None:
    """Generates the header files: Functions.h, NativeFunctions.h and,
    when custom ops are present, CustomOpsNativeFunctions.h.

    Args:
        native_functions: All operators (functions.yaml + custom ops).
        custom_ops_native_functions: Operators from custom_ops.yaml only.
        static_dispatch_idx: Backends used for static dispatch.
        selector: Selective-build filter.
        backend_indices: Kernel metadata per dispatch key.
        cpu_fm: File manager that performs the writes.
        use_aten_lib: Whether generated code targets the full ATen library.
    """
    aten_headers = ["#include <ATen/Functions.h>"]
    if custom_ops_native_functions:
        # Custom ops get their own native-function declarations header,
        # rendered from the NativeFunctions.h template.
        cpu_fm.write_with_template(
            "CustomOpsNativeFunctions.h",
            "NativeFunctions.h",
            lambda: {
                "nativeFunctions_declarations": get_native_function_declarations(
                    grouped_native_functions=custom_ops_native_functions,
                    backend_indices=backend_indices,
                    native_function_decl_gen=dest.compute_native_function_declaration,
                ),
            },
        )
        aten_headers.append('#include "CustomOpsNativeFunctions.h"')
    # Public functional API.
    cpu_fm.write(
        "Functions.h",
        lambda: {
            "static_dispatch_extra_headers": aten_headers
            if use_aten_lib
            else ['#include "NativeFunctions.h"'],
            "Functions_declarations": gen_functions_declarations(
                native_functions=native_functions,
                static_dispatch_idx=static_dispatch_idx,
                selector=selector,
                use_aten_lib=use_aten_lib,
                custom_ops_native_functions=custom_ops_native_functions,
            ),
        },
    )

    # Kernel declarations; in ATen mode use the upstream declaration
    # generator, otherwise the Executorch-specific one defined in this file.
    cpu_fm.write(
        "NativeFunctions.h",
        lambda: {
            "nativeFunctions_declarations": get_native_function_declarations(
                grouped_native_functions=native_functions,
                backend_indices=backend_indices,
                native_function_decl_gen=dest.compute_native_function_declaration
                if use_aten_lib
                else compute_native_function_declaration,
            ),
        },
    )
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def gen_custom_ops(
    *,
    native_functions: Sequence[NativeFunction],
    selector: SelectiveBuilder,
    backend_indices: Dict[DispatchKey, BackendIndex],
    cpu_fm: FileManager,
    rocm: bool,
) -> None:
    """Generates the source files that register custom ops with PyTorch:
    Register{CPU}CustomOps.cpp, Register{CPU}Stub.cpp and RegisterSchema.cpp.

    Args:
        native_functions: Operators parsed from custom_ops.yaml.
        selector: Selective-build filter.
        backend_indices: Kernel metadata per dispatch key.
        cpu_fm: File manager that performs the writes.
        rocm: Whether to reinterpret CUDA as ROCm/HIP.
    """
    # Custom-op kernels are registered under the CPU dispatch key only.
    dispatch_key = DispatchKey.CPU
    backend_index = backend_indices[dispatch_key]
    (
        anonymous_definition,
        static_init_dispatch_registrations,
    ) = gen_custom_ops_registration(
        native_functions=native_functions,
        selector=selector,
        backend_index=backend_index,
        rocm=rocm,
    )
    # Real registrations pointing at the custom-op native functions.
    cpu_fm.write_with_template(
        f"Register{dispatch_key}CustomOps.cpp",
        "RegisterDispatchKeyCustomOps.cpp",
        lambda: {
            "ops_headers": '#include "CustomOpsNativeFunctions.h"',
            "DispatchKey": dispatch_key,
            "dispatch_namespace": dispatch_key.lower(),
            "dispatch_namespaced_definitions": "",
            "dispatch_anonymous_definitions": anonymous_definition,
            "static_init_dispatch_registrations": static_init_dispatch_registrations,
        },
    )
    # Stub registrations (same template) backed by generated no-op bodies,
    # for builds that don't link the real custom-op kernels.
    cpu_fm.write_with_template(
        f"Register{dispatch_key}Stub.cpp",
        "RegisterDispatchKeyCustomOps.cpp",
        lambda: {
            "ops_headers": "",
            "DispatchKey": dispatch_key,
            "dispatch_namespace": dispatch_key.lower(),
            "dispatch_namespaced_definitions": "",
            "dispatch_anonymous_definitions": list(
                mapMaybe(ComputeNativeFunctionStub(), native_functions)
            ),
            "static_init_dispatch_registrations": static_init_dispatch_registrations,
        },
    )

    # Schema-only registrations so the dispatcher knows the op signatures.
    (
        aten_schema_registrations,
        schema_registrations,
    ) = get_native_function_schema_registrations(
        native_functions=native_functions,
        schema_selector=selector,
    )
    cpu_fm.write(
        "RegisterSchema.cpp",
        lambda: {
            "schema_registrations": schema_registrations,
            "aten_schema_registrations": aten_schema_registrations,
        },
    )
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
def translate_native_yaml(
    tags_yaml_path: str,
    aten_yaml_path: str,
    native_yaml_path: Optional[str],
    use_aten_lib: bool,
    out_file: TextIO,
) -> None:
    """Translates Executorch DSL dialect to use the same syntax as
    native_functions.yaml. The major difference is that Executorch DSL dialect
    supports "op" key, where it refers to the operator name in native_functions.yaml.

    For example, a functions.yaml may have the following entry:

    - op: add.out
      ...

    It needs to be translated to the following:

    - func: add.out(Tensor self, Tensor other, *, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)
      ...

    We go in aten_yaml_path and find the operator schema for "add.out" and add it
    to the original functions.yaml. We also add required field "variants", where for
    Executorch it will always be "function".

    For ATen mode we don't have to do the translation because native_yaml_path is
    the same as native_functions.yaml.

    Args:
        tags_yaml_path: Path to a tags.yaml file to satisfy codegen parsing.
            It is not optional.
        aten_yaml_path: Path to ATen operator yaml file native_functions.yaml.
        native_yaml_path: Path to a functions.yaml file to parse.
            If the path does not exist in the filesystem, it is treated as an
            empty file. If `custom_ops_yaml_path` exists, the contents of that
            file are appended to the yaml input to be parsed.
        use_aten_lib: We use this flag to determine if we want to generate native
            functions. In ATen mode we should generate out= variants.
        out_file: The IO object that we are writing into.
    Returns:
        None
    """
    if use_aten_lib:
        # ATen mode: the input is already native_functions.yaml syntax, so
        # copy it through verbatim.
        with open(aten_yaml_path, "r") as aten_yaml:
            out_file.writelines(aten_yaml.readlines())
        return
    aten_parsed_yaml = parse_native_yaml(
        aten_yaml_path,
        tags_yaml_path,
        None,
        skip_native_fns_gen=False,
    )
    aten_native_functions = aten_parsed_yaml.native_functions
    # Map "namespace::name" -> full schema string, used to resolve "op" keys.
    schema_dict = {
        f"{f.namespace}::{f.func.name}": str(f.func) for f in aten_native_functions
    }
    if (
        not native_yaml_path
        or not os.path.exists(native_yaml_path)
        or os.stat(native_yaml_path).st_size == 0
    ):
        # A missing or empty functions.yaml is treated as an empty file.
        return
    with open(native_yaml_path, "r") as native_yaml:
        # LineLoader annotates each entry with its source line ("__line__")
        # for error reporting.
        native_es = yaml.load(native_yaml, Loader=LineLoader)
        if not native_es:
            return
        for e in native_es:
            assert isinstance(e.get("__line__"), int), e
            loc = Location(native_yaml_path, e.pop("__line__"))
            with context(lambda: f"in {loc}:\n "):
                if "variants" not in e:
                    # Executorch operators are always function variants.
                    e["variants"] = "function"
                if "func" in e:
                    # Entry already uses native_functions.yaml syntax.
                    continue
                assert isinstance(e.get("op"), str), e
                opname = e.pop("op")
                if "::" not in opname:
                    # Bare operator names default to the aten namespace.
                    opname = "aten::" + opname
                assert opname in schema_dict
                # Replace the "op" shorthand with the full ATen schema.
                e["func"] = schema_dict.get(opname)
        yaml.dump(native_es, out_file, width=1000)
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def convert_backend_indices(
    bs: Dict[DispatchKey, Dict[OperatorName, BackendMetadata]]
) -> Dict[DispatchKey, BackendIndex]:
    """Wraps raw per-backend kernel metadata maps into BackendIndex objects.

    The result is a defaultdict: looking up a dispatch key that was not in
    `bs` yields an empty Undefined BackendIndex rather than a KeyError.
    """

    def _undefined_index() -> BackendIndex:
        # Fallback produced for dispatch keys absent from the input.
        return BackendIndex(
            dispatch_key=DispatchKey.Undefined,
            use_out_as_primary=True,
            external=False,
            device_guard=False,
            index={},
        )

    result: Dict[DispatchKey, BackendIndex] = defaultdict(_undefined_index)
    result.update(
        {
            key: BackendIndex(
                dispatch_key=key,
                use_out_as_primary=True,
                external=False,
                # Only cuda-like devices in tree require device guards
                device_guard=is_cuda_dispatch_key(key),
                index=metadata,
            )
            for key, metadata in bs.items()
        }
    )
    return result
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def parse_yaml(
    path: Optional[str],
    tags_yaml_path: str,
    function_filter: Callable[[NativeFunction], bool],
    skip_native_fns_gen: bool = False,
) -> Tuple[
    List[NativeFunction], Dict[DispatchKey, Dict[OperatorName, BackendMetadata]]
]:
    """Parses one operator yaml file into functions and backend kernel maps.

    Args:
        path: Path to the yaml file; a missing/empty path yields empty results.
        tags_yaml_path: Path to tags.yaml required by the codegen parser.
        function_filter: Predicate selecting which parsed functions to keep.
        skip_native_fns_gen: Forwarded to parse_native_yaml.
    Returns:
        A tuple of (kept native functions, backend indices restricted to the
        kept functions' operator names).
    """
    # Missing, unspecified, or empty files are treated as empty inputs.
    if not (path and os.path.exists(path) and os.stat(path).st_size > 0):
        return [], {}

    parsed_yaml = parse_native_yaml(
        path,
        tags_yaml_path,
        None,
        skip_native_fns_gen=skip_native_fns_gen,
    )
    native_functions = list(filter(function_filter, parsed_yaml.native_functions))
    # Use a set for O(1) membership tests; the original list made the
    # backend-index filtering below quadratic in the number of operators.
    op_names = {f.func.name for f in native_functions}

    def map_index(
        m: Dict[OperatorName, BackendMetadata]
    ) -> Dict[OperatorName, BackendMetadata]:
        # Keep only metadata for operators that survived the filter.
        return {op: m[op] for op in m if op in op_names}

    backend_indices = {
        k: map_index(b.index) for (k, b) in parsed_yaml.backend_indices.items()
    }
    return native_functions, backend_indices
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
def parse_yaml_files(
    tags_yaml_path: str,
    aten_yaml_path: str,
    native_yaml_path: Optional[str],
    custom_ops_yaml_path: Optional[str],
    selector: SelectiveBuilder,
    use_aten_lib: bool,
) -> Tuple[ParsedYaml, Optional[ParsedYaml]]:
    """Parses functions.yaml and custom_ops.yaml files.

    Args:
        tags_yaml_path: Path to a tags.yaml file to satisfy codegen parsing.
            It is not optional.
        aten_yaml_path: Path to ATen operator yaml file native_functions.yaml.
        native_yaml_path: Path to a functions.yaml file to parse.
            If the path does not exist in the filesystem, it is treated as an
            empty file. If `custom_ops_yaml_path` exists, the contents of that
            file are appended to the yaml input to be parsed.
        custom_ops_yaml_path: Path to a custom_ops.yaml file to parse. If
            the path does not exist in the filesystem, it is ignored.
        selector: For selective build.
        use_aten_lib: We use this flag to determine if we want to generate native
            functions. In ATen mode we should generate out= variants.
    Returns:
        A tuple with two elements:
        [0]: The parsed results of concatenating the contents of
             `native_yaml_path` and `custom_ops_yaml_path`.
        [1]: The parsed results of the contents of `custom_ops_yaml_path`, if
             present. If not present, None.
    """
    import tempfile

    # Only include selected ops: avoids doing parse/codegen work for
    # operators that the selective build does not need.
    def function_filter(f: NativeFunction) -> bool:
        return selector.is_native_function_selected(f)

    with tempfile.TemporaryDirectory() as tmpdirname:
        # First rewrite the Executorch-dialect functions.yaml into
        # native_functions.yaml syntax, then parse the translated file.
        translated_yaml_path = os.path.join(tmpdirname, "translated.yaml")
        with open(translated_yaml_path, "w") as translated:
            translate_native_yaml(
                tags_yaml_path,
                aten_yaml_path,
                native_yaml_path,
                use_aten_lib,
                translated,
            )
        translated_functions, translated_backend_indices = parse_yaml(
            translated_yaml_path, tags_yaml_path, function_filter, not use_aten_lib
        )
        custom_ops_functions, custom_ops_backend_indices = parse_yaml(
            custom_ops_yaml_path, tags_yaml_path, function_filter, True
        )

        # Merge the two parses; custom-op metadata wins on key collisions.
        combined_functions = translated_functions + custom_ops_functions
        combined_backend_indices: Dict[
            DispatchKey, Dict[OperatorName, BackendMetadata]
        ] = defaultdict(dict)
        combined_backend_indices.update(translated_backend_indices)

        for dk in custom_ops_backend_indices:
            if dk not in combined_backend_indices:
                combined_backend_indices.update({dk: custom_ops_backend_indices[dk]})
            else:
                combined_backend_indices[dk] = {
                    **combined_backend_indices[dk],
                    **custom_ops_backend_indices[dk],
                }

        combined_yaml = ParsedYaml(
            combined_functions, convert_backend_indices(combined_backend_indices)
        )
        custom_ops_parsed_yaml = ParsedYaml(
            custom_ops_functions, convert_backend_indices(custom_ops_backend_indices)
        )

    return combined_yaml, custom_ops_parsed_yaml
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
def main() -> None:
    """CLI entry point: parses arguments, loads the operator yaml files, and
    generates the requested header and source files."""
    parser = argparse.ArgumentParser(description="Generate operator source files")
    # Although we don't refer to --source-path directly, make_file_manager()
    # expects it to point to a directory that contains a templates/ subdirectory
    # containing the file templates.
    parser.add_argument(
        "-s",
        "--source-path",
        help="path to source directory for kernel templates",
    )
    parser.add_argument(
        "--functions-yaml-path",
        "--functions_yaml_path",
        help="path to the functions.yaml file to use. Optional, but at least "
        "one of --functions-yaml-path and --custom-ops-yaml-path must be "
        "specified.",
    )
    parser.add_argument(
        "--custom-ops-yaml-path",
        "--custom_ops_yaml_path",
        help="path to the custom_ops.yaml file to use. Optional, but at least "
        "one of --functions-yaml-path and --custom-ops-yaml-path must be "
        "specified.",
    )
    parser.add_argument(
        "--aten-yaml-path",
        "--aten_yaml_path",
        help="path to native_functions.yaml file.",
    )
    # Note that make_file_manager() also looks at --install-dir.
    parser.add_argument(
        "-d",
        "--install-dir",
        "--install_dir",
        help="output directory",
        default="build/generated",
    )
    parser.add_argument(
        "-o",
        "--output-dependencies",
        help="output a list of dependencies into the given file and exit",
    )
    # Although we don't refer to --dry-run directly, make_file_manager() looks
    # for it.
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="run without writing any files (still updates outputs)",
    )
    parser.add_argument(
        "--static-dispatch-backend",
        "--static_dispatch_backend",
        nargs="*",
        help="generate static dispatch code for the specific backend (if set)",
    )
    parser.add_argument(
        "--op-registration-whitelist",
        "--op_registration_whitelist",
        nargs="*",
        help="filter op registrations by the whitelist (if set); "
        "each item is `namespace`::`operator name` without overload name; "
        "e.g.: aten::empty aten::conv2d ...",
    )
    parser.add_argument(
        "--op-selection-yaml-path",
        "--op_selection_yaml_path",
        help="Provide a path to the operator selection (for custom build) YAML "
        "that contains the information about the set of selected operators "
        "and their categories (training, ...). Each operator is either a "
        "full operator name with overload or just a bare operator name. "
        "The operator names also contain the namespace prefix (e.g. aten::)",
    )
    parser.add_argument(
        "--tags-path",
        help="Path to tags.yaml. Required by yaml parsing in codegen system.",
    )
    parser.add_argument(
        "--rocm",
        action="store_true",
        help="reinterpret CUDA as ROCm/HIP and adjust filepaths accordingly",
    )
    parser.add_argument(
        "--use-aten-lib",
        "--use_aten_lib",
        action="store_true",
        help="a boolean flag to indicate whether we use ATen kernels or not, in the future this flag will be per "
        "operator",
    )
    parser.add_argument(
        "--generate",
        type=str,
        nargs="*",
        choices=["headers", "sources"],
        default=["headers", "sources"],
        help="Generate only a subset of files",
    )
    options = parser.parse_args()
    assert options.tags_path, "tags.yaml is required by codegen yaml parsing."

    # Build the selective-build filter from whitelist / selection yaml.
    selector = get_custom_build_selector(
        options.op_registration_whitelist,
        options.op_selection_yaml_path,
    )

    # Parse functions.yaml (+ custom_ops.yaml) into native functions and
    # backend kernel indices.
    parsed_yaml, custom_ops_parsed_yaml = parse_yaml_files(
        aten_yaml_path=options.aten_yaml_path,
        tags_yaml_path=options.tags_path,
        native_yaml_path=options.functions_yaml_path,
        custom_ops_yaml_path=options.custom_ops_yaml_path,
        selector=selector,
        use_aten_lib=options.use_aten_lib,
    )
    native_functions, backend_indices = (
        parsed_yaml.native_functions,
        parsed_yaml.backend_indices,
    )
    custom_ops_native_functions = (
        custom_ops_parsed_yaml.native_functions if custom_ops_parsed_yaml else []
    )

    cpu_fm = make_file_manager(options=options)

    # Static dispatch currently targets the CPU backend only.
    static_dispatch_idx: List[BackendIndex] = [backend_indices[DispatchKey.CPU]]

    if "headers" in options.generate:
        gen_headers(
            native_functions=native_functions,
            custom_ops_native_functions=custom_ops_native_functions,
            static_dispatch_idx=static_dispatch_idx,
            selector=selector,
            backend_indices=backend_indices,
            cpu_fm=cpu_fm,
            use_aten_lib=options.use_aten_lib,
        )

    if "sources" in options.generate:
        gen_unboxing(
            native_functions=native_functions,
            cpu_fm=cpu_fm,
            selector=selector,
            use_aten_lib=options.use_aten_lib,
        )
        if custom_ops_native_functions:
            gen_custom_ops(
                native_functions=custom_ops_native_functions,
                selector=selector,
                backend_indices=backend_indices,
                cpu_fm=cpu_fm,
                rocm=options.rocm,
            )

    if options.output_dependencies:
        # Emit a depfile listing every output the file manager produced.
        depfile_path = pathlib.Path(options.output_dependencies).resolve()
        depfile_name = depfile_path.name
        depfile_stem = depfile_path.stem

        for fm, prefix in [
            (cpu_fm, ""),
        ]:
            varname = prefix + depfile_stem
            path = depfile_path.parent / (prefix + depfile_name)
            fm.write_outputs(varname, str(path))


if __name__ == "__main__":
    main()
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/native/native_functions.yaml
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/native/tags.yaml
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This yaml file contains all the possible tags that can be defined in `tags` in `native_functions.yaml`
|
| 2 |
+
|
| 3 |
+
- tag: inplace_view
|
| 4 |
+
desc: |
|
| 5 |
+
This tag indicates if an operator *only* modifies the tensor metadata
|
| 6 |
+
- tag: view_copy
|
| 7 |
+
desc: |
|
| 8 |
+
This tag indicates operators that are *_copy* variants
|
| 9 |
+
of view/aliasing operators. If an operator has a view_copy tag,
|
| 10 |
+
then it should have the name {op}_copy, where {op} is a view operator.
|
| 11 |
+
- tag: dynamic_output_shape
|
| 12 |
+
desc: |
|
| 13 |
+
This tag indicates if an operator's output's shape depends on input Tensor
|
| 14 |
+
data.
|
| 15 |
+
- tag: data_dependent_output
|
| 16 |
+
desc: |
|
| 17 |
+
Operator has a non-Tensor output whose value is dependent on the data
|
| 18 |
+
of Tensor inputs. Among other things, this implies that this operator
|
| 19 |
+
cannot be run with meta tensor (since data is not available), nor
|
| 20 |
+
can it be symbolically traced.
|
| 21 |
+
- tag: generated
|
| 22 |
+
desc: |
|
| 23 |
+
This tag indicates that the operator doesn't have an explicit entry in
|
| 24 |
+
native_functions.yaml, and instead was generated automatically by the codegen.
|
| 25 |
+
- tag: nondeterministic_seeded
|
| 26 |
+
desc: |
|
| 27 |
+
This tag indicates if an operator is nondeterminstically seeded (ie is random)
|
| 28 |
+
such that the operator intentionally produces different results when run twice on the same inputs.
|
| 29 |
+
- tag: nondeterministic_bitwise
|
| 30 |
+
desc: |
|
| 31 |
+
This tag indicates if an operator doesn't guarentee bitwise equivalence
|
| 32 |
+
across different runs of an operator with identical inputs.
|
| 33 |
+
- tag: core
|
| 34 |
+
desc: |
|
| 35 |
+
Core aten ops is a subset of aten ops that remains after aten-to-aten decomposition and
|
| 36 |
+
functionalization pass. Core aten ops are fully functional and adhere to single static
|
| 37 |
+
assignment (SSA): this implies there will be no `inplace` or `_out` variants in this opset.
|
| 38 |
+
This opset is designed to serve as the functional IR to interface with compiler backends.
|
| 39 |
+
In contrast to primTorch, core aten opset doesn't decompose ops into explicit
|
| 40 |
+
type promotion and broadcasting ops.
|
| 41 |
+
Core aten ops is also effectively the opset produced by torchdynamo.export(aten_graph=True),
|
| 42 |
+
and thus can be used as an opset for export purpose.
|
| 43 |
+
- tag: pointwise
|
| 44 |
+
desc: |
|
| 45 |
+
Pointwise operators are operators where each element of the output is computed only by accessing
|
| 46 |
+
the corresponding element of all the broadcasted inputs. The output shape will be the broadcasted
|
| 47 |
+
shape of the inputs.
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/DispatchKeyFunction.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// ${generated_comment}
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace ${dispatch_namespace} {
|
| 19 |
+
|
| 20 |
+
${dispatch_namespaced_declarations}
|
| 21 |
+
|
| 22 |
+
} // namespace ${dispatch_namespace}
|
| 23 |
+
} // namespace at
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/NativeFunction.h
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// ${generated_comment}
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
${extra_includes}
|
| 16 |
+
|
| 17 |
+
${native_function_declarations}
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/NativeMetaFunctions.h
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// ${generated_comment}
|
| 4 |
+
|
| 5 |
+
#include <ATen/core/Tensor.h>
|
| 6 |
+
#include <ATen/core/IListRef.h>
|
| 7 |
+
#include <ATen/TensorMeta.h>
|
| 8 |
+
#include <ATen/TensorIterator.h>
|
| 9 |
+
|
| 10 |
+
${NativeMetaFunctions_includes}
|
| 11 |
+
|
| 12 |
+
namespace at {
|
| 13 |
+
|
| 14 |
+
namespace meta {
|
| 15 |
+
|
| 16 |
+
${NativeMetaFunctions_declarations}
|
| 17 |
+
|
| 18 |
+
} // namespace meta
|
| 19 |
+
} // namespace at
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/Operators.h
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// ${generated_comment}
|
| 4 |
+
|
| 5 |
+
#ifdef TORCH_ASSERT_NO_OPERATORS
|
| 6 |
+
#error This change adds a dependency on native_functions.yaml, \
|
| 7 |
+
meaning the file will need to be re-compiled every time an operator \
|
| 8 |
+
is changed or added. Consider if your change would be better placed in \
|
| 9 |
+
another file, or if a more specific header might achieve the same goal. \
|
| 10 |
+
See NOTE: [Tensor vs. TensorBase]
|
| 11 |
+
#endif
|
| 12 |
+
|
| 13 |
+
#if defined(AT_PER_OPERATOR_HEADERS) && defined(TORCH_ASSERT_ONLY_METHOD_OPERATORS)
|
| 14 |
+
#error This change adds a dependency on all pytorch operators, meaning the \
|
| 15 |
+
file will need to be re-compiled every time an operator is changed or added. \
|
| 16 |
+
Consider including a specific operator from <ATen/ops/{my_operator}_ops.h> \
|
| 17 |
+
and see NOTE [TORCH_ASSERT_ONLY_METHOD_OPERATORS].
|
| 18 |
+
#endif
|
| 19 |
+
|
| 20 |
+
#include <c10/core/SymInt.h>
|
| 21 |
+
#include <c10/core/SymIntArrayRef.h>
|
| 22 |
+
#include <c10/core/Scalar.h>
|
| 23 |
+
#include <c10/core/TensorOptions.h>
|
| 24 |
+
#include <c10/core/QScheme.h>
|
| 25 |
+
#include <c10/util/OptionalArrayRef.h>
|
| 26 |
+
#include <tuple>
|
| 27 |
+
#include <vector>
|
| 28 |
+
|
| 29 |
+
${Operators_includes}
|
| 30 |
+
|
| 31 |
+
// Extension writers: do you write wrapper functions? Are you frustrated with
|
| 32 |
+
// resolving overloads of operators? Are you frustrated with dealing with
|
| 33 |
+
// pointer-to-methods and resolving overloads of pointer-to-methods?? Look no
|
| 34 |
+
// further, this is the utility for you.
|
| 35 |
+
//
|
| 36 |
+
// Given an operator schema: aten::op.overload(...
|
| 37 |
+
//
|
| 38 |
+
// Use ATEN_FN2(op, overload) to get a *function* version of the operator
|
| 39 |
+
// that is guaranteed to not be overloaded. This means that you can safely
|
| 40 |
+
// decltype(&ATEN_FN2(op, overload)) it. NB: the 2 means this macro takes 2 args.
|
| 41 |
+
//
|
| 42 |
+
// Given an operator schema without an overload name: aten::op(...
|
| 43 |
+
//
|
| 44 |
+
// Use ATEN_FN(op) to get an unambiguous *function* version of the operator.
|
| 45 |
+
//
|
| 46 |
+
// There is some interesting behavior for out= operations.
|
| 47 |
+
// ATEN_FN2(sin, out) gives a function that is *faithful* to the schema;
|
| 48 |
+
// that is, the order of arguments is exactly what it looks like in the schema.
|
| 49 |
+
|
| 50 |
+
#define ATEN_FN2(op_name, overload) at::_ops::op_name##_##overload::call
|
| 51 |
+
#define ATEN_FN(op_name) at::_ops::op_name::call
|
| 52 |
+
|
| 53 |
+
// Separately, ATEN_OP(op) and ATEN_OP2(op, overload) define a class containing compile-time
|
| 54 |
+
// metadata about a given aten operator.
|
| 55 |
+
// Notable data on the class includes:
|
| 56 |
+
// - ATEN_OP2(add, Tensor)::name // returns the string name: "add"
|
| 57 |
+
// - ATEN_OP2(add, Tensor)::overload_name // returns the string overload name: "Tensor"
|
| 58 |
+
// - ATEN_OP2(add, Tensor)::schema // returns the C++ schema type: at::Tensor (const at::Tensor &, const at::Tensor &, const at::Scalar &)
|
| 59 |
+
// - ATEN_OP2(add, Tensor)::schema_str // returns the string jit type: "add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor"
|
| 60 |
+
|
| 61 |
+
#define ATEN_OP2(op_name, overload) at::_ops::op_name##_##overload
|
| 62 |
+
#define ATEN_OP(op_name) at::_ops::op_name
|
| 63 |
+
|
| 64 |
+
// WARNING: Please do not call any of the ops in the _ops namespace directly.
|
| 65 |
+
// Use the ATEN_FN macros. We do not guarantee stability of the naming
|
| 66 |
+
// scheme for the functions in at::_ops
|
| 67 |
+
|
| 68 |
+
// See Note [The ATen Operators API] for details of the at::_ops namespace
|
| 69 |
+
|
| 70 |
+
namespace at {
|
| 71 |
+
namespace _ops {
|
| 72 |
+
${Operators_declarations}
|
| 73 |
+
} // namespace _ops
|
| 74 |
+
} // namespace at
|
wemm/lib/python3.10/site-packages/torchgen/packaged/ATen/templates/RedispatchFunctions.h
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// ${generated_comment}
|
| 4 |
+
|
| 5 |
+
#ifdef TORCH_ASSERT_ONLY_METHOD_OPERATORS
|
| 6 |
+
#error This change adds a dependency on all pytorch operators, meaning the \
|
| 7 |
+
file will need to be re-compiled every time an operator is changed or added. \
|
| 8 |
+
Consider using the at::_ops::{name}::redispatch() interface by including \
|
| 9 |
+
the specific operator from <ATen/ops/{my_operator}_ops.h>
|
| 10 |
+
#endif
|
| 11 |
+
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <ATen/Tensor.h>
|
| 14 |
+
#include <c10/core/Storage.h>
|
| 15 |
+
#include <ATen/core/Generator.h>
|
| 16 |
+
#include <c10/util/Deprecated.h>
|
| 17 |
+
#include <ATen/DeviceGuard.h>
|
| 18 |
+
#include <c10/core/TensorOptions.h>
|
| 19 |
+
#include <ATen/core/Reduction.h>
|
| 20 |
+
#include <c10/util/Optional.h>
|
| 21 |
+
#include <ATen/TensorUtils.h>
|
| 22 |
+
#include <ATen/Context.h>
|
| 23 |
+
#include <ATen/TracerMode.h>
|
| 24 |
+
#include <ATen/Operators.h>
|
| 25 |
+
|
| 26 |
+
namespace at {
|
| 27 |
+
|
| 28 |
+
namespace redispatch {
|
| 29 |
+
${function_redispatch_definitions}
|
| 30 |
+
} // namespace redispatch
|
| 31 |
+
|
| 32 |
+
}
|