Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- valley/lib/python3.10/site-packages/torch/_export/__pycache__/pass_base.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/__pycache__/tools.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/__pycache__/utils.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/__pycache__/wrappers.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/db/__pycache__/case.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/db/examples/__pycache__/dynamic_shape_if_guard.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/db/examples/__pycache__/list_unpack.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/__init__.py +0 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/__pycache__/upgrade.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/schema.py +379 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/schema_check.py +286 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/serialize.py +0 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/union.py +70 -0
- valley/lib/python3.10/site-packages/torch/_export/serde/upgrade.py +14 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/__init__.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/benchmark_utils.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/compilers.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/fx_minifier.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/make_functional.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/python_key.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/vmap.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__init__.py +296 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_async.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_await.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_builtins.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_check.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_decompositions.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_freeze.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_fuser.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_ir_utils.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_logging.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_pickle.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_recursive.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_script.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_serialization.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_shape_functions.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/_state.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/annotations.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/frontend.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/generate_bytecode.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/quantized.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/supported_ops.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/__pycache__/unsupported_tensor_ops.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/torch/jit/_async.py +102 -0
valley/lib/python3.10/site-packages/torch/_export/__pycache__/pass_base.cpython-310.pyc
ADDED: Binary file (15 kB).
valley/lib/python3.10/site-packages/torch/_export/__pycache__/tools.cpython-310.pyc
ADDED: Binary file (4.26 kB).
valley/lib/python3.10/site-packages/torch/_export/__pycache__/utils.cpython-310.pyc
ADDED: Binary file (17 kB).
valley/lib/python3.10/site-packages/torch/_export/__pycache__/wrappers.cpython-310.pyc
ADDED: Binary file (4.25 kB).
valley/lib/python3.10/site-packages/torch/_export/db/__pycache__/case.cpython-310.pyc
ADDED: Binary file (5.44 kB).
valley/lib/python3.10/site-packages/torch/_export/db/examples/__pycache__/dynamic_shape_if_guard.cpython-310.pyc
ADDED: Binary file (1.01 kB).
valley/lib/python3.10/site-packages/torch/_export/db/examples/__pycache__/list_unpack.cpython-310.pyc
ADDED: Binary file (1.22 kB).
valley/lib/python3.10/site-packages/torch/_export/serde/__init__.py
ADDED: File without changes (empty file).
valley/lib/python3.10/site-packages/torch/_export/serde/__pycache__/upgrade.cpython-310.pyc
ADDED: Binary file (630 Bytes).
valley/lib/python3.10/site-packages/torch/_export/serde/schema.py
ADDED
@@ -0,0 +1,379 @@
+# NOTE: This is a placeholder for iterating on export serialization schema design.
+# Anything is subject to change and no guarantee is provided at this point.
+
+from dataclasses import dataclass, field
+from enum import IntEnum
+from typing import Dict, List, Optional, Tuple
+
+from torch._export.serde.union import _Union
+
+# NOTE: Please update this value if any modifications are made to the schema
+SCHEMA_VERSION = (5, 3)
+TREESPEC_VERSION = 1
+
+
+class ScalarType(IntEnum):
+    UNKNOWN = 0
+    BYTE = 1
+    CHAR = 2
+    SHORT = 3
+    INT = 4
+    LONG = 5
+    HALF = 6
+    FLOAT = 7
+    DOUBLE = 8
+    COMPLEXHALF = 9
+    COMPLEXFLOAT = 10
+    COMPLEXDOUBLE = 11
+    BOOL = 12
+    BFLOAT16 = 13
+
+
+class Layout(IntEnum):
+    Unknown = 0
+    SparseCoo = 1
+    SparseCsr = 2
+    SparseCsc = 3
+    SparseBsr = 4
+    SparseBsc = 5
+    _mkldnn = 6
+    Strided = 7
+
+
+class MemoryFormat(IntEnum):
+    Unknown = 0
+    ContiguousFormat = 1
+    ChannelsLast = 2
+    ChannelsLast3d = 3
+    PreserveFormat = 4
+
+
+@dataclass
+class Device:
+    type: str
+    index: Optional[int] = None
+
+
+@dataclass(repr=False)
+class SymExprHint(_Union):
+    as_int: int
+    as_float: float
+    as_bool: bool
+
+
+# This is for storing the symbolic expressions behind symints/symfloats/symbools
+# For example, we can get something like
+# SymExpr(expr_str="s0 + s1", hint=SymExprHint(as_int=4))
+# if we also have the hint that s0 and s1 are both 2.
+@dataclass
+class SymExpr:
+    expr_str: str
+    hint: Optional[SymExprHint] = None
+
+
+@dataclass(repr=False)
+class SymInt(_Union):
+    as_expr: SymExpr
+    as_int: int
+
+
+@dataclass(repr=False)
+class SymBool(_Union):
+    as_expr: SymExpr
+    as_bool: bool
+
+
+@dataclass
+class TensorMeta:
+    dtype: ScalarType
+    sizes: List[SymInt]
+    requires_grad: bool
+    device: Device
+    strides: List[SymInt]
+    storage_offset: SymInt
+    layout: Layout
+
+
+# In most cases we will use the "as_name" field to store arguments which are
+# SymInts.
+# The "as_int" field is used in the case where we have a list containing a mix
+# of SymInt and ints (ex. [1, s0, ...]). We will serialize this type of list to
+# be List[SymIntArgument] and map the SymInts to the "as_name" field, and ints
+# to the "as_int" field.
+@dataclass(repr=False)
+class SymIntArgument(_Union):
+    as_name: str
+    as_int: int
+
+
+# In most cases we will use the "as_name" field to store arguments which are
+# SymBools.
+# The "as_bool" field is used in the case where we have a list containing a mix
+# of SymBool and bools (ex. [True, i0, ...]). We will serialize this type of list to
+# be List[SymBoolArgument] and map the SymBools to the "as_name" field, and bools
+# to the "as_bool" field.
+@dataclass(repr=False)
+class SymBoolArgument(_Union):
+    as_name: str
+    as_bool: bool
+
+
+@dataclass
+class TensorArgument:
+    name: str
+
+
+@dataclass
+class TokenArgument:
+    name: str
+
+
+# This is used for storing the contents of a list which contains optional tensors
+# (Tensor?[], ex. [Tensor, None, ...]), where the list will be serialized to the
+# type List[OptionalTensorArgument], with tensor values serialized to the
+# "as_tensor" field, and None values serialized to the "as_none" field.
+@dataclass(repr=False)
+class OptionalTensorArgument(_Union):
+    as_tensor: TensorArgument
+    as_none: Tuple[()]
+
+
+@dataclass
+class GraphArgument:
+    name: str
+    graph: 'Graph'
+
+
+@dataclass
+class CustomObjArgument:
+    name: str
+    class_fqn: str
+
+
+# This is actually a union type
+@dataclass(repr=False)
+class Argument(_Union):
+    as_none: Tuple[()]
+    as_tensor: TensorArgument
+    as_tensors: List[TensorArgument]
+    as_int: int
+    as_ints: List[int]
+    as_float: float
+    as_floats: List[float]
+    as_string: str
+    as_strings: List[str]
+    as_sym_int: SymIntArgument
+    as_sym_ints: List[SymIntArgument]
+    as_scalar_type: ScalarType
+    as_memory_format: MemoryFormat
+    as_layout: Layout
+    as_device: Device
+    as_bool: bool
+    as_bools: List[bool]
+    as_sym_bool: SymBoolArgument
+    as_sym_bools: List[SymBoolArgument]
+    as_graph: GraphArgument
+    as_optional_tensors: List[OptionalTensorArgument]
+    as_custom_obj: CustomObjArgument
+    as_operator: str
+
+
+@dataclass
+class NamedArgument:
+    # Argument name from the operator schema
+    name: str
+    arg: Argument
+
+
+@dataclass
+class Node:
+    target: str
+    inputs: List[NamedArgument]
+    outputs: List[Argument]
+    metadata: Dict[str, str]
+
+
+@dataclass
+class Graph:
+    inputs: List[Argument]
+    outputs: List[Argument]
+    nodes: List[Node]
+    tensor_values: Dict[str, TensorMeta]
+    sym_int_values: Dict[str, SymInt]
+    sym_bool_values: Dict[str, SymBool]
+    # This is for deserializing the submodule graphs from higher order ops
+    # (ex. cond, map) where single tensor returns will just return a single
+    # tensor, rather than following export schema and returning a singleton
+    # list.
+    is_single_tensor_return: bool = False
+    custom_obj_values: Dict[str, CustomObjArgument] = field(default_factory=dict)
+
+
+@dataclass
+class UserInputSpec:
+    # Actually, only tensors and SymInts are allowed here
+    arg: Argument
+
+
+@dataclass(repr=False)
+class ConstantValue(_Union):
+    as_none: Tuple[()]
+    as_int: int
+    as_float: float
+    as_string: str
+    as_bool: bool
+
+
+@dataclass
+class ConstantInputSpec:
+    name: str
+    value: ConstantValue
+
+
+@dataclass
+class InputToParameterSpec:
+    arg: TensorArgument
+    parameter_name: str
+
+
+@dataclass
+class InputToBufferSpec:
+    arg: TensorArgument
+    buffer_name: str
+    persistent: bool
+
+
+
+@dataclass
+class InputToTensorConstantSpec:
+    arg: TensorArgument
+    tensor_constant_name: str
+
+
+@dataclass
+class InputToCustomObjSpec:
+    arg: CustomObjArgument
+    custom_obj_name: str
+
+
+@dataclass
+class InputTokenSpec:
+    arg: TokenArgument
+
+
+@dataclass(repr=False)
+class InputSpec(_Union):
+    user_input: UserInputSpec
+    parameter: InputToParameterSpec
+    buffer: InputToBufferSpec
+    tensor_constant: InputToTensorConstantSpec
+    custom_obj: InputToCustomObjSpec
+    token: InputTokenSpec
+    constant_input: ConstantInputSpec
+
+
+@dataclass
+class UserOutputSpec:
+    arg: Argument
+
+
+@dataclass
+class LossOutputSpec:
+    arg: TensorArgument
+
+
+@dataclass
+class BufferMutationSpec:
+    arg: TensorArgument
+    buffer_name: str
+
+
+@dataclass
+class GradientToParameterSpec:
+    arg: TensorArgument
+    parameter_name: str
+
+
+@dataclass
+class GradientToUserInputSpec:
+    arg: TensorArgument
+    user_input_name: str
+
+
+@dataclass
+class UserInputMutationSpec:
+    arg: TensorArgument
+    user_input_name: str
+
+
+@dataclass
+class OutputTokenSpec:
+    arg: TokenArgument
+
+
+@dataclass(repr=False)
+class OutputSpec(_Union):
+    user_output: UserOutputSpec
+    loss_output: LossOutputSpec
+    buffer_mutation: BufferMutationSpec
+    gradient_to_parameter: GradientToParameterSpec
+    gradient_to_user_input: GradientToUserInputSpec
+    user_input_mutation: UserInputMutationSpec
+    token: OutputTokenSpec
+
+
+@dataclass
+class GraphSignature:
+    input_specs: List[InputSpec]
+    output_specs: List[OutputSpec]
+
+
+@dataclass
+class RangeConstraint:
+    min_val: int
+    max_val: int
+
+
+@dataclass
+class ModuleCallSignature:
+    inputs: List[Argument]
+    outputs: List[Argument]
+
+    # These are serialized by calling pytree.treespec_dumps
+    # and deserialized by calling pytree.treespec_loads
+    in_spec: str
+    out_spec: str
+
+
+@dataclass
+class ModuleCallEntry:
+    fqn: str
+    signature: Optional[ModuleCallSignature] = None
+
+
+@dataclass
+class GraphModule:
+    graph: Graph
+    signature: GraphSignature
+    # This is used for unflattening, by tracking the calling structure of all of
+    # the modules in order to unflatten the modules back to the eager calling
+    # conventions.
+    module_call_graph: List[ModuleCallEntry]
+
+
+# Invariant: Every time a change is made to the schema, one of the versions
+# should be updated.
+@dataclass
+class SchemaVersion:
+    major: int  # Major version number is bumped every time a breaking change is made.
+    minor: int  # Minor version number is bumped when a compatible change is made.
+
+
+@dataclass
+class ExportedProgram:
+    graph_module: GraphModule
+    # Key is the opset namespace (ex. aten), and value is the version number
+    opset_version: Dict[str, int]
+    range_constraints: Dict[str, RangeConstraint]
+    schema_version: SchemaVersion
+    dialect: str
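
For orientation, a minimal usage sketch (not part of the commit; the concrete values are invented for illustration) of how these union-typed dataclasses are instantiated through the `_Union.create` helper imported at the top of the file:

    from torch._export.serde.schema import (
        Argument, SymExpr, SymExprHint, SymInt, SymIntArgument,
    )

    # A concrete dimension serializes into the "as_int" case of the union.
    static_dim = SymInt.create(as_int=4)

    # A symbolic dimension keeps its expression string plus an optional hint.
    dynamic_dim = SymInt.create(
        as_expr=SymExpr(expr_str="s0 + s1", hint=SymExprHint.create(as_int=4))
    )

    # Node inputs and outputs use the same tagged-union pattern via Argument.
    arg = Argument.create(as_sym_int=SymIntArgument.create(as_name="s0"))
    print(arg.type)    # "as_sym_int"
    print(static_dim)  # SymInt(as_int=4)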
valley/lib/python3.10/site-packages/torch/_export/serde/schema_check.py
ADDED
@@ -0,0 +1,286 @@
+# mypy: allow-untyped-defs
+import dataclasses
+import hashlib
+import re
+import typing
+from enum import IntEnum
+from typing import Any, Dict, Optional, Union
+
+from torch._export.serde import schema
+from torch._export.serde.union import _Union
+
+
+class SchemaUpdateError(Exception):
+    pass
+
+
+def _check(x, msg):
+    if not x:
+        raise SchemaUpdateError(msg)
+
+
+def _staged_schema():
+    ret: Dict[str, Any] = {}
+    defs = {}
+
+    def _handle_aggregate(ty):
+        def dump_type(t):
+            if isinstance(t, type):
+                return t.__name__
+            elif isinstance(t, str):
+                assert t in defs
+                return t
+            elif o := typing.get_origin(t):
+                # Lemme know if there's a better way to do this.
+                if o == list:
+                    head = "List"
+                elif o == dict:
+                    head = "Dict"
+                elif o == tuple:
+                    if typing.get_args(t) == ():
+                        return "Tuple[()]"
+                    head = "Tuple"
+                elif o == Union:
+                    args = typing.get_args(t)
+                    assert len(args) == 2 and args[1] == type(None)
+                    return f"Optional[{dump_type(args[0])}]"
+                else:
+                    raise AssertionError(f"Type {t} is not supported in export schema.")
+                return (
+                    f"{head}[{', '.join([dump_type(x) for x in typing.get_args(t)])}]"
+                )
+            elif t == ():
+                return "()"
+            else:
+                raise AssertionError(f"Type {t} is not supported in export schema.")
+
+        def dump_field(f):
+            t = dump_type(f.type)
+            ret = {"type": t}
+
+            value = dataclasses.MISSING
+            if f.default is not dataclasses.MISSING:
+                value = f.default
+            elif f.default_factory is not dataclasses.MISSING:
+                value = f.default_factory()
+
+            if t.startswith("Optional[") and value is not None:
+                raise AssertionError(
+                    f"Optional field {ty.__name__}.{f.name} must have default value to be None."
+                )
+
+            if value is not dataclasses.MISSING:
+                default = str(value)
+                ret["default"] = default
+            return ret
+
+        return {f.name: dump_field(f) for f in dataclasses.fields(ty)}
+
+    def _handle_int_enum(name, ty):
+        ret[name] = {"kind": "enum", "fields": {x.name: x.value for x in ty}}
+
+    def _handle_struct(name, ty):
+        ret[name] = {"kind": "struct", "fields": _handle_aggregate(ty)}
+
+    def _handle_union(name, ty):
+        ret[name] = {"kind": "union", "fields": _handle_aggregate(ty)}
+
+    for name in dir(schema):
+        if name.startswith("_"):
+            continue
+
+        value = getattr(schema, name)
+
+        if hasattr(value, "__module__") and value.__module__ != schema.__name__:
+            continue
+
+        defs[name] = value
+
+    for name, value in defs.items():
+        if isinstance(value, type):
+            if issubclass(value, IntEnum):
+                _handle_int_enum(name, value)
+            elif dataclasses.is_dataclass(value):
+                if issubclass(value, _Union):
+                    _handle_union(name, value)
+                else:
+                    _handle_struct(name, value)
+            else:
+                raise AssertionError(f"Unknown schema type {name}: {value}")
+        elif isinstance(value, (int, tuple)):
+            assert name in ("SCHEMA_VERSION", "TREESPEC_VERSION")
+        else:
+            raise AssertionError(f"Unknown variable {name}: {value}")
+
+    ret["SCHEMA_VERSION"] = list(defs["SCHEMA_VERSION"])
+    assert all(x > 0 for x in ret["SCHEMA_VERSION"])
+    ret["TREESPEC_VERSION"] = defs["TREESPEC_VERSION"]
+    assert ret["TREESPEC_VERSION"] > 0
+    return ret
+
+
+def _diff_schema(dst, src):
+    additions = {key: src[key] for key in src.keys() - dst.keys()}
+    subtractions = {key: dst[key] for key in dst.keys() - src.keys()}
+
+    common_keys = src.keys() & dst.keys()
+
+    versions = {"SCHEMA_VERSION", "TREESPEC_VERSION"}
+    common_keys -= versions
+
+    for key in common_keys:
+        src_kind = src[key]["kind"]
+        src_fields = src[key]["fields"]
+        dst_kind = dst[key]["kind"]
+        dst_fields = dst[key]["fields"]
+        _check(
+            src_kind == dst_kind,
+            f"Type {key} changed kind from {dst_kind} to {src_kind}",
+        )
+        assert isinstance(src_fields, dict) and isinstance(dst_fields, dict)
+        added_fields = {
+            key: src_fields[key] for key in src_fields.keys() - dst_fields.keys()
+        }
+        subtracted_fields = {
+            key: dst_fields[key] for key in dst_fields.keys() - src_fields.keys()
+        }
+        common_fields = src_fields.keys() & dst_fields.keys()
+
+        for field in common_fields:
+            src_field = src_fields[field]
+            dst_field = dst_fields[field]
+            if src_kind == "struct":
+                _check(
+                    src_field["type"] == dst_field["type"],
+                    f"Type of the field {key}.{field} changed from {dst_field['type']} to {src_field['type']}",
+                )
+                if "default" in src_field and "default" not in dst_field:
+                    added_fields[field] = {}
+                    added_fields[field]["default"] = src_field["default"]
+                if "default" not in src_field and "default" in dst_field:
+                    subtracted_fields[field] = {}
+                    subtracted_fields[field]["default"] = dst_field["default"]
+            elif src_kind == "enum":
+                _check(
+                    src_field == dst_field,
+                    f"Value of the enum field {key}.{field} changed from {dst_field} to {src_field}",
+                )
+            elif src_kind == "union":
+                _check(
+                    src_field["type"] == dst_field["type"],
+                    f"Type of the field {key}.{field} changed from {dst_field['type']} to {src_field['type']}",
+                )
+            else:
+                raise AssertionError(f"Unknown kind {src_kind}: {key}")
+        if len(added_fields) > 0:
+            assert key not in additions
+            additions[key] = {}
+            additions[key]["fields"] = added_fields
+        if len(subtracted_fields) > 0:
+            assert key not in subtractions
+            subtractions[key] = {}
+            subtractions[key]["fields"] = subtracted_fields
+
+    return additions, subtractions
+
+
+def _hash_schema(s):
+    return hashlib.sha256(repr(s).encode("utf-8")).hexdigest()
+
+
+@dataclasses.dataclass
+class _Commit:
+    result: Dict[str, Any]
+    checksum_result: str
+    path: str
+    additions: Dict[str, Any]
+    subtractions: Dict[str, Any]
+    base: Dict[str, Any]
+    checksum_base: Optional[str]
+
+
+def update_schema():
+    import importlib.resources
+
+    if importlib.resources.is_resource(__package__, "schema.yaml"):
+        content = importlib.resources.read_text(__package__, "schema.yaml")
+        match = re.search("checksum<<([A-Fa-f0-9]{64})>>", content)
+        _check(match is not None, "checksum not found in schema.yaml")
+        assert match is not None
+        checksum_base = match.group(1)
+        from yaml import load, Loader
+
+        dst = load(content, Loader=Loader)
+        assert isinstance(dst, dict)
+    else:
+        checksum_base = None
+        dst = {"SCHEMA_VERSION": None, "TREESPEC_VERSION": None}
+
+    src = _staged_schema()
+    additions, subtractions = _diff_schema(dst, src)
+    return _Commit(
+        result=src,
+        checksum_result=_hash_schema(src),
+        path=__package__.replace(".", "/") + "/schema.yaml",
+        additions=additions,
+        subtractions=subtractions,
+        base=dst,
+        checksum_base=checksum_base,
+    )
+
+
+def check(commit: _Commit, force_unsafe: bool = False):
+    next_version = None
+    reason = ""
+    # Step 1: Detect major schema updates.
+    if len(commit.additions) > 0:
+        for k, v in commit.additions.items():
+            if k not in commit.base:
+                continue
+            kind = commit.result[k]["kind"]
+            fields = v["fields"]
+            for f, d in fields.items():
+                if "default" not in d and kind == "struct":
+                    reason += (
+                        f"Field {k}.{f} is added to schema.py without a default value as an incompatible change "
+                        + "which requires major version bump.\n"
+                    )
+                    next_version = [commit.base["SCHEMA_VERSION"][0] + 1, 1]
+
+    if len(commit.subtractions) > 0:
+        for k, v in commit.subtractions.items():
+            if k not in commit.result:
+                continue
+            for f in v["fields"]:
+                reason = f"Field {k}.{f} is removed from schema.py as an incompatible change which requires major version bump.\n"
+            next_version = [commit.base["SCHEMA_VERSION"][0] + 1, 1]
+
+    if force_unsafe:
+        reason += "--force-unsafe is used."
+        next_version = commit.result["SCHEMA_VERSION"]
+    else:
+        # Step 2: Detect minor schema updates.
+        if next_version is None and len(commit.additions) > 0:
+            for k, v in commit.additions.items():
+                for f in v["fields"]:
+                    reason += (
+                        f"Field {k}.{f} is added to schema.py as a compatible change "
+                        + "which still requires minor version bump.\n"
+                    )
+            next_version = [
+                commit.base["SCHEMA_VERSION"][0],
+                commit.base["SCHEMA_VERSION"][1] + 1,
+            ]
+        if next_version is None and len(commit.subtractions) > 0:
+            for k, v in commit.subtractions.items():
+                for f in v["fields"]:
+                    reason += (
+                        f"Field {k}.{f} is removed from schema.py as a compatible change "
+                        + "which still requires minor version bump.\n"
+                    )
+            next_version = [
+                commit.base["SCHEMA_VERSION"][0],
+                commit.base["SCHEMA_VERSION"][1] + 1,
+            ]
+
+    return next_version, reason
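
A short sketch (not part of the commit) of the update flow these helpers implement, assuming a schema.yaml snapshot is packaged next to the module as `update_schema` expects:

    from torch._export.serde.schema_check import check, update_schema

    # Diff the staged schema.py definitions against the committed schema.yaml.
    commit = update_schema()

    # Decide whether the diff requires a major or minor SCHEMA_VERSION bump.
    next_version, reason = check(commit)
    if next_version is not None:
        print(f"Bump SCHEMA_VERSION to {next_version}:\n{reason}")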
valley/lib/python3.10/site-packages/torch/_export/serde/serialize.py
ADDED: The diff for this file is too large to render. See the raw diff.
valley/lib/python3.10/site-packages/torch/_export/serde/union.py
ADDED
@@ -0,0 +1,70 @@
+# mypy: allow-untyped-defs
+import functools
+from dataclasses import fields
+from typing import Hashable, Set
+
+
+class _UnionTag(str):
+    _cls: Hashable
+
+    @staticmethod
+    def create(t, cls):
+        tag = _UnionTag(t)
+        assert not hasattr(tag, "_cls")
+        tag._cls = cls
+        return tag
+
+    def __eq__(self, cmp) -> bool:
+        assert isinstance(cmp, str)
+        other = str(cmp)
+        assert other in _get_field_names(
+            self._cls
+        ), f"{other} is not a valid tag for {self._cls}. Available tags: {_get_field_names(self._cls)}"
+        return str(self) == other
+
+    def __hash__(self):
+        return hash(str(self))
+
+
+@functools.lru_cache(maxsize=None)
+def _get_field_names(cls) -> Set[str]:
+    return {f.name for f in fields(cls)}
+
+
+class _Union:
+    _type: _UnionTag
+
+    @classmethod
+    def create(cls, **kwargs):
+        assert len(kwargs) == 1
+        obj = cls(**{**{f.name: None for f in fields(cls)}, **kwargs})  # type: ignore[arg-type]
+        obj._type = _UnionTag.create(next(iter(kwargs.keys())), cls)
+        return obj
+
+    def __post_init__(self):
+        assert not any(f.name in ("type", "_type", "create", "value") for f in fields(self))  # type: ignore[arg-type, misc]
+
+    @property
+    def type(self) -> str:
+        try:
+            return self._type
+        except AttributeError as e:
+            raise RuntimeError(
+                f"Please use {type(self).__name__}.create to instantiate the union type."
+            ) from e
+
+    @property
+    def value(self):
+        return getattr(self, self.type)
+
+    def __getattribute__(self, name):
+        attr = super().__getattribute__(name)
+        if attr is None and name in _get_field_names(type(self)) and name != self.type:  # type: ignore[arg-type]
+            raise AttributeError(f"Field {name} is not set.")
+        return attr
+
+    def __str__(self):
+        return self.__repr__()
+
+    def __repr__(self):
+        return f"{type(self).__name__}({self.type}={getattr(self, self.type)})"
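
A minimal sketch (not part of the commit) of the tagged-union behavior `_Union` provides; the `IntOrStr` class is hypothetical, defined only for illustration:

    from dataclasses import dataclass

    from torch._export.serde.union import _Union

    @dataclass(repr=False)
    class IntOrStr(_Union):  # hypothetical example type
        as_int: int
        as_str: str

    v = IntOrStr.create(as_int=7)  # exactly one variant may be set
    assert v.type == "as_int" and v.value == 7
    print(v)  # IntOrStr(as_int=7)

    try:
        v.as_str  # reading the unset variant fails loudly
    except AttributeError as e:
        print(e)  # Field as_str is not set.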
valley/lib/python3.10/site-packages/torch/_export/serde/upgrade.py
ADDED
@@ -0,0 +1,14 @@
+# mypy: allow-untyped-defs
+
+class GraphModuleOpUpgrader:
+
+    def __init__(
+        self,
+        *args,
+        **kwargs
+    ):
+        pass
+
+
+    def upgrade(self, exported_program):
+        return exported_program
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/__init__.cpython-310.pyc
ADDED: Binary file (168 Bytes).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/aot_autograd.cpython-310.pyc
ADDED: Binary file (30.8 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/benchmark_utils.cpython-310.pyc
ADDED: Binary file (5.45 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/compilers.cpython-310.pyc
ADDED: Binary file (13.6 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/eager_transforms.cpython-310.pyc
ADDED: Binary file (55.6 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/fx_minifier.cpython-310.pyc
ADDED: Binary file (13.4 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/make_functional.cpython-310.pyc
ADDED: Binary file (21.3 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/pyfunctorch.cpython-310.pyc
ADDED: Binary file (8.88 kB).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/python_key.cpython-310.pyc
ADDED: Binary file (375 Bytes).
valley/lib/python3.10/site-packages/torch/_functorch/__pycache__/vmap.cpython-310.pyc
ADDED: Binary file (12.3 kB).
valley/lib/python3.10/site-packages/torch/jit/__init__.py
ADDED
@@ -0,0 +1,296 @@
+# mypy: allow-untyped-defs
+import warnings
+
+from contextlib import contextmanager
+from typing import Any, Iterator
+
+import torch._C
+
+# These are imported so users can access them from the `torch.jit` module
+from torch._jit_internal import (
+    _Await,
+    _drop,
+    _IgnoreContextManager,
+    _isinstance,
+    _overload,
+    _overload_method,
+    export,
+    Final,
+    Future,
+    ignore,
+    is_scripting,
+    unused,
+)
+from torch.jit._async import fork, wait
+from torch.jit._await import _awaitable, _awaitable_nowait, _awaitable_wait
+from torch.jit._decomposition_utils import _register_decomposition
+from torch.jit._freeze import freeze, optimize_for_inference, run_frozen_optimizations
+from torch.jit._fuser import (
+    fuser,
+    last_executed_optimized_graph,
+    optimized_execution,
+    set_fusion_strategy,
+)
+from torch.jit._ir_utils import _InsertPoint
+from torch.jit._script import (
+    _ScriptProfile,
+    _unwrap_optional,
+    Attribute,
+    CompilationUnit,
+    interface,
+    RecursiveScriptClass,
+    RecursiveScriptModule,
+    script,
+    script_method,
+    ScriptFunction,
+    ScriptModule,
+    ScriptWarning,
+)
+from torch.jit._serialization import (
+    jit_module_from_flatbuffer,
+    load,
+    save,
+    save_jit_module_to_flatbuffer,
+)
+from torch.jit._trace import (
+    _flatten,
+    _get_trace_graph,
+    _script_if_tracing,
+    _unique_state_dict,
+    is_tracing,
+    ONNXTracedModule,
+    TopLevelTracedModule,
+    trace,
+    trace_module,
+    TracedModule,
+    TracerWarning,
+    TracingCheckError,
+)
+
+from torch.utils import set_module
+
+__all__ = [
+    "Attribute",
+    "CompilationUnit",
+    "Error",
+    "Future",
+    "ScriptFunction",
+    "ScriptModule",
+    "annotate",
+    "enable_onednn_fusion",
+    "export",
+    "export_opnames",
+    "fork",
+    "freeze",
+    "interface",
+    "ignore",
+    "isinstance",
+    "load",
+    "onednn_fusion_enabled",
+    "optimize_for_inference",
+    "save",
+    "script",
+    "script_if_tracing",
+    "set_fusion_strategy",
+    "strict_fusion",
+    "trace",
+    "trace_module",
+    "unused",
+    "wait",
+]
+
+# For backwards compatibility
+_fork = fork
+_wait = wait
+_set_fusion_strategy = set_fusion_strategy
+
+
+def export_opnames(m):
+    r"""
+    Generate new bytecode for a Script module.
+
+    Returns what the op list would be for a Script Module based off the current code base.
+
+    If you have a LiteScriptModule and want to get the currently present
+    list of ops call _export_operator_list instead.
+    """
+    return torch._C._export_opnames(m._c)
+
+
+# torch.jit.Error
+Error = torch._C.JITException
+set_module(Error, "torch.jit")
+# This is not perfect but works in common cases
+Error.__name__ = "Error"
+Error.__qualname__ = "Error"
+
+
+# for use in python if using annotate
+def annotate(the_type, the_value):
+    """Use to give type of `the_value` in TorchScript compiler.
+
+    This method is a pass-through function that returns `the_value`, used to hint TorchScript
+    compiler the type of `the_value`. It is a no-op when running outside of TorchScript.
+
+    Though TorchScript can infer correct type for most Python expressions, there are some cases where
+    type inference can be wrong, including:
+
+    - Empty containers like `[]` and `{}`, which TorchScript assumes to be container of `Tensor`
+    - Optional types like `Optional[T]` but assigned a valid value of type `T`, TorchScript would assume
+      it is type `T` rather than `Optional[T]`
+
+    Note that `annotate()` does not help in `__init__` method of `torch.nn.Module` subclasses because it
+    is executed in eager mode. To annotate types of `torch.nn.Module` attributes,
+    use :meth:`~torch.jit.Attribute` instead.
+
+    Example:
+
+    .. testcode::
+
+        import torch
+        from typing import Dict
+
+        @torch.jit.script
+        def fn():
+            # Telling TorchScript that this empty dictionary is a (str -> int) dictionary
+            # instead of default dictionary type of (str -> Tensor).
+            d = torch.jit.annotate(Dict[str, int], {})
+
+            # Without `torch.jit.annotate` above, following statement would fail because of
+            # type mismatch.
+            d["name"] = 20
+
+    .. testcleanup::
+
+        del fn
+
+    Args:
+        the_type: Python type that should be passed to TorchScript compiler as type hint for `the_value`
+        the_value: Value or expression to hint type for.
+
+    Returns:
+        `the_value` is passed back as return value.
+    """
+    return the_value
+
+
+def script_if_tracing(fn):
+    """
+    Compiles ``fn`` when it is first called during tracing.
+
+    ``torch.jit.script`` has a non-negligible start up time when it is first called due to
+    lazy-initializations of many compiler builtins. Therefore you should not use
+    it in library code. However, you may want to have parts of your library work
+    in tracing even if they use control flow. In these cases, you should use
+    ``@torch.jit.script_if_tracing`` to substitute for
+    ``torch.jit.script``.
+
+    Args:
+        fn: A function to compile.
+
+    Returns:
+        If called during tracing, a :class:`ScriptFunction` created by `torch.jit.script` is returned.
+        Otherwise, the original function `fn` is returned.
+    """
+    return _script_if_tracing(fn)
+
+
+# for torch.jit.isinstance
+def isinstance(obj, target_type):
+    """
+    Provide container type refinement in TorchScript.
+
+    It can refine parameterized containers of the List, Dict, Tuple, and Optional types. E.g. ``List[str]``,
+    ``Dict[str, List[torch.Tensor]]``, ``Optional[Tuple[int,str,int]]``. It can also
+    refine basic types such as bools and ints that are available in TorchScript.
+
+    Args:
+        obj: object to refine the type of
+        target_type: type to try to refine obj to
+    Returns:
+        ``bool``: True if obj was successfully refined to the type of target_type,
+            False otherwise with no new type refinement
+
+
+    Example (using ``torch.jit.isinstance`` for type refinement):
+    .. testcode::
+
+        import torch
+        from typing import Any, Dict, List
+
+        class MyModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+
+            def forward(self, input: Any): # note the Any type
+                if torch.jit.isinstance(input, List[torch.Tensor]):
+                    for t in input:
+                        y = t.clamp(0, 0.5)
+                elif torch.jit.isinstance(input, Dict[str, str]):
+                    for val in input.values():
+                        print(val)
+
+        m = torch.jit.script(MyModule())
+        x = [torch.rand(3,3), torch.rand(4,3)]
+        m(x)
+        y = {"key1":"val1","key2":"val2"}
+        m(y)
+    """
+    return _isinstance(obj, target_type)
+
+
+class strict_fusion:
+    """
+    Give errors if not all nodes have been fused in inference, or symbolically differentiated in training.
+
+    Example:
+        Forcing fusion of additions.
+
+    .. code-block:: python
+
+        @torch.jit.script
+        def foo(x):
+            with torch.jit.strict_fusion():
+                return x + x + x
+
+    """
+
+    def __init__(self):
+        if not torch._jit_internal.is_scripting():
+            warnings.warn("Only works in script mode")
+        pass
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, type: Any, value: Any, tb: Any) -> None:
+        pass
+
+
+# Context manager for globally hiding source ranges when printing graphs.
+# Note that these functions are exposed to Python as static members of the
+# Graph class, so mypy checks need to be skipped.
+@contextmanager
+def _hide_source_ranges() -> Iterator[None]:
+    old_enable_source_ranges = torch._C.Graph.global_print_source_ranges  # type: ignore[attr-defined]
+    try:
+        torch._C.Graph.set_global_print_source_ranges(False)  # type: ignore[attr-defined]
+        yield
+    finally:
+        torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]
+
+
+def enable_onednn_fusion(enabled: bool):
+    """Enables or disables onednn JIT fusion based on the parameter `enabled`."""
+    torch._C._jit_set_llga_enabled(enabled)
+
+
+def onednn_fusion_enabled():
+    """Return whether onednn JIT fusion is enabled."""
+    return torch._C._jit_llga_enabled()
+
+
+del Any
+
+if not torch._C._jit_init():
+    raise RuntimeError("JIT initialization failed")
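
As a small usage sketch (not part of the commit) of the `_hide_source_ranges` context manager defined above, printing a scripted graph without source-range annotations:

    import torch

    @torch.jit.script
    def f(x):
        return x + 1

    with torch.jit._hide_source_ranges():
        print(f.graph)  # graph is printed without source ranges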
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_async.cpython-310.pyc
ADDED: Binary file (4.08 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_await.cpython-310.pyc
ADDED: Binary file (1.1 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_builtins.cpython-310.pyc
ADDED: Binary file (5.49 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_check.cpython-310.pyc
ADDED: Binary file (6.36 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_dataclass_impls.cpython-310.pyc
ADDED: Binary file (5.07 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_decomposition_utils.cpython-310.pyc
ADDED: Binary file (628 Bytes).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_decompositions.cpython-310.pyc
ADDED: Binary file (3.16 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_freeze.cpython-310.pyc
ADDED: Binary file (9.35 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_fuser.cpython-310.pyc
ADDED: Binary file (5.28 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_ir_utils.cpython-310.pyc
ADDED: Binary file (1.19 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_logging.cpython-310.pyc
ADDED: Binary file (387 Bytes).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_monkeytype_config.cpython-310.pyc
ADDED: Binary file (6.95 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_pickle.cpython-310.pyc
ADDED: Binary file (857 Bytes).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_recursive.cpython-310.pyc
ADDED: Binary file (26.3 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_script.cpython-310.pyc
ADDED: Binary file (51.4 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_serialization.cpython-310.pyc
ADDED: Binary file (8.91 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_shape_functions.cpython-310.pyc
ADDED: Binary file (35.6 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/_state.cpython-310.pyc
ADDED: Binary file (3.87 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/annotations.cpython-310.pyc
ADDED: Binary file (13.6 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/frontend.cpython-310.pyc
ADDED: Binary file (35.5 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/generate_bytecode.cpython-310.pyc
ADDED: Binary file (1.29 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/quantized.cpython-310.pyc
ADDED: Binary file (4.23 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/supported_ops.cpython-310.pyc
ADDED: Binary file (8.12 kB).
valley/lib/python3.10/site-packages/torch/jit/__pycache__/unsupported_tensor_ops.cpython-310.pyc
ADDED: Binary file (2.32 kB).
valley/lib/python3.10/site-packages/torch/jit/_async.py
ADDED
@@ -0,0 +1,102 @@
+# mypy: allow-untyped-defs
+"""Async API.
+
+This module contains the API for parallelism in TorchScript, notably:
+    * torch.jit.fork
+    * torch.jit.wait
+
+This is not intended to be imported directly; please use the exposed
+functionalities in `torch.jit`.
+"""
+
+import torch
+from torch._jit_internal import Future
+from torch.jit._builtins import _register_builtin
+
+from torch.utils import set_module
+
+set_module(Future, "torch.jit")
+
+
+def fork(func, *args, **kwargs):
+    r"""
+    Create an asynchronous task executing `func` and a reference to the value of the result of this execution.
+
+    `fork` will return immediately, so the return value of `func` may not have been computed yet. To force completion
+    of the task and access the return value invoke `torch.jit.wait` on the Future. `fork` invoked
+    with a `func` which returns `T` is typed as `torch.jit.Future[T]`. `fork` calls can be arbitrarily
+    nested, and may be invoked with positional and keyword arguments.
+    Asynchronous execution will only occur when run in TorchScript. If run in pure python,
+    `fork` will not execute in parallel. `fork` will also not execute in parallel when invoked
+    while tracing, however the `fork` and `wait` calls will be captured in the exported IR Graph.
+
+    .. warning::
+        `fork` tasks will execute non-deterministically. We recommend only spawning
+        parallel fork tasks for pure functions that do not modify their inputs,
+        module attributes, or global state.
+
+    Args:
+        func (callable or torch.nn.Module): A Python function or `torch.nn.Module`
+            that will be invoked. If executed in TorchScript, it will execute asynchronously,
+            otherwise it will not. Traced invocations of fork will be captured in the IR.
+        ``*args``, ``**kwargs``: arguments to invoke `func` with.
+    Returns:
+        `torch.jit.Future[T]`: a reference to the execution of `func`. The value `T`
+            can only be accessed by forcing completion of `func` through `torch.jit.wait`.
+
+    Example (fork a free function):
+
+    .. code-block:: python
+
+        import torch
+        from torch import Tensor
+        def foo(a : Tensor, b : int) -> Tensor:
+            return a + b
+        def bar(a):
+            fut : torch.jit.Future[Tensor] = torch.jit.fork(foo, a, b=2)
+            return torch.jit.wait(fut)
+        script_bar = torch.jit.script(bar)
+        input = torch.tensor(2)
+        # only the scripted version executes asynchronously
+        assert script_bar(input) == bar(input)
+        # trace is not run asynchronously, but fork is captured in IR
+        graph = torch.jit.trace(bar, (input,)).graph
+        assert "fork" in str(graph)
+
+    Example (fork a module method):
+
+    .. code-block:: python
+
+        import torch
+        from torch import Tensor
+        class AddMod(torch.nn.Module):
+            def forward(self, a: Tensor, b : int):
+                return a + b
+        class Mod(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.mod = AddMod()
+            def forward(self, input):
+                fut = torch.jit.fork(self.mod, input, b=2)
+                return torch.jit.wait(fut)
+        input = torch.tensor(2)
+        mod = Mod()
+        assert mod(input) == torch.jit.script(mod).forward(input)
+    """
+    return torch._C.fork(func, *args, **kwargs)
+
+
+def wait(future):
+    r"""
+    Force completion of a `torch.jit.Future[T]` asynchronous task, returning the result of the task.
+
+    See :func:`~fork` for docs and examples.
+    Args:
+        future (torch.jit.Future[T]): an asynchronous task reference, created through `torch.jit.fork`
+    Returns:
+        `T`: the return value of the completed task
+    """
+    return torch._C.wait(future)
+
+
+_register_builtin(wait, "aten::wait")