diff --git a/.gitattributes b/.gitattributes index b18fdc81a4eb68cf86d34d18c65dc0ad1b30b003..7526ca530986b7539d6235d229ebb6f16fa982ef 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1380,3 +1380,6 @@ evalkit_tf446/lib/python3.10/site-packages/scipy/io/matlab/_mio5_utils.cpython-3 evalkit_tf446/lib/python3.10/site-packages/scipy/io/matlab/_streams.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text evalkit_tf446/lib/python3.10/site-packages/scipy/stats/__pycache__/_mstats_basic.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text deepseek/bin/lzma filter=lfs diff=lfs merge=lfs -text +evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_moduleTNC.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text +evalkit_tf446/lib/python3.10/site-packages/scipy/special/tests/__pycache__/test_basic.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text +evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_lbfgsb.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text diff --git a/deepseek/lib/python3.10/importlib/metadata/__pycache__/_text.cpython-310.pyc b/deepseek/lib/python3.10/importlib/metadata/__pycache__/_text.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88f7565310c100896a9e98f723a7cdb10cd001d5 Binary files /dev/null and b/deepseek/lib/python3.10/importlib/metadata/__pycache__/_text.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ddd37ad861892ca8ec7273b2e055c55e93f88eaf --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/RECORD @@ -0,0 +1,20 @@ +attrdict-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrdict-2.0.1.dist-info/LICENSE.txt,sha256=7bXwDR-EXRD9ybjGNRq4IPk_ZEm3aWev8xkme2Fb4k4,1066 
+attrdict-2.0.1.dist-info/METADATA,sha256=qp1NCkW6JHByrhJOt9Pt6JuS45qvdtTxRWIf0jdIGlA,6690 +attrdict-2.0.1.dist-info/RECORD,, +attrdict-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrdict-2.0.1.dist-info/WHEEL,sha256=_wJFdOYk7i3xxT8ElOkUJvOdOvfNGbR9g-bf6UQT6sU,110 +attrdict-2.0.1.dist-info/top_level.txt,sha256=2f1-Wyfr5ZHsGvOFLqcj3y6OfZglxI3gjETO12COZRc,9 +attrdict-2.0.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +attrdict/__init__.py,sha256=fdJfkB3hQK2tcqck7FSmCKjyzmjx29Z3MBQyAev43E0,267 +attrdict/__pycache__/__init__.cpython-310.pyc,, +attrdict/__pycache__/default.cpython-310.pyc,, +attrdict/__pycache__/dictionary.cpython-310.pyc,, +attrdict/__pycache__/mapping.cpython-310.pyc,, +attrdict/__pycache__/merge.cpython-310.pyc,, +attrdict/__pycache__/mixins.cpython-310.pyc,, +attrdict/default.py,sha256=dpolSpC0J185AIAG75E0Sm6fqOkp-7hssH-x1u6v8co,3540 +attrdict/dictionary.py,sha256=EjolfMd-kzn5K009pTx2Mr_O4OCEPEg-57Z_6-Lsixw,1462 +attrdict/mapping.py,sha256=QGEy-z-3O3OnPXBB9XgE7WXlWtM_nheQwBaqmSfmfRs,2464 +attrdict/merge.py,sha256=ffljqIQ1fKRWUOcLRtoZnDzZp9h06aYmGFGYz66-wlY,1083 +attrdict/mixins.py,sha256=Jya5crO7NsUbZJYVMUnZvc2QjxpSWDUFoIEq7xcMd6U,6624 diff --git a/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..c4bde30377756381c5d37d2ed1a082566a715c9b --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.3) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/top_level.txt b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f182865577508cd3a69aa35d907fcaf5f850a16 
--- /dev/null +++ b/deepseek/lib/python3.10/site-packages/attrdict-2.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +attrdict diff --git a/deepseek/lib/python3.10/site-packages/click-8.1.7.dist-info/top_level.txt b/deepseek/lib/python3.10/site-packages/click-8.1.7.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..dca9a909647e3b066931de2909c2d1e65c78c995 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/click-8.1.7.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/deepseek/lib/python3.10/site-packages/cv2/detail/__init__.pyi b/deepseek/lib/python3.10/site-packages/cv2/detail/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..71917cce8679bbfddc13b7fcc650b713f12d9251 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/cv2/detail/__init__.pyi @@ -0,0 +1,600 @@ +__all__: list[str] = [] + +import cv2 +import cv2.gapi +import cv2.gapi.ie +import cv2.gapi.onnx +import cv2.gapi.ov +import cv2.typing +import numpy +import typing as _typing + + +# Enumerations +TEST_CUSTOM: int +TEST_EQ: int +TEST_NE: int +TEST_LE: int +TEST_LT: int +TEST_GE: int +TEST_GT: int +TestOp = int +"""One of [TEST_CUSTOM, TEST_EQ, TEST_NE, TEST_LE, TEST_LT, TEST_GE, TEST_GT]""" + +WAVE_CORRECT_HORIZ: int +WAVE_CORRECT_VERT: int +WAVE_CORRECT_AUTO: int +WaveCorrectKind = int +"""One of [WAVE_CORRECT_HORIZ, WAVE_CORRECT_VERT, WAVE_CORRECT_AUTO]""" + +OpaqueKind_CV_UNKNOWN: int +OPAQUE_KIND_CV_UNKNOWN: int +OpaqueKind_CV_BOOL: int +OPAQUE_KIND_CV_BOOL: int +OpaqueKind_CV_INT: int +OPAQUE_KIND_CV_INT: int +OpaqueKind_CV_INT64: int +OPAQUE_KIND_CV_INT64: int +OpaqueKind_CV_DOUBLE: int +OPAQUE_KIND_CV_DOUBLE: int +OpaqueKind_CV_FLOAT: int +OPAQUE_KIND_CV_FLOAT: int +OpaqueKind_CV_UINT64: int +OPAQUE_KIND_CV_UINT64: int +OpaqueKind_CV_STRING: int +OPAQUE_KIND_CV_STRING: int +OpaqueKind_CV_POINT: int +OPAQUE_KIND_CV_POINT: int +OpaqueKind_CV_POINT2F: int +OPAQUE_KIND_CV_POINT2F: int +OpaqueKind_CV_POINT3F: int 
+OPAQUE_KIND_CV_POINT3F: int +OpaqueKind_CV_SIZE: int +OPAQUE_KIND_CV_SIZE: int +OpaqueKind_CV_RECT: int +OPAQUE_KIND_CV_RECT: int +OpaqueKind_CV_SCALAR: int +OPAQUE_KIND_CV_SCALAR: int +OpaqueKind_CV_MAT: int +OPAQUE_KIND_CV_MAT: int +OpaqueKind_CV_DRAW_PRIM: int +OPAQUE_KIND_CV_DRAW_PRIM: int +OpaqueKind = int +"""One of [OpaqueKind_CV_UNKNOWN, OPAQUE_KIND_CV_UNKNOWN, OpaqueKind_CV_BOOL, OPAQUE_KIND_CV_BOOL, OpaqueKind_CV_INT, OPAQUE_KIND_CV_INT, OpaqueKind_CV_INT64, OPAQUE_KIND_CV_INT64, OpaqueKind_CV_DOUBLE, OPAQUE_KIND_CV_DOUBLE, OpaqueKind_CV_FLOAT, OPAQUE_KIND_CV_FLOAT, OpaqueKind_CV_UINT64, OPAQUE_KIND_CV_UINT64, OpaqueKind_CV_STRING, OPAQUE_KIND_CV_STRING, OpaqueKind_CV_POINT, OPAQUE_KIND_CV_POINT, OpaqueKind_CV_POINT2F, OPAQUE_KIND_CV_POINT2F, OpaqueKind_CV_POINT3F, OPAQUE_KIND_CV_POINT3F, OpaqueKind_CV_SIZE, OPAQUE_KIND_CV_SIZE, OpaqueKind_CV_RECT, OPAQUE_KIND_CV_RECT, OpaqueKind_CV_SCALAR, OPAQUE_KIND_CV_SCALAR, OpaqueKind_CV_MAT, OPAQUE_KIND_CV_MAT, OpaqueKind_CV_DRAW_PRIM, OPAQUE_KIND_CV_DRAW_PRIM]""" + +ArgKind_OPAQUE_VAL: int +ARG_KIND_OPAQUE_VAL: int +ArgKind_OPAQUE: int +ARG_KIND_OPAQUE: int +ArgKind_GOBJREF: int +ARG_KIND_GOBJREF: int +ArgKind_GMAT: int +ARG_KIND_GMAT: int +ArgKind_GMATP: int +ARG_KIND_GMATP: int +ArgKind_GFRAME: int +ARG_KIND_GFRAME: int +ArgKind_GSCALAR: int +ARG_KIND_GSCALAR: int +ArgKind_GARRAY: int +ARG_KIND_GARRAY: int +ArgKind_GOPAQUE: int +ARG_KIND_GOPAQUE: int +ArgKind = int +"""One of [ArgKind_OPAQUE_VAL, ARG_KIND_OPAQUE_VAL, ArgKind_OPAQUE, ARG_KIND_OPAQUE, ArgKind_GOBJREF, ARG_KIND_GOBJREF, ArgKind_GMAT, ARG_KIND_GMAT, ArgKind_GMATP, ARG_KIND_GMATP, ArgKind_GFRAME, ARG_KIND_GFRAME, ArgKind_GSCALAR, ARG_KIND_GSCALAR, ArgKind_GARRAY, ARG_KIND_GARRAY, ArgKind_GOPAQUE, ARG_KIND_GOPAQUE]""" + + +Blender_NO: int +BLENDER_NO: int +Blender_FEATHER: int +BLENDER_FEATHER: int +Blender_MULTI_BAND: int +BLENDER_MULTI_BAND: int + +ExposureCompensator_NO: int +EXPOSURE_COMPENSATOR_NO: int +ExposureCompensator_GAIN: int 
+EXPOSURE_COMPENSATOR_GAIN: int +ExposureCompensator_GAIN_BLOCKS: int +EXPOSURE_COMPENSATOR_GAIN_BLOCKS: int +ExposureCompensator_CHANNELS: int +EXPOSURE_COMPENSATOR_CHANNELS: int +ExposureCompensator_CHANNELS_BLOCKS: int +EXPOSURE_COMPENSATOR_CHANNELS_BLOCKS: int + +SeamFinder_NO: int +SEAM_FINDER_NO: int +SeamFinder_VORONOI_SEAM: int +SEAM_FINDER_VORONOI_SEAM: int +SeamFinder_DP_SEAM: int +SEAM_FINDER_DP_SEAM: int + +DpSeamFinder_COLOR: int +DP_SEAM_FINDER_COLOR: int +DpSeamFinder_COLOR_GRAD: int +DP_SEAM_FINDER_COLOR_GRAD: int +DpSeamFinder_CostFunction = int +"""One of [DpSeamFinder_COLOR, DP_SEAM_FINDER_COLOR, DpSeamFinder_COLOR_GRAD, DP_SEAM_FINDER_COLOR_GRAD]""" + +Timelapser_AS_IS: int +TIMELAPSER_AS_IS: int +Timelapser_CROP: int +TIMELAPSER_CROP: int + +GraphCutSeamFinderBase_COST_COLOR: int +GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR: int +GraphCutSeamFinderBase_COST_COLOR_GRAD: int +GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR_GRAD: int +GraphCutSeamFinderBase_CostType = int +"""One of [GraphCutSeamFinderBase_COST_COLOR, GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR, GraphCutSeamFinderBase_COST_COLOR_GRAD, GRAPH_CUT_SEAM_FINDER_BASE_COST_COLOR_GRAD]""" + +TrackerSamplerCSC_MODE_INIT_POS: int +TRACKER_SAMPLER_CSC_MODE_INIT_POS: int +TrackerSamplerCSC_MODE_INIT_NEG: int +TRACKER_SAMPLER_CSC_MODE_INIT_NEG: int +TrackerSamplerCSC_MODE_TRACK_POS: int +TRACKER_SAMPLER_CSC_MODE_TRACK_POS: int +TrackerSamplerCSC_MODE_TRACK_NEG: int +TRACKER_SAMPLER_CSC_MODE_TRACK_NEG: int +TrackerSamplerCSC_MODE_DETECT: int +TRACKER_SAMPLER_CSC_MODE_DETECT: int +TrackerSamplerCSC_MODE = int +"""One of [TrackerSamplerCSC_MODE_INIT_POS, TRACKER_SAMPLER_CSC_MODE_INIT_POS, TrackerSamplerCSC_MODE_INIT_NEG, TRACKER_SAMPLER_CSC_MODE_INIT_NEG, TrackerSamplerCSC_MODE_TRACK_POS, TRACKER_SAMPLER_CSC_MODE_TRACK_POS, TrackerSamplerCSC_MODE_TRACK_NEG, TRACKER_SAMPLER_CSC_MODE_TRACK_NEG, TrackerSamplerCSC_MODE_DETECT, TRACKER_SAMPLER_CSC_MODE_DETECT]""" + + +# Classes +class Blender: + # Functions + @classmethod 
+ def createDefault(cls, type: int, try_gpu: bool = ...) -> Blender: ... + + @_typing.overload + def prepare(self, corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> None: ... + @_typing.overload + def prepare(self, dst_roi: cv2.typing.Rect) -> None: ... + + @_typing.overload + def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ... + @_typing.overload + def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ... + + @_typing.overload + def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ... + @_typing.overload + def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ... + + +class FeatherBlender(Blender): + # Functions + def __init__(self, sharpness: float = ...) -> None: ... + + def sharpness(self) -> float: ... + + def setSharpness(self, val: float) -> None: ... + + def prepare(self, dst_roi: cv2.typing.Rect) -> None: ... + + @_typing.overload + def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ... + @_typing.overload + def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ... + + @_typing.overload + def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ... + @_typing.overload + def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ... + + def createWeightMaps(self, masks: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], weight_maps: _typing.Sequence[cv2.UMat]) -> tuple[cv2.typing.Rect, _typing.Sequence[cv2.UMat]]: ... + + +class MultiBandBlender(Blender): + # Functions + def __init__(self, try_gpu: int = ..., num_bands: int = ..., weight_type: int = ...) -> None: ... + + def numBands(self) -> int: ... + + def setNumBands(self, val: int) -> None: ... 
+ + def prepare(self, dst_roi: cv2.typing.Rect) -> None: ... + + @_typing.overload + def feed(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ... + @_typing.overload + def feed(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ... + + @_typing.overload + def blend(self, dst: cv2.typing.MatLike, dst_mask: cv2.typing.MatLike) -> tuple[cv2.typing.MatLike, cv2.typing.MatLike]: ... + @_typing.overload + def blend(self, dst: cv2.UMat, dst_mask: cv2.UMat) -> tuple[cv2.UMat, cv2.UMat]: ... + + +class CameraParams: + focal: float + aspect: float + ppx: float + ppy: float + R: cv2.typing.MatLike + t: cv2.typing.MatLike + + # Functions + def K(self) -> cv2.typing.MatLike: ... + + +class ExposureCompensator: + # Functions + @classmethod + def createDefault(cls, type: int) -> ExposureCompensator: ... + + def feed(self, corners: _typing.Sequence[cv2.typing.Point], images: _typing.Sequence[cv2.UMat], masks: _typing.Sequence[cv2.UMat]) -> None: ... + + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ... + + def getMatGains(self, arg1: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, arg1: _typing.Sequence[cv2.typing.MatLike]) -> None: ... + + def setUpdateGain(self, b: bool) -> None: ... + + def getUpdateGain(self) -> bool: ... + + +class NoExposureCompensator(ExposureCompensator): + # Functions + @_typing.overload + def apply(self, arg1: int, arg2: cv2.typing.Point, arg3: cv2.typing.MatLike, arg4: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, arg1: int, arg2: cv2.typing.Point, arg3: cv2.UMat, arg4: cv2.UMat) -> cv2.UMat: ... 
+ + def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ... + + +class GainCompensator(ExposureCompensator): + # Functions + @_typing.overload + def __init__(self) -> None: ... + @_typing.overload + def __init__(self, nr_feeds: int) -> None: ... + + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ... + + def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ... + + def setNrFeeds(self, nr_feeds: int) -> None: ... + + def getNrFeeds(self) -> int: ... + + def setSimilarityThreshold(self, similarity_threshold: float) -> None: ... + + def getSimilarityThreshold(self) -> float: ... + + +class ChannelsCompensator(ExposureCompensator): + # Functions + def __init__(self, nr_feeds: int = ...) -> None: ... + + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ... + + def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ... + + def setNrFeeds(self, nr_feeds: int) -> None: ... + + def getNrFeeds(self) -> int: ... + + def setSimilarityThreshold(self, similarity_threshold: float) -> None: ... + + def getSimilarityThreshold(self) -> float: ... 
+ + +class BlocksCompensator(ExposureCompensator): + # Functions + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ... + + def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ... + + def setNrFeeds(self, nr_feeds: int) -> None: ... + + def getNrFeeds(self) -> int: ... + + def setSimilarityThreshold(self, similarity_threshold: float) -> None: ... + + def getSimilarityThreshold(self) -> float: ... + + @_typing.overload + def setBlockSize(self, width: int, height: int) -> None: ... + @_typing.overload + def setBlockSize(self, size: cv2.typing.Size) -> None: ... + + def getBlockSize(self) -> cv2.typing.Size: ... + + def setNrGainsFilteringIterations(self, nr_iterations: int) -> None: ... + + def getNrGainsFilteringIterations(self) -> int: ... + + +class BlocksGainCompensator(BlocksCompensator): + # Functions + @_typing.overload + def __init__(self, bl_width: int = ..., bl_height: int = ...) -> None: ... + @_typing.overload + def __init__(self, bl_width: int, bl_height: int, nr_feeds: int) -> None: ... + + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.typing.MatLike, mask: cv2.typing.MatLike) -> cv2.typing.MatLike: ... + @_typing.overload + def apply(self, index: int, corner: cv2.typing.Point, image: cv2.UMat, mask: cv2.UMat) -> cv2.UMat: ... + + def getMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[cv2.typing.MatLike]: ... + + def setMatGains(self, umv: _typing.Sequence[cv2.typing.MatLike]) -> None: ... 
+ + +class BlocksChannelsCompensator(BlocksCompensator): + # Functions + def __init__(self, bl_width: int = ..., bl_height: int = ..., nr_feeds: int = ...) -> None: ... + + +class ImageFeatures: + img_idx: int + img_size: cv2.typing.Size + keypoints: _typing.Sequence[cv2.KeyPoint] + descriptors: cv2.UMat + + # Functions + def getKeypoints(self) -> _typing.Sequence[cv2.KeyPoint]: ... + + +class MatchesInfo: + src_img_idx: int + dst_img_idx: int + matches: _typing.Sequence[cv2.DMatch] + inliers_mask: numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]] + num_inliers: int + H: cv2.typing.MatLike + confidence: float + + # Functions + def getMatches(self) -> _typing.Sequence[cv2.DMatch]: ... + + def getInliers(self) -> numpy.ndarray[_typing.Any, numpy.dtype[numpy.uint8]]: ... + + +class FeaturesMatcher: + # Functions + def apply(self, features1: ImageFeatures, features2: ImageFeatures) -> MatchesInfo: ... + + def apply2(self, features: _typing.Sequence[ImageFeatures], mask: cv2.UMat | None = ...) -> _typing.Sequence[MatchesInfo]: ... + + def isThreadSafe(self) -> bool: ... + + def collectGarbage(self) -> None: ... + + +class BestOf2NearestMatcher(FeaturesMatcher): + # Functions + def __init__(self, try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ..., matches_confindece_thresh: float = ...) -> None: ... + + def collectGarbage(self) -> None: ... + + @classmethod + def create(cls, try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ..., matches_confindece_thresh: float = ...) -> BestOf2NearestMatcher: ... + + +class BestOf2NearestRangeMatcher(BestOf2NearestMatcher): + # Functions + def __init__(self, range_width: int = ..., try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ..., num_matches_thresh2: int = ...) -> None: ... 
+ + +class AffineBestOf2NearestMatcher(BestOf2NearestMatcher): + # Functions + def __init__(self, full_affine: bool = ..., try_use_gpu: bool = ..., match_conf: float = ..., num_matches_thresh1: int = ...) -> None: ... + + +class Estimator: + # Functions + def apply(self, features: _typing.Sequence[ImageFeatures], pairwise_matches: _typing.Sequence[MatchesInfo], cameras: _typing.Sequence[CameraParams]) -> tuple[bool, _typing.Sequence[CameraParams]]: ... + + +class HomographyBasedEstimator(Estimator): + # Functions + def __init__(self, is_focals_estimated: bool = ...) -> None: ... + + +class AffineBasedEstimator(Estimator): + # Functions + def __init__(self) -> None: ... + + +class BundleAdjusterBase(Estimator): + # Functions + def refinementMask(self) -> cv2.typing.MatLike: ... + + def setRefinementMask(self, mask: cv2.typing.MatLike) -> None: ... + + def confThresh(self) -> float: ... + + def setConfThresh(self, conf_thresh: float) -> None: ... + + def termCriteria(self) -> cv2.typing.TermCriteria: ... + + def setTermCriteria(self, term_criteria: cv2.typing.TermCriteria) -> None: ... + + +class NoBundleAdjuster(BundleAdjusterBase): + # Functions + def __init__(self) -> None: ... + + +class BundleAdjusterReproj(BundleAdjusterBase): + # Functions + def __init__(self) -> None: ... + + +class BundleAdjusterRay(BundleAdjusterBase): + # Functions + def __init__(self) -> None: ... + + +class BundleAdjusterAffine(BundleAdjusterBase): + # Functions + def __init__(self) -> None: ... + + +class BundleAdjusterAffinePartial(BundleAdjusterBase): + # Functions + def __init__(self) -> None: ... + + +class SeamFinder: + # Functions + def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + + @classmethod + def createDefault(cls, type: int) -> SeamFinder: ... 
+ + +class NoSeamFinder(SeamFinder): + # Functions + def find(self, arg1: _typing.Sequence[cv2.UMat], arg2: _typing.Sequence[cv2.typing.Point], arg3: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + + +class PairwiseSeamFinder(SeamFinder): + # Functions + def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + + +class VoronoiSeamFinder(PairwiseSeamFinder): + # Functions + def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + + +class DpSeamFinder(SeamFinder): + # Functions + def __init__(self, costFunc: str) -> None: ... + + def setCostFunction(self, val: str) -> None: ... + + +class GraphCutSeamFinder: + # Functions + def __init__(self, cost_type: str, terminal_cost: float = ..., bad_region_penalty: float = ...) -> None: ... + + def find(self, src: _typing.Sequence[cv2.UMat], corners: _typing.Sequence[cv2.typing.Point], masks: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + + +class Timelapser: + # Functions + @classmethod + def createDefault(cls, type: int) -> Timelapser: ... + + def initialize(self, corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> None: ... + + @_typing.overload + def process(self, img: cv2.typing.MatLike, mask: cv2.typing.MatLike, tl: cv2.typing.Point) -> None: ... + @_typing.overload + def process(self, img: cv2.UMat, mask: cv2.UMat, tl: cv2.typing.Point) -> None: ... + + def getDst(self) -> cv2.UMat: ... + + +class TimelapserCrop(Timelapser): + ... + +class ProjectorBase: + ... + +class SphericalProjector(ProjectorBase): + # Functions + def mapForward(self, x: float, y: float, u: float, v: float) -> None: ... + + def mapBackward(self, u: float, v: float, x: float, y: float) -> None: ... 
+ + + +# Functions +def calibrateRotatingCamera(Hs: _typing.Sequence[cv2.typing.MatLike], K: cv2.typing.MatLike | None = ...) -> tuple[bool, cv2.typing.MatLike]: ... + +@_typing.overload +def computeImageFeatures(featuresFinder: cv2.Feature2D, images: _typing.Sequence[cv2.typing.MatLike], masks: _typing.Sequence[cv2.typing.MatLike] | None = ...) -> _typing.Sequence[ImageFeatures]: ... +@_typing.overload +def computeImageFeatures(featuresFinder: cv2.Feature2D, images: _typing.Sequence[cv2.UMat], masks: _typing.Sequence[cv2.UMat] | None = ...) -> _typing.Sequence[ImageFeatures]: ... + +@_typing.overload +def computeImageFeatures2(featuresFinder: cv2.Feature2D, image: cv2.typing.MatLike, mask: cv2.typing.MatLike | None = ...) -> ImageFeatures: ... +@_typing.overload +def computeImageFeatures2(featuresFinder: cv2.Feature2D, image: cv2.UMat, mask: cv2.UMat | None = ...) -> ImageFeatures: ... + +@_typing.overload +def createLaplacePyr(img: cv2.typing.MatLike, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... +@_typing.overload +def createLaplacePyr(img: cv2.UMat, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + +@_typing.overload +def createLaplacePyrGpu(img: cv2.typing.MatLike, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... +@_typing.overload +def createLaplacePyrGpu(img: cv2.UMat, num_levels: int, pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + +@_typing.overload +def createWeightMap(mask: cv2.typing.MatLike, sharpness: float, weight: cv2.typing.MatLike) -> cv2.typing.MatLike: ... +@_typing.overload +def createWeightMap(mask: cv2.UMat, sharpness: float, weight: cv2.UMat) -> cv2.UMat: ... + +def focalsFromHomography(H: cv2.typing.MatLike, f0: float, f1: float, f0_ok: bool, f1_ok: bool) -> None: ... 
+ +def leaveBiggestComponent(features: _typing.Sequence[ImageFeatures], pairwise_matches: _typing.Sequence[MatchesInfo], conf_threshold: float) -> _typing.Sequence[int]: ... + +def matchesGraphAsString(paths: _typing.Sequence[str], pairwise_matches: _typing.Sequence[MatchesInfo], conf_threshold: float) -> str: ... + +@_typing.overload +def normalizeUsingWeightMap(weight: cv2.typing.MatLike, src: cv2.typing.MatLike) -> cv2.typing.MatLike: ... +@_typing.overload +def normalizeUsingWeightMap(weight: cv2.UMat, src: cv2.UMat) -> cv2.UMat: ... + +def overlapRoi(tl1: cv2.typing.Point, tl2: cv2.typing.Point, sz1: cv2.typing.Size, sz2: cv2.typing.Size, roi: cv2.typing.Rect) -> bool: ... + +def restoreImageFromLaplacePyr(pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + +def restoreImageFromLaplacePyrGpu(pyr: _typing.Sequence[cv2.UMat]) -> _typing.Sequence[cv2.UMat]: ... + +@_typing.overload +def resultRoi(corners: _typing.Sequence[cv2.typing.Point], images: _typing.Sequence[cv2.UMat]) -> cv2.typing.Rect: ... +@_typing.overload +def resultRoi(corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> cv2.typing.Rect: ... + +def resultRoiIntersection(corners: _typing.Sequence[cv2.typing.Point], sizes: _typing.Sequence[cv2.typing.Size]) -> cv2.typing.Rect: ... + +def resultTl(corners: _typing.Sequence[cv2.typing.Point]) -> cv2.typing.Point: ... + +def selectRandomSubset(count: int, size: int, subset: _typing.Sequence[int]) -> None: ... + +def stitchingLogLevel() -> int: ... + +@_typing.overload +def strip(params: cv2.gapi.ie.PyParams) -> cv2.gapi.GNetParam: ... +@_typing.overload +def strip(params: cv2.gapi.onnx.PyParams) -> cv2.gapi.GNetParam: ... +@_typing.overload +def strip(params: cv2.gapi.ov.PyParams) -> cv2.gapi.GNetParam: ... + +def waveCorrect(rmats: _typing.Sequence[cv2.typing.MatLike], kind: WaveCorrectKind) -> _typing.Sequence[cv2.typing.MatLike]: ... 
+ + diff --git a/deepseek/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c2bfa2d465561fa84e4569eec558e76f716ac34 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/cv2/gapi/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi b/deepseek/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/cv2/gapi/core/ocl/__init__.pyi @@ -0,0 +1,9 @@ +__all__: list[str] = [] + +import cv2 + + +# Functions +def kernels() -> cv2.GKernelPackage: ... + + diff --git a/deepseek/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi b/deepseek/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..e9aa68c68a73a2a25a419d64f5781581931251ab --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/cv2/gapi/ie/detail/__init__.pyi @@ -0,0 +1,12 @@ +__all__: list[str] = [] + +ParamDesc_Kind_Load: int +PARAM_DESC_KIND_LOAD: int +ParamDesc_Kind_Import: int +PARAM_DESC_KIND_IMPORT: int +ParamDesc_Kind = int +"""One of [ParamDesc_Kind_Load, PARAM_DESC_KIND_LOAD, ParamDesc_Kind_Import, PARAM_DESC_KIND_IMPORT]""" + + +# Classes + diff --git a/deepseek/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi b/deepseek/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..b85ebb121e506c99a5bf55d46d4b61f31b62da80 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/cv2/gapi/ot/cpu/__init__.pyi @@ -0,0 +1,9 @@ +__all__: list[str] = [] + +import cv2 + + +# Functions +def kernels() -> cv2.GKernelPackage: ... 
+ + diff --git a/deepseek/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi b/deepseek/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..9bc2c8683cd47aa3c2466a9c40832c7a04e464fb --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/cv2/gapi/ov/__init__.pyi @@ -0,0 +1,74 @@ +__all__: list[str] = [] + +import cv2.typing +import typing as _typing + + +# Classes +class PyParams: + # Functions + @_typing.overload + def __init__(self) -> None: ... + @_typing.overload + def __init__(self, tag: str, model_path: str, bin_path: str, device: str) -> None: ... + @_typing.overload + def __init__(self, tag: str, blob_path: str, device: str) -> None: ... + + def cfgPluginConfig(self, config: cv2.typing.map_string_and_string) -> PyParams: ... + + @_typing.overload + def cfgInputTensorLayout(self, tensor_layout: str) -> PyParams: ... + @_typing.overload + def cfgInputTensorLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ... + + @_typing.overload + def cfgInputModelLayout(self, tensor_layout: str) -> PyParams: ... + @_typing.overload + def cfgInputModelLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ... + + @_typing.overload + def cfgOutputTensorLayout(self, tensor_layout: str) -> PyParams: ... + @_typing.overload + def cfgOutputTensorLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ... + + @_typing.overload + def cfgOutputModelLayout(self, tensor_layout: str) -> PyParams: ... + @_typing.overload + def cfgOutputModelLayout(self, layout_map: cv2.typing.map_string_and_string) -> PyParams: ... + + @_typing.overload + def cfgOutputTensorPrecision(self, precision: int) -> PyParams: ... + @_typing.overload + def cfgOutputTensorPrecision(self, precision_map: cv2.typing.map_string_and_int) -> PyParams: ... + + @_typing.overload + def cfgReshape(self, new_shape: _typing.Sequence[int]) -> PyParams: ... 
+ @_typing.overload + def cfgReshape(self, new_shape_map: cv2.typing.map_string_and_vector_size_t) -> PyParams: ... + + def cfgNumRequests(self, nireq: int) -> PyParams: ... + + @_typing.overload + def cfgMean(self, mean_values: _typing.Sequence[float]) -> PyParams: ... + @_typing.overload + def cfgMean(self, mean_map: cv2.typing.map_string_and_vector_float) -> PyParams: ... + + @_typing.overload + def cfgScale(self, scale_values: _typing.Sequence[float]) -> PyParams: ... + @_typing.overload + def cfgScale(self, scale_map: cv2.typing.map_string_and_vector_float) -> PyParams: ... + + @_typing.overload + def cfgResize(self, interpolation: int) -> PyParams: ... + @_typing.overload + def cfgResize(self, interpolation: cv2.typing.map_string_and_int) -> PyParams: ... + + + +# Functions +@_typing.overload +def params(tag: str, model_path: str, weights: str, device: str) -> PyParams: ... +@_typing.overload +def params(tag: str, bin_path: str, device: str) -> PyParams: ... + + diff --git a/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..0078392f3181a0cc92b39a4456890848d821c55e --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/METADATA @@ -0,0 +1,59 @@ +Metadata-Version: 2.3 +Name: filelock +Version: 3.16.1 +Summary: A platform independent file lock. 
+Project-URL: Documentation, https://py-filelock.readthedocs.io +Project-URL: Homepage, https://github.com/tox-dev/py-filelock +Project-URL: Source, https://github.com/tox-dev/py-filelock +Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues +Maintainer-email: Bernát Gábor +License-Expression: Unlicense +License-File: LICENSE +Keywords: application,cache,directory,log,user +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: The Unlicense (Unlicense) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: System +Requires-Python: >=3.8 +Provides-Extra: docs +Requires-Dist: furo>=2024.8.6; extra == 'docs' +Requires-Dist: sphinx-autodoc-typehints>=2.4.1; extra == 'docs' +Requires-Dist: sphinx>=8.0.2; extra == 'docs' +Provides-Extra: testing +Requires-Dist: covdefaults>=2.3; extra == 'testing' +Requires-Dist: coverage>=7.6.1; extra == 'testing' +Requires-Dist: diff-cover>=9.2; extra == 'testing' +Requires-Dist: pytest-asyncio>=0.24; extra == 'testing' +Requires-Dist: pytest-cov>=5; extra == 'testing' +Requires-Dist: pytest-mock>=3.14; extra == 'testing' +Requires-Dist: pytest-timeout>=2.3.1; extra == 'testing' +Requires-Dist: pytest>=8.3.3; extra == 'testing' +Requires-Dist: virtualenv>=20.26.4; extra == 'testing' +Provides-Extra: typing +Requires-Dist: typing-extensions>=4.12.2; (python_version < '3.11') and extra == 'typing' +Description-Content-Type: 
text/markdown + +# filelock + +[![PyPI](https://img.shields.io/pypi/v/filelock)](https://pypi.org/project/filelock/) +[![Supported Python +versions](https://img.shields.io/pypi/pyversions/filelock.svg)](https://pypi.org/project/filelock/) +[![Documentation +status](https://readthedocs.org/projects/py-filelock/badge/?version=latest)](https://py-filelock.readthedocs.io/en/latest/?badge=latest) +[![Code style: +black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Downloads](https://static.pepy.tech/badge/filelock/month)](https://pepy.tech/project/filelock) +[![check](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml/badge.svg)](https://github.com/tox-dev/py-filelock/actions/workflows/check.yml) + +For more information checkout the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html). diff --git a/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..a9c03ce620495d35cf7be0ffb1772cbe18ce13d0 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/RECORD @@ -0,0 +1,25 @@ +filelock-3.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +filelock-3.16.1.dist-info/METADATA,sha256=LXL5-XQe_eTKkdNs76A6jSicQ1DBSTXqkDcjsprWvIM,2944 +filelock-3.16.1.dist-info/RECORD,, +filelock-3.16.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +filelock-3.16.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +filelock-3.16.1.dist-info/licenses/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 +filelock/__init__.py,sha256=_t_-OAGXo_qyPa9lNQ1YnzVYEvSW3I0onPqzpomsVVg,1769 +filelock/__pycache__/__init__.cpython-310.pyc,, +filelock/__pycache__/_api.cpython-310.pyc,, +filelock/__pycache__/_error.cpython-310.pyc,, 
+filelock/__pycache__/_soft.cpython-310.pyc,, +filelock/__pycache__/_unix.cpython-310.pyc,, +filelock/__pycache__/_util.cpython-310.pyc,, +filelock/__pycache__/_windows.cpython-310.pyc,, +filelock/__pycache__/asyncio.cpython-310.pyc,, +filelock/__pycache__/version.cpython-310.pyc,, +filelock/_api.py,sha256=GVeBEGjpDD8S1bYqG6_u0MZfbYHS6XrHs_n3PVKq-h0,14541 +filelock/_error.py,sha256=-5jMcjTu60YAvAO1UbqDD1GIEjVkwr8xCFwDBtMeYDg,787 +filelock/_soft.py,sha256=haqtc_TB_KJbYv2a8iuEAclKuM4fMG1vTcp28sK919c,1711 +filelock/_unix.py,sha256=-FXP0tjInBHUYygOlMpp4taUmD87QOkrD_4ybg_iT7Q,2259 +filelock/_util.py,sha256=QHBoNFIYfbAThhotH3Q8E2acFc84wpG49-T-uu017ZE,1715 +filelock/_windows.py,sha256=eMKL8dZKrgekf5VYVGR14an29JGEInRtUO8ui9ABywg,2177 +filelock/asyncio.py,sha256=3D4JP4Ms5IXTGib5eOekyr6uH6rZlieV_moVGY36juA,12463 +filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +filelock/version.py,sha256=KSOBzuLwiqiVWDPGfMj1ntr25YrY6JBDr8RvinQX_FM,413 diff --git a/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..cdd68a497cdfa8d3f2b837225beacef711b85047 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/filelock-3.16.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/deepseek/lib/python3.10/site-packages/isympy.py b/deepseek/lib/python3.10/site-packages/isympy.py new file mode 100644 index 0000000000000000000000000000000000000000..50e9bc78d08904b8c177105ee90d984ea4b01d20 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/isympy.py @@ -0,0 +1,342 @@ +""" +Python shell for SymPy. 
+ +This is just a normal Python shell (IPython shell if you have the +IPython package installed), that executes the following commands for +the user: + + >>> from __future__ import division + >>> from sympy import * + >>> x, y, z, t = symbols('x y z t') + >>> k, m, n = symbols('k m n', integer=True) + >>> f, g, h = symbols('f g h', cls=Function) + >>> init_printing() + +So starting 'isympy' is equivalent to starting Python (or IPython) and +executing the above commands by hand. It is intended for easy and quick +experimentation with SymPy. isympy is a good way to use SymPy as an +interactive calculator. If you have IPython and Matplotlib installed, then +interactive plotting is enabled by default. + +COMMAND LINE OPTIONS +-------------------- + +-c CONSOLE, --console=CONSOLE + + Use the specified shell (Python or IPython) shell as the console + backend instead of the default one (IPython if present, Python + otherwise), e.g.: + + $isympy -c python + + CONSOLE must be one of 'ipython' or 'python' + +-p PRETTY, --pretty PRETTY + + Setup pretty-printing in SymPy. When pretty-printing is enabled, + expressions can be printed with Unicode or ASCII. The default is + to use pretty-printing (with Unicode if the terminal supports it). + When this option is 'no', expressions will not be pretty-printed + and ASCII will be used: + + $isympy -p no + + PRETTY must be one of 'unicode', 'ascii', or 'no' + +-t TYPES, --types=TYPES + + Setup the ground types for the polys. By default, gmpy ground types + are used if gmpy2 or gmpy is installed, otherwise it falls back to python + ground types, which are a little bit slower. You can manually + choose python ground types even if gmpy is installed (e.g., for + testing purposes): + + $isympy -t python + + TYPES must be one of 'gmpy', 'gmpy1' or 'python' + + Note that the ground type gmpy1 is primarily intended for testing; it + forces the use of gmpy version 1 even if gmpy2 is available. 
+ + This is the same as setting the environment variable + SYMPY_GROUND_TYPES to the given ground type (e.g., + SYMPY_GROUND_TYPES='gmpy') + + The ground types can be determined interactively from the variable + sympy.polys.domains.GROUND_TYPES. + +-o ORDER, --order ORDER + + Setup the ordering of terms for printing. The default is lex, which + orders terms lexicographically (e.g., x**2 + x + 1). You can choose + other orderings, such as rev-lex, which will use reverse + lexicographic ordering (e.g., 1 + x + x**2): + + $isympy -o rev-lex + + ORDER must be one of 'lex', 'rev-lex', 'grlex', 'rev-grlex', + 'grevlex', 'rev-grevlex', 'old', or 'none'. + + Note that for very large expressions, ORDER='none' may speed up + printing considerably but the terms will have no canonical order. + +-q, --quiet + + Print only Python's and SymPy's versions to stdout at startup. + +-d, --doctest + + Use the same format that should be used for doctests. This is + equivalent to -c python -p no. + +-C, --no-cache + + Disable the caching mechanism. Disabling the cache may slow certain + operations down considerably. This is useful for testing the cache, + or for benchmarking, as the cache can result in deceptive timings. + + This is equivalent to setting the environment variable + SYMPY_USE_CACHE to 'no'. + +-a, --auto-symbols (requires at least IPython 0.11) + + Automatically create missing symbols. Normally, typing a name of a + Symbol that has not been instantiated first would raise NameError, + but with this option enabled, any undefined name will be + automatically created as a Symbol. + + Note that this is intended only for interactive, calculator style + usage. In a script that uses SymPy, Symbols should be instantiated + at the top, so that it's clear what they are. + + This will not override any names that are already defined, which + includes the single character letters represented by the mnemonic + QCOSINE (see the "Gotchas and Pitfalls" document in the + documentation). 
You can delete existing names by executing "del + name". If a name is defined, typing "'name' in dir()" will return True. + + The Symbols that are created using this have default assumptions. + If you want to place assumptions on symbols, you should create them + using symbols() or var(). + + Finally, this only works in the top level namespace. So, for + example, if you define a function in isympy with an undefined + Symbol, it will not work. + + See also the -i and -I options. + +-i, --int-to-Integer (requires at least IPython 0.11) + + Automatically wrap int literals with Integer. This makes it so that + things like 1/2 will come out as Rational(1, 2), rather than 0.5. This + works by preprocessing the source and wrapping all int literals with + Integer. Note that this will not change the behavior of int literals + assigned to variables, and it also won't change the behavior of functions + that return int literals. + + If you want an int, you can wrap the literal in int(), e.g. int(3)/int(2) + gives 1.5 (with division imported from __future__). + +-I, --interactive (requires at least IPython 0.11) + + This is equivalent to --auto-symbols --int-to-Integer. Future options + designed for ease of interactive use may be added to this. + +-D, --debug + + Enable debugging output. This is the same as setting the + environment variable SYMPY_DEBUG to 'True'. The debug status is set + in the variable SYMPY_DEBUG within isympy. + +-- IPython options + + Additionally you can pass command line options directly to the IPython + interpreter (the standard Python shell is not supported). However you + need to add the '--' separator between two types of options, e.g the + startup banner option and the colors option. You need to enter the + options as required by the version of IPython that you are using, too: + + in IPython 0.11, + + $isympy -q -- --colors=NoColor + + or older versions of IPython, + + $isympy -q -- -colors NoColor + +See also isympy --help. 
+""" + +import os +import sys + +# DO NOT IMPORT SYMPY HERE! Or the setting of the sympy environment variables +# by the command line will break. + +def main() -> None: + from argparse import ArgumentParser, RawDescriptionHelpFormatter + + VERSION = None + if '--version' in sys.argv: + # We cannot import sympy before this is run, because flags like -C and + # -t set environment variables that must be set before SymPy is + # imported. The only thing we need to import it for is to get the + # version, which only matters with the --version flag. + import sympy + VERSION = sympy.__version__ + + usage = 'isympy [options] -- [ipython options]' + parser = ArgumentParser( + usage=usage, + description=__doc__, + formatter_class=RawDescriptionHelpFormatter, + ) + + parser.add_argument('--version', action='version', version=VERSION) + + parser.add_argument( + '-c', '--console', + dest='console', + action='store', + default=None, + choices=['ipython', 'python'], + metavar='CONSOLE', + help='select type of interactive session: ipython | python; defaults ' + 'to ipython if IPython is installed, otherwise python') + + parser.add_argument( + '-p', '--pretty', + dest='pretty', + action='store', + default=None, + metavar='PRETTY', + choices=['unicode', 'ascii', 'no'], + help='setup pretty printing: unicode | ascii | no; defaults to ' + 'unicode printing if the terminal supports it, otherwise ascii') + + parser.add_argument( + '-t', '--types', + dest='types', + action='store', + default=None, + metavar='TYPES', + choices=['gmpy', 'gmpy1', 'python'], + help='setup ground types: gmpy | gmpy1 | python; defaults to gmpy if gmpy2 ' + 'or gmpy is installed, otherwise python') + + parser.add_argument( + '-o', '--order', + dest='order', + action='store', + default=None, + metavar='ORDER', + choices=['lex', 'grlex', 'grevlex', 'rev-lex', 'rev-grlex', 'rev-grevlex', 'old', 'none'], + help='setup ordering of terms: [rev-]lex | [rev-]grlex | [rev-]grevlex | old | none; defaults to lex') + + 
parser.add_argument( + '-q', '--quiet', + dest='quiet', + action='store_true', + default=False, + help='print only version information at startup') + + parser.add_argument( + '-d', '--doctest', + dest='doctest', + action='store_true', + default=False, + help='use the doctest format for output (you can just copy and paste it)') + + parser.add_argument( + '-C', '--no-cache', + dest='cache', + action='store_false', + default=True, + help='disable caching mechanism') + + parser.add_argument( + '-a', '--auto-symbols', + dest='auto_symbols', + action='store_true', + default=False, + help='automatically construct missing symbols') + + parser.add_argument( + '-i', '--int-to-Integer', + dest='auto_int_to_Integer', + action='store_true', + default=False, + help="automatically wrap int literals with Integer") + + parser.add_argument( + '-I', '--interactive', + dest='interactive', + action='store_true', + default=False, + help="equivalent to -a -i") + + parser.add_argument( + '-D', '--debug', + dest='debug', + action='store_true', + default=False, + help='enable debugging output') + + (options, ipy_args) = parser.parse_known_args() + if '--' in ipy_args: + ipy_args.remove('--') + + if not options.cache: + os.environ['SYMPY_USE_CACHE'] = 'no' + + if options.types: + os.environ['SYMPY_GROUND_TYPES'] = options.types + + if options.debug: + os.environ['SYMPY_DEBUG'] = str(options.debug) + + if options.doctest: + options.pretty = 'no' + options.console = 'python' + + session = options.console + + if session is not None: + ipython = session == 'ipython' + else: + try: + import IPython + ipython = True + except ImportError: + if not options.quiet: + from sympy.interactive.session import no_ipython + print(no_ipython) + ipython = False + + args = { + 'pretty_print': True, + 'use_unicode': None, + 'use_latex': None, + 'order': None, + 'argv': ipy_args, + } + + if options.pretty == 'unicode': + args['use_unicode'] = True + elif options.pretty == 'ascii': + args['use_unicode'] = False + 
elif options.pretty == 'no': + args['pretty_print'] = False + + if options.order is not None: + args['order'] = options.order + + args['quiet'] = options.quiet + args['auto_symbols'] = options.auto_symbols or options.interactive + args['auto_int_to_Integer'] = options.auto_int_to_Integer or options.interactive + + from sympy.interactive import init_session + init_session(ipython, **args) + +if __name__ == "__main__": + main() diff --git a/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/LICENSE b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..090132eeeaa640b8042fdc0bd97d3dbe6a253a18 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2022 Tim Schwenke + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..f78fd7e213db0ab7c004b7d8615d0aa799e497a0 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/METADATA @@ -0,0 +1,339 @@ +Metadata-Version: 2.1 +Name: prometheus-fastapi-instrumentator +Version: 7.0.0 +Summary: Instrument your FastAPI with Prometheus metrics. +Home-page: https://github.com/trallnag/prometheus-fastapi-instrumentator +License: ISC +Keywords: prometheus,instrumentation,fastapi,exporter,metrics +Author: Tim Schwenke +Author-email: tim@trallnag.com +Requires-Python: >=3.8.1,<4.0.0 +Classifier: License :: OSI Approved +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Dist: prometheus-client (>=0.8.0,<1.0.0) +Requires-Dist: starlette (>=0.30.0,<1.0.0) +Project-URL: Repository, https://github.com/trallnag/prometheus-fastapi-instrumentator +Description-Content-Type: text/markdown + +# Prometheus FastAPI Instrumentator + +[![pypi-version](https://badge.fury.io/py/prometheus-fastapi-instrumentator.svg)](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator) +[![python-versions](https://img.shields.io/pypi/pyversions/prometheus-fastapi-instrumentator.svg)](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator) +[![downloads](https://pepy.tech/badge/prometheus-fastapi-instrumentator/month)](https://pepy.tech/project/prometheus-fastapi-instrumentator/month) 
+[![build](https://img.shields.io/github/actions/workflow/status/trallnag/kubestatus2cloudwatch/ci.yaml?branch=master)](https://github.com/trallnag/kubestatus2cloudwatch/actions) +[![codecov](https://codecov.io/gh/trallnag/prometheus-fastapi-instrumentator/branch/master/graph/badge.svg)](https://codecov.io/gh/trallnag/prometheus-fastapi-instrumentator) + +A configurable and modular Prometheus Instrumentator for your FastAPI. Install +`prometheus-fastapi-instrumentator` from +[PyPI](https://pypi.python.org/pypi/prometheus-fastapi-instrumentator/). Here is +the fast track to get started with a pre-configured instrumentator. Import the +instrumentator class: + +```python +from prometheus_fastapi_instrumentator import Instrumentator +``` + +Instrument your app with default metrics and expose the metrics: + +```python +Instrumentator().instrument(app).expose(app) +``` + +Depending on your code you might have to use the following instead: + +```python +instrumentator = Instrumentator().instrument(app) + +@app.on_event("startup") +async def _startup(): + instrumentator.expose(app) +``` + +With this, your FastAPI is instrumented and metrics are ready to be scraped. The +defaults give you: + +- Counter `http_requests_total` with `handler`, `status` and `method`. Total + number of requests. +- Summary `http_request_size_bytes` with `handler`. Added up total of the + content lengths of all incoming requests. +- Summary `http_response_size_bytes` with `handler`. Added up total of the + content lengths of all outgoing responses. +- Histogram `http_request_duration_seconds` with `handler` and `method`. Only a + few buckets to keep cardinality low. +- Histogram `http_request_duration_highr_seconds` without any labels. Large + number of buckets (>20). + +In addition, following behavior is active: + +- Status codes are grouped into `2xx`, `3xx` and so on. +- Requests without a matching template are grouped into the handler `none`. 
+ +If one of these presets does not suit your needs you can do one of multiple +things: + +- Pick one of the already existing closures from + [`metrics`](./src/prometheus_fastapi_instrumentator/metrics.py) and pass it to + the instrumentator instance. See [here](#adding-metrics) how to do that. +- Create your own instrumentation function that you can pass to an + instrumentator instance. See [here](#creating-new-metrics) to learn how more. +- Don't use this package at all and just use the source code as inspiration on + how to instrument your FastAPI. + +## Table of Contents + + + +- [Disclaimer](#disclaimer) +- [Features](#features) +- [Advanced Usage](#advanced-usage) + - [Creating the Instrumentator](#creating-the-instrumentator) + - [Adding metrics](#adding-metrics) + - [Creating new metrics](#creating-new-metrics) + - [Perform instrumentation](#perform-instrumentation) + - [Specify namespace and subsystem](#specify-namespace-and-subsystem) + - [Exposing endpoint](#exposing-endpoint) +- [Contributing](#contributing) +- [Licensing](#licensing) + + + +## Disclaimer + +Not made for generic Prometheus instrumentation in Python. Use the Prometheus +client library for that. This packages uses it as well. + +All the generic middleware and instrumentation code comes with a cost in +performance that can become noticeable. + +## Features + +Beyond the fast track, this instrumentator is **highly configurable** and it is +very easy to customize and adapt to your specific use case. Here is a list of +some of these options you may opt-in to: + +- Regex patterns to ignore certain routes. +- Completely ignore untemplated routes. +- Control instrumentation and exposition with an env var. +- Rounding of latencies to a certain decimal number. +- Renaming of labels and the metric. +- Metrics endpoint can compress data with gzip. +- Opt-in metric to monitor the number of requests in progress. 
+ +It also features a **modular approach to metrics** that should instrument all +FastAPI endpoints. You can either choose from a set of already existing metrics +or create your own. And every metric function by itself can be configured as +well. + +## Advanced Usage + +This chapter contains an example on the advanced usage of the Prometheus FastAPI +Instrumentator to showcase most of it's features. + +### Creating the Instrumentator + +We start by creating an instance of the Instrumentator. Notice the additional +`metrics` import. This will come in handy later. + +```python +from prometheus_fastapi_instrumentator import Instrumentator, metrics + +instrumentator = Instrumentator( + should_group_status_codes=False, + should_ignore_untemplated=True, + should_respect_env_var=True, + should_instrument_requests_inprogress=True, + excluded_handlers=[".*admin.*", "/metrics"], + env_var_name="ENABLE_METRICS", + inprogress_name="inprogress", + inprogress_labels=True, +) +``` + +Unlike in the fast track example, now the instrumentation and exposition will +only take place if the environment variable `ENABLE_METRICS` is `true` at +run-time. This can be helpful in larger deployments with multiple services +depending on the same base FastAPI. + +### Adding metrics + +Let's say we also want to instrument the size of requests and responses. For +this we use the `add()` method. This method does nothing more than taking a +function and adding it to a list. Then during run-time every time FastAPI +handles a request all functions in this list will be called while giving them a +single argument that stores useful information like the request and response +objects. If no `add()` at all is used, the default metric gets added in the +background. This is what happens in the fast track example. + +All instrumentation functions are stored as closures in the `metrics` module. + +Closures come in handy here because it allows us to configure the functions +within. 
+ +```python +instrumentator.add(metrics.latency(buckets=(1, 2, 3,))) +``` + +This simply adds the metric you also get in the fast track example with a +modified buckets argument. But we would also like to record the size of all +requests and responses. + +```python +instrumentator.add( + metrics.request_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="a", + metric_subsystem="b", + ) +).add( + metrics.response_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="namespace", + metric_subsystem="subsystem", + ) +) +``` + +You can add as many metrics you like to the instrumentator. + +### Creating new metrics + +As already mentioned, it is possible to create custom functions to pass on to +`add()`. This is also how the default metrics are implemented. + +The basic idea is that the instrumentator creates an `info` object that contains +everything necessary for instrumentation based on the configuration of the +instrumentator. This includes the raw request and response objects but also the +modified handler, grouped status code and duration. Next, all registered +instrumentation functions are called. They get `info` as their single argument. + +Let's say we want to count the number of times a certain language has been +requested. 
+ +```python +from typing import Callable +from prometheus_fastapi_instrumentator.metrics import Info +from prometheus_client import Counter + +def http_requested_languages_total() -> Callable[[Info], None]: + METRIC = Counter( + "http_requested_languages_total", + "Number of times a certain language has been requested.", + labelnames=("langs",) + ) + + def instrumentation(info: Info) -> None: + langs = set() + lang_str = info.request.headers["Accept-Language"] + for element in lang_str.split(","): + element = element.split(";")[0].strip().lower() + langs.add(element) + for language in langs: + METRIC.labels(language).inc() + + return instrumentation +``` + +The function `http_requested_languages_total` is used for persistent elements +that are stored between all instrumentation executions (for example the metric +instance itself). Next comes the closure. This function must adhere to the shown +interface. It will always get an `Info` object that contains the request, +response and a few other modified informations. For example the (grouped) status +code or the handler. Finally, the closure is returned. + +**Important:** The response object inside `info` can either be the response +object or `None`. In addition, errors thrown in the handler are not caught by +the instrumentator. I recommend to check the documentation and/or the source +code before creating your own metrics. + +To use it, we hand over the closure to the instrumentator object. + +```python +instrumentator.add(http_requested_languages_total()) +``` + +### Perform instrumentation + +Up to this point, the FastAPI has not been touched at all. Everything has been +stored in the `instrumentator` only. To actually register the instrumentation +with FastAPI, the `instrument()` method has to be called. 
+ +```python +instrumentator.instrument(app) +``` + +Notice that this will do nothing if `should_respect_env_var` has been set during +construction of the instrumentator object and the respective env var is not +found. + +### Specify namespace and subsystem + +You can specify the namespace and subsystem of the metrics by passing them in +the instrument method. + +```python +from prometheus_fastapi_instrumentator import Instrumentator + +@app.on_event("startup") +async def startup(): + Instrumentator().instrument(app, metric_namespace='myproject', metric_subsystem='myservice').expose(app) +``` + +Then your metrics will contain the namespace and subsystem in the metric name. + +```sh +# TYPE myproject_myservice_http_request_duration_highr_seconds histogram +myproject_myservice_http_request_duration_highr_seconds_bucket{le="0.01"} 0.0 +``` + +### Exposing endpoint + +To expose an endpoint for the metrics either follow +[Prometheus Python Client](https://github.com/prometheus/client_python) and add +the endpoint manually to the FastAPI or serve it on a separate server. You can +also use the included `expose` method. It will add an endpoint to the given +FastAPI. With `should_gzip` you can instruct the endpoint to compress the data +as long as the client accepts gzip encoding. Prometheus for example does by +default. Beware that network bandwith is often cheaper than CPU cycles. + +```python +instrumentator.expose(app, include_in_schema=False, should_gzip=True) +``` + +Notice that this will to nothing if `should_respect_env_var` has been set during +construction of the instrumentator object and the respective env var is not +found. + +## Contributing + +Please refer to [`CONTRIBUTING.md`](CONTRIBUTING). + +Consult [`DEVELOPMENT.md`](DEVELOPMENT.md) for guidance regarding development. + +Read [`RELEASE.md`](RELEASE.md) for details about the release process. 
+ +## Licensing + +The default license for this project is the +[ISC License](https://choosealicense.com/licenses/isc). A permissive license +functionally equivalent to the BSD 2-Clause and MIT licenses, removing some +language that is no longer necessary. See [`LICENSE`](LICENSE) for the license +text. + +The [BSD 3-Clause License](https://choosealicense.com/licenses/bsd-3-clause) is +used as the license for the +[`routing`](src/prometheus_fastapi_instrumentator/routing.py) module. This is +due to it containing code from +[elastic/apm-agent-python](https://github.com/elastic/apm-agent-python). BSD +3-Clause is a permissive license similar to the BSD 2-Clause License, but with a +3rd clause that prohibits others from using the name of the copyright holder or +its contributors to promote derived products without written consent. The +license text is included in the module itself. + diff --git a/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..630edd58b54a51b37bfdb921becae95ee2c35e04 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/prometheus_fastapi_instrumentator-7.0.0.dist-info/RECORD @@ -0,0 +1,17 @@ +prometheus_fastapi_instrumentator-7.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +prometheus_fastapi_instrumentator-7.0.0.dist-info/LICENSE,sha256=1Bb46zX6e7vYSh8YDT_6oXB-XpP6E2AyHdQtnXY9Cfw,762 +prometheus_fastapi_instrumentator-7.0.0.dist-info/METADATA,sha256=_D1vbKE2TeerP-Nbw4LbehbmEintN5ezG34dJIYfVvw,13110 +prometheus_fastapi_instrumentator-7.0.0.dist-info/RECORD,, +prometheus_fastapi_instrumentator-7.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +prometheus_fastapi_instrumentator-7.0.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88 
+prometheus_fastapi_instrumentator/__init__.py,sha256=ilCcCHTBzi04jKjOR70MkMt2Y9uGNwabCRm3Yi55m6Q,134 +prometheus_fastapi_instrumentator/__pycache__/__init__.cpython-310.pyc,, +prometheus_fastapi_instrumentator/__pycache__/instrumentation.cpython-310.pyc,, +prometheus_fastapi_instrumentator/__pycache__/metrics.cpython-310.pyc,, +prometheus_fastapi_instrumentator/__pycache__/middleware.cpython-310.pyc,, +prometheus_fastapi_instrumentator/__pycache__/routing.cpython-310.pyc,, +prometheus_fastapi_instrumentator/instrumentation.py,sha256=ZZlvPfEvIhzi0PVbxEWo3AXjNcZe56EjxkNHYl-422A,13750 +prometheus_fastapi_instrumentator/metrics.py,sha256=z87HnRSO3TpKorElRtO2ceWWELsgNSRO9TA8JDZLHuM,27724 +prometheus_fastapi_instrumentator/middleware.py,sha256=guRv1ptWdDm8Z7GW3p2zaNZc3opFWaB_QRReS-RlBjM,9484 +prometheus_fastapi_instrumentator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +prometheus_fastapi_instrumentator/routing.py,sha256=uQ0I9gHF7IIkVjBmfAy8Ax8A3wOChLTmH0aUXRgshfs,4028 diff --git a/deepseek/lib/python3.10/site-packages/sentencepiece/__pycache__/sentencepiece_model_pb2.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/sentencepiece/__pycache__/sentencepiece_model_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..607c2d5c0671f7bd58f1f01f9aaf275bb4082845 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/sentencepiece/__pycache__/sentencepiece_model_pb2.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/sentencepiece/_version.py b/deepseek/lib/python3.10/site-packages/sentencepiece/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..7fd229a32b5eefeffa54b9c187302d2c03a0a680 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/sentencepiece/_version.py @@ -0,0 +1 @@ +__version__ = '0.2.0' diff --git a/deepseek/lib/python3.10/site-packages/sentencepiece/sentencepiece_model_pb2.py 
b/deepseek/lib/python3.10/site-packages/sentencepiece/sentencepiece_model_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..b07107d69de178e107544a29bb4d0280b5482241 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/sentencepiece/sentencepiece_model_pb2.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sentencepiece_model.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19sentencepiece_model.proto\x12\rsentencepiece\"\x80\x0c\n\x0bTrainerSpec\x12\r\n\x05input\x18\x01 \x03(\t\x12\x14\n\x0cinput_format\x18\x07 \x01(\t\x12\x14\n\x0cmodel_prefix\x18\x02 \x01(\t\x12\x41\n\nmodel_type\x18\x03 \x01(\x0e\x32$.sentencepiece.TrainerSpec.ModelType:\x07UNIGRAM\x12\x18\n\nvocab_size\x18\x04 \x01(\x05:\x04\x38\x30\x30\x30\x12\x17\n\x0f\x61\x63\x63\x65pt_language\x18\x05 \x03(\t\x12 \n\x15self_test_sample_size\x18\x06 \x01(\x05:\x01\x30\x12*\n\x1b\x65nable_differential_privacy\x18\x32 \x01(\x08:\x05\x66\x61lse\x12+\n differential_privacy_noise_level\x18\x33 \x01(\x02:\x01\x30\x12\x32\n\'differential_privacy_clipping_threshold\x18\x34 \x01(\x04:\x01\x30\x12\"\n\x12\x63haracter_coverage\x18\n \x01(\x02:\x06\x30.9995\x12\x1e\n\x13input_sentence_size\x18\x0b \x01(\x04:\x01\x30\x12$\n\x16shuffle_input_sentence\x18\x13 \x01(\x08:\x04true\x12 \n\x14mining_sentence_size\x18\x0c \x01(\x05\x42\x02\x18\x01\x12\"\n\x16training_sentence_size\x18\r \x01(\x05\x42\x02\x18\x01\x12(\n\x17seed_sentencepiece_size\x18\x0e \x01(\x05:\x07\x31\x30\x30\x30\x30\x30\x30\x12\x1e\n\x10shrinking_factor\x18\x0f 
\x01(\x02:\x04\x30.75\x12!\n\x13max_sentence_length\x18\x12 \x01(\x05:\x04\x34\x31\x39\x32\x12\x17\n\x0bnum_threads\x18\x10 \x01(\x05:\x02\x31\x36\x12\x1d\n\x12num_sub_iterations\x18\x11 \x01(\x05:\x01\x32\x12$\n\x18max_sentencepiece_length\x18\x14 \x01(\x05:\x02\x31\x36\x12%\n\x17split_by_unicode_script\x18\x15 \x01(\x08:\x04true\x12\x1d\n\x0fsplit_by_number\x18\x17 \x01(\x08:\x04true\x12!\n\x13split_by_whitespace\x18\x16 \x01(\x08:\x04true\x12)\n\x1atreat_whitespace_as_suffix\x18\x18 \x01(\x08:\x05\x66\x61lse\x12+\n\x1c\x61llow_whitespace_only_pieces\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0csplit_digits\x18\x19 \x01(\x08:\x05\x66\x61lse\x12#\n\x19pretokenization_delimiter\x18\x35 \x01(\t:\x00\x12\x17\n\x0f\x63ontrol_symbols\x18\x1e \x03(\t\x12\x1c\n\x14user_defined_symbols\x18\x1f \x03(\t\x12\x16\n\x0erequired_chars\x18$ \x01(\t\x12\x1c\n\rbyte_fallback\x18# \x01(\x08:\x05\x66\x61lse\x12+\n\x1dvocabulary_output_piece_score\x18 \x01(\x08:\x04true\x12\x1e\n\x10hard_vocab_limit\x18! \x01(\x08:\x04true\x12\x1c\n\ruse_all_vocab\x18\" \x01(\x08:\x05\x66\x61lse\x12\x11\n\x06unk_id\x18( \x01(\x05:\x01\x30\x12\x11\n\x06\x62os_id\x18) \x01(\x05:\x01\x31\x12\x11\n\x06\x65os_id\x18* \x01(\x05:\x01\x32\x12\x12\n\x06pad_id\x18+ \x01(\x05:\x02-1\x12\x18\n\tunk_piece\x18- \x01(\t:\x05\x12\x16\n\tbos_piece\x18. 
\x01(\t:\x03\x12\x17\n\teos_piece\x18/ \x01(\t:\x04\x12\x18\n\tpad_piece\x18\x30 \x01(\t:\x05\x12\x1a\n\x0bunk_surface\x18, \x01(\t:\x05 \xe2\x81\x87 \x12+\n\x1ctrain_extremely_large_corpus\x18\x31 \x01(\x08:\x05\x66\x61lse\"5\n\tModelType\x12\x0b\n\x07UNIGRAM\x10\x01\x12\x07\n\x03\x42PE\x10\x02\x12\x08\n\x04WORD\x10\x03\x12\x08\n\x04\x43HAR\x10\x04*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"\xd1\x01\n\x0eNormalizerSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1c\n\x14precompiled_charsmap\x18\x02 \x01(\x0c\x12\x1e\n\x10\x61\x64\x64_dummy_prefix\x18\x03 \x01(\x08:\x04true\x12&\n\x18remove_extra_whitespaces\x18\x04 \x01(\x08:\x04true\x12 \n\x12\x65scape_whitespaces\x18\x05 \x01(\x08:\x04true\x12\x1e\n\x16normalization_rule_tsv\x18\x06 \x01(\t*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"y\n\x0cSelfTestData\x12\x33\n\x07samples\x18\x01 \x03(\x0b\x32\".sentencepiece.SelfTestData.Sample\x1a)\n\x06Sample\x12\r\n\x05input\x18\x01 \x01(\t\x12\x10\n\x08\x65xpected\x18\x02 \x01(\t*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"\xfe\x03\n\nModelProto\x12\x37\n\x06pieces\x18\x01 \x03(\x0b\x32\'.sentencepiece.ModelProto.SentencePiece\x12\x30\n\x0ctrainer_spec\x18\x02 \x01(\x0b\x32\x1a.sentencepiece.TrainerSpec\x12\x36\n\x0fnormalizer_spec\x18\x03 \x01(\x0b\x32\x1d.sentencepiece.NormalizerSpec\x12\x33\n\x0eself_test_data\x18\x04 \x01(\x0b\x32\x1b.sentencepiece.SelfTestData\x12\x38\n\x11\x64\x65normalizer_spec\x18\x05 \x01(\x0b\x32\x1d.sentencepiece.NormalizerSpec\x1a\xd2\x01\n\rSentencePiece\x12\r\n\x05piece\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x42\n\x04type\x18\x03 \x01(\x0e\x32,.sentencepiece.ModelProto.SentencePiece.Type:\x06NORMAL\"T\n\x04Type\x12\n\n\x06NORMAL\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x0b\n\x07\x43ONTROL\x10\x03\x12\x10\n\x0cUSER_DEFINED\x10\x04\x12\x08\n\x04\x42YTE\x10\x06\x12\n\n\x06UNUSED\x10\x05*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\x42\x02H\x03') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'sentencepiece_model_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'H\003' + _TRAINERSPEC.fields_by_name['mining_sentence_size']._options = None + _TRAINERSPEC.fields_by_name['mining_sentence_size']._serialized_options = b'\030\001' + _TRAINERSPEC.fields_by_name['training_sentence_size']._options = None + _TRAINERSPEC.fields_by_name['training_sentence_size']._serialized_options = b'\030\001' + _TRAINERSPEC._serialized_start=45 + _TRAINERSPEC._serialized_end=1581 + _TRAINERSPEC_MODELTYPE._serialized_start=1517 + _TRAINERSPEC_MODELTYPE._serialized_end=1570 + _NORMALIZERSPEC._serialized_start=1584 + _NORMALIZERSPEC._serialized_end=1793 + _SELFTESTDATA._serialized_start=1795 + _SELFTESTDATA._serialized_end=1916 + _SELFTESTDATA_SAMPLE._serialized_start=1864 + _SELFTESTDATA_SAMPLE._serialized_end=1905 + _MODELPROTO._serialized_start=1919 + _MODELPROTO._serialized_end=2429 + _MODELPROTO_SENTENCEPIECE._serialized_start=2208 + _MODELPROTO_SENTENCEPIECE._serialized_end=2418 + _MODELPROTO_SENTENCEPIECE_TYPE._serialized_start=2323 + _MODELPROTO_SENTENCEPIECE_TYPE._serialized_end=2407 +# @@protoc_insertion_point(module_scope) diff --git a/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/INSTALLER b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a95151c2aeef00ef9017ef3b38c0276d464d618c --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/METADATA @@ 
-0,0 +1,557 @@ +Metadata-Version: 2.1 +Name: torch +Version: 2.5.1 +Summary: Tensors and Dynamic neural networks in Python with strong GPU acceleration +Home-page: https://pytorch.org/ +Download-URL: https://github.com/pytorch/pytorch/tags +Author: PyTorch Team +Author-email: packages@pytorch.org +License: BSD-3-Clause +Keywords: pytorch,machine learning +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Programming Language :: C++ +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: NOTICE +Requires-Dist: filelock +Requires-Dist: typing-extensions (>=4.8.0) +Requires-Dist: networkx +Requires-Dist: jinja2 +Requires-Dist: fsspec +Requires-Dist: nvidia-cuda-nvrtc-cu12 (==12.4.127) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cuda-runtime-cu12 (==12.4.127) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cuda-cupti-cu12 (==12.4.127) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cudnn-cu12 (==9.1.0.70) ; platform_system == "Linux" and platform_machine == 
"x86_64" +Requires-Dist: nvidia-cublas-cu12 (==12.4.5.8) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cufft-cu12 (==11.2.1.3) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-curand-cu12 (==10.3.5.147) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cusolver-cu12 (==11.6.1.9) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-cusparse-cu12 (==12.3.1.170) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-nccl-cu12 (==2.21.5) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-nvtx-cu12 (==12.4.127) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: nvidia-nvjitlink-cu12 (==12.4.127) ; platform_system == "Linux" and platform_machine == "x86_64" +Requires-Dist: triton (==3.1.0) ; platform_system == "Linux" and platform_machine == "x86_64" and python_version < "3.13" +Requires-Dist: sympy (==1.12.1) ; python_version == "3.8" +Requires-Dist: setuptools ; python_version >= "3.12" +Requires-Dist: sympy (==1.13.1) ; python_version >= "3.9" +Provides-Extra: opt-einsum +Requires-Dist: opt-einsum (>=3.3) ; extra == 'opt-einsum' +Provides-Extra: optree +Requires-Dist: optree (>=0.12.0) ; extra == 'optree' + +![PyTorch Logo](https://github.com/pytorch/pytorch/raw/main/docs/source/_static/img/pytorch-logo-dark.png) + +-------------------------------------------------------------------------------- + +PyTorch is a Python package that provides two high-level features: +- Tensor computation (like NumPy) with strong GPU acceleration +- Deep neural networks built on a tape-based autograd system + +You can reuse your favorite Python packages such as NumPy, SciPy, and Cython to extend PyTorch when needed. + +Our trunk health (Continuous Integration signals) can be found at [hud.pytorch.org](https://hud.pytorch.org/ci/pytorch/pytorch/main). 
+ + + +- [More About PyTorch](#more-about-pytorch) + - [A GPU-Ready Tensor Library](#a-gpu-ready-tensor-library) + - [Dynamic Neural Networks: Tape-Based Autograd](#dynamic-neural-networks-tape-based-autograd) + - [Python First](#python-first) + - [Imperative Experiences](#imperative-experiences) + - [Fast and Lean](#fast-and-lean) + - [Extensions Without Pain](#extensions-without-pain) +- [Installation](#installation) + - [Binaries](#binaries) + - [NVIDIA Jetson Platforms](#nvidia-jetson-platforms) + - [From Source](#from-source) + - [Prerequisites](#prerequisites) + - [NVIDIA CUDA Support](#nvidia-cuda-support) + - [AMD ROCm Support](#amd-rocm-support) + - [Intel GPU Support](#intel-gpu-support) + - [Get the PyTorch Source](#get-the-pytorch-source) + - [Install Dependencies](#install-dependencies) + - [Install PyTorch](#install-pytorch) + - [Adjust Build Options (Optional)](#adjust-build-options-optional) + - [Docker Image](#docker-image) + - [Using pre-built images](#using-pre-built-images) + - [Building the image yourself](#building-the-image-yourself) + - [Building the Documentation](#building-the-documentation) + - [Previous Versions](#previous-versions) +- [Getting Started](#getting-started) +- [Resources](#resources) +- [Communication](#communication) +- [Releases and Contributing](#releases-and-contributing) +- [The Team](#the-team) +- [License](#license) + + + +## More About PyTorch + +[Learn the basics of PyTorch](https://pytorch.org/tutorials/beginner/basics/intro.html) + +At a granular level, PyTorch is a library that consists of the following components: + +| Component | Description | +| ---- | --- | +| [**torch**](https://pytorch.org/docs/stable/torch.html) | A Tensor library like NumPy, with strong GPU support | +| [**torch.autograd**](https://pytorch.org/docs/stable/autograd.html) | A tape-based automatic differentiation library that supports all differentiable Tensor operations in torch | +| 
[**torch.jit**](https://pytorch.org/docs/stable/jit.html) | A compilation stack (TorchScript) to create serializable and optimizable models from PyTorch code | +| [**torch.nn**](https://pytorch.org/docs/stable/nn.html) | A neural networks library deeply integrated with autograd designed for maximum flexibility | +| [**torch.multiprocessing**](https://pytorch.org/docs/stable/multiprocessing.html) | Python multiprocessing, but with magical memory sharing of torch Tensors across processes. Useful for data loading and Hogwild training | +| [**torch.utils**](https://pytorch.org/docs/stable/data.html) | DataLoader and other utility functions for convenience | + +Usually, PyTorch is used either as: + +- A replacement for NumPy to use the power of GPUs. +- A deep learning research platform that provides maximum flexibility and speed. + +Elaborating Further: + +### A GPU-Ready Tensor Library + +If you use NumPy, then you have used Tensors (a.k.a. ndarray). + +![Tensor illustration](./docs/source/_static/img/tensor_illustration.png) + +PyTorch provides Tensors that can live either on the CPU or the GPU and accelerates the +computation by a huge amount. + +We provide a wide variety of tensor routines to accelerate and fit your scientific computation needs +such as slicing, indexing, mathematical operations, linear algebra, reductions. +And they are fast! + +### Dynamic Neural Networks: Tape-Based Autograd + +PyTorch has a unique way of building neural networks: using and replaying a tape recorder. + +Most frameworks such as TensorFlow, Theano, Caffe, and CNTK have a static view of the world. +One has to build a neural network and reuse the same structure again and again. +Changing the way the network behaves means that one has to start from scratch. + +With PyTorch, we use a technique called reverse-mode auto-differentiation, which allows you to +change the way your network behaves arbitrarily with zero lag or overhead. 
Our inspiration comes +from several research papers on this topic, as well as current and past work such as +[torch-autograd](https://github.com/twitter/torch-autograd), +[autograd](https://github.com/HIPS/autograd), +[Chainer](https://chainer.org), etc. + +While this technique is not unique to PyTorch, it's one of the fastest implementations of it to date. +You get the best of speed and flexibility for your crazy research. + +![Dynamic graph](https://github.com/pytorch/pytorch/raw/main/docs/source/_static/img/dynamic_graph.gif) + +### Python First + +PyTorch is not a Python binding into a monolithic C++ framework. +It is built to be deeply integrated into Python. +You can use it naturally like you would use [NumPy](https://www.numpy.org/) / [SciPy](https://www.scipy.org/) / [scikit-learn](https://scikit-learn.org) etc. +You can write your new neural network layers in Python itself, using your favorite libraries +and use packages such as [Cython](https://cython.org/) and [Numba](http://numba.pydata.org/). +Our goal is to not reinvent the wheel where appropriate. + +### Imperative Experiences + +PyTorch is designed to be intuitive, linear in thought, and easy to use. +When you execute a line of code, it gets executed. There isn't an asynchronous view of the world. +When you drop into a debugger or receive error messages and stack traces, understanding them is straightforward. +The stack trace points to exactly where your code was defined. +We hope you never spend hours debugging your code because of bad stack traces or asynchronous and opaque execution engines. + +### Fast and Lean + +PyTorch has minimal framework overhead. We integrate acceleration libraries +such as [Intel MKL](https://software.intel.com/mkl) and NVIDIA ([cuDNN](https://developer.nvidia.com/cudnn), [NCCL](https://developer.nvidia.com/nccl)) to maximize speed. +At the core, its CPU and GPU Tensor and neural network backends +are mature and have been tested for years. 
+ +Hence, PyTorch is quite fast — whether you run small or large neural networks. + +The memory usage in PyTorch is extremely efficient compared to Torch or some of the alternatives. +We've written custom memory allocators for the GPU to make sure that +your deep learning models are maximally memory efficient. +This enables you to train bigger deep learning models than before. + +### Extensions Without Pain + +Writing new neural network modules, or interfacing with PyTorch's Tensor API was designed to be straightforward +and with minimal abstractions. + +You can write new neural network layers in Python using the torch API +[or your favorite NumPy-based libraries such as SciPy](https://pytorch.org/tutorials/advanced/numpy_extensions_tutorial.html). + +If you want to write your layers in C/C++, we provide a convenient extension API that is efficient and with minimal boilerplate. +No wrapper code needs to be written. You can see [a tutorial here](https://pytorch.org/tutorials/advanced/cpp_extension.html) and [an example here](https://github.com/pytorch/extension-cpp). + + +## Installation + +### Binaries +Commands to install binaries via Conda or pip wheels are on our website: [https://pytorch.org/get-started/locally/](https://pytorch.org/get-started/locally/) + + +#### NVIDIA Jetson Platforms + +Python wheels for NVIDIA's Jetson Nano, Jetson TX1/TX2, Jetson Xavier NX/AGX, and Jetson AGX Orin are provided [here](https://forums.developer.nvidia.com/t/pytorch-for-jetson-version-1-10-now-available/72048) and the L4T container is published [here](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/l4t-pytorch) + +They require JetPack 4.2 and above, and [@dusty-nv](https://github.com/dusty-nv) and [@ptrblck](https://github.com/ptrblck) are maintaining them. 
+ + +### From Source + +#### Prerequisites +If you are installing from source, you will need: +- Python 3.8 or later (for Linux, Python 3.8.1+ is needed) +- A compiler that fully supports C++17, such as clang or gcc (gcc 9.4.0 or newer is required, on Linux) +- Visual Studio or Visual Studio Build Tool on Windows + +\* PyTorch CI uses Visual C++ BuildTools, which come with Visual Studio Enterprise, +Professional, or Community Editions. You can also install the build tools from +https://visualstudio.microsoft.com/visual-cpp-build-tools/. The build tools *do not* +come with Visual Studio Code by default. + +\* We highly recommend installing an [Anaconda](https://www.anaconda.com/download) environment. You will get a high-quality BLAS library (MKL) and you get controlled dependency versions regardless of your Linux distro. + +An example of environment setup is shown below: + +* Linux: + +```bash +$ source /bin/activate +$ conda create -y -n +$ conda activate +``` + +* Windows: + +```bash +$ source \Scripts\activate.bat +$ conda create -y -n +$ conda activate +$ call "C:\Program Files\Microsoft Visual Studio\\Community\VC\Auxiliary\Build\vcvarsall.bat" x64 +``` + +##### NVIDIA CUDA Support +If you want to compile with CUDA support, [select a supported version of CUDA from our support matrix](https://pytorch.org/get-started/locally/), then install the following: +- [NVIDIA CUDA](https://developer.nvidia.com/cuda-downloads) +- [NVIDIA cuDNN](https://developer.nvidia.com/cudnn) v8.5 or above +- [Compiler](https://gist.github.com/ax3l/9489132) compatible with CUDA + +Note: You could refer to the [cuDNN Support Matrix](https://docs.nvidia.com/deeplearning/cudnn/reference/support-matrix.html) for cuDNN versions with the various supported CUDA, CUDA driver and NVIDIA hardware + +If you want to disable CUDA support, export the environment variable `USE_CUDA=0`. +Other potentially useful environment variables may be found in `setup.py`. 
+ +If you are building for NVIDIA's Jetson platforms (Jetson Nano, TX1, TX2, AGX Xavier), Instructions to install PyTorch for Jetson Nano are [available here](https://devtalk.nvidia.com/default/topic/1049071/jetson-nano/pytorch-for-jetson-nano/) + +##### AMD ROCm Support +If you want to compile with ROCm support, install +- [AMD ROCm](https://rocm.docs.amd.com/en/latest/deploy/linux/quick_start.html) 4.0 and above installation +- ROCm is currently supported only for Linux systems. + +If you want to disable ROCm support, export the environment variable `USE_ROCM=0`. +Other potentially useful environment variables may be found in `setup.py`. + +##### Intel GPU Support +If you want to compile with Intel GPU support, follow these +- [PyTorch Prerequisites for Intel GPUs](https://www.intel.com/content/www/us/en/developer/articles/tool/pytorch-prerequisites-for-intel-gpus.html) instructions. +- Intel GPU is supported for Linux and Windows. + +If you want to disable Intel GPU support, export the environment variable `USE_XPU=0`. +Other potentially useful environment variables may be found in `setup.py`. 
+ +#### Get the PyTorch Source +```bash +git clone --recursive https://github.com/pytorch/pytorch +cd pytorch +# if you are updating an existing checkout +git submodule sync +git submodule update --init --recursive +``` + +#### Install Dependencies + +**Common** + +```bash +conda install cmake ninja +# Run this command on native Windows +conda install rust +# Run this command from the PyTorch directory after cloning the source code using the “Get the PyTorch Source“ section below +pip install -r requirements.txt +``` + +**On Linux** + +```bash +pip install mkl-static mkl-include +# CUDA only: Add LAPACK support for the GPU if needed +conda install -c pytorch magma-cuda121 # or the magma-cuda* that matches your CUDA version from https://anaconda.org/pytorch/repo + +# (optional) If using torch.compile with inductor/triton, install the matching version of triton +# Run from the pytorch directory after cloning +# For Intel GPU support, please explicitly `export USE_XPU=1` before running command. +make triton +``` + +**On MacOS** + +```bash +# Add this package on intel x86 processor machines only +pip install mkl-static mkl-include +# Add these packages if torch.distributed is needed +conda install pkg-config libuv +``` + +**On Windows** + +```bash +pip install mkl-static mkl-include +# Add these packages if torch.distributed is needed. +# Distributed package support on Windows is a prototype feature and is subject to changes. +conda install -c conda-forge libuv=1.39 +``` + +#### Install PyTorch +**On Linux** + +If you would like to compile PyTorch with [new C++ ABI](https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html) enabled, then first run this command: +```bash +export _GLIBCXX_USE_CXX11_ABI=1 +``` + +Please **note** that starting from PyTorch 2.5, the PyTorch build with XPU supports both new and old C++ ABIs. Previously, XPU only supported the new C++ ABI. 
If you want to compile with Intel GPU support, please follow [Intel GPU Support](#intel-gpu-support). + +If you're compiling for AMD ROCm then first run this command: +```bash +# Only run this if you're compiling for ROCm +python tools/amd_build/build_amd.py +``` + +Install PyTorch +```bash +export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"} +python setup.py develop +``` + +> _Aside:_ If you are using [Anaconda](https://www.anaconda.com/distribution/#download-section), you may experience an error caused by the linker: +> +> ```plaintext +> build/temp.linux-x86_64-3.7/torch/csrc/stub.o: file not recognized: file format not recognized +> collect2: error: ld returned 1 exit status +> error: command 'g++' failed with exit status 1 +> ``` +> +> This is caused by `ld` from the Conda environment shadowing the system `ld`. You should use a newer version of Python that fixes this issue. The recommended Python version is 3.8.1+. + +**On macOS** + +```bash +python3 setup.py develop +``` + +**On Windows** + +If you want to build legacy python code, please refer to [Building on legacy code and CUDA](https://github.com/pytorch/pytorch/blob/main/CONTRIBUTING.md#building-on-legacy-code-and-cuda) + +**CPU-only builds** + +In this mode PyTorch computations will run on your CPU, not your GPU + +```cmd +python setup.py develop +``` + +Note on OpenMP: The desired OpenMP implementation is Intel OpenMP (iomp). In order to link against iomp, you'll need to manually download the library and set up the building environment by tweaking `CMAKE_INCLUDE_PATH` and `LIB`. The instruction [here](https://github.com/pytorch/pytorch/blob/main/docs/source/notes/windows.rst#building-from-source) is an example for setting up both MKL and Intel OpenMP. Without these configurations for CMake, Microsoft Visual C OpenMP runtime (vcomp) will be used. 
+ +**CUDA based build** + +In this mode PyTorch computations will leverage your GPU via CUDA for faster number crunching + +[NVTX](https://docs.nvidia.com/gameworks/content/gameworkslibrary/nvtx/nvidia_tools_extension_library_nvtx.htm) is needed to build Pytorch with CUDA. +NVTX is a part of CUDA distributive, where it is called "Nsight Compute". To install it onto an already installed CUDA run CUDA installation once again and check the corresponding checkbox. +Make sure that CUDA with Nsight Compute is installed after Visual Studio. + +Currently, VS 2017 / 2019, and Ninja are supported as the generator of CMake. If `ninja.exe` is detected in `PATH`, then Ninja will be used as the default generator, otherwise, it will use VS 2017 / 2019. +
If Ninja is selected as the generator, the latest MSVC will get selected as the underlying toolchain. + +Additional libraries such as +[Magma](https://developer.nvidia.com/magma), [oneDNN, a.k.a. MKLDNN or DNNL](https://github.com/oneapi-src/oneDNN), and [Sccache](https://github.com/mozilla/sccache) are often needed. Please refer to the [installation-helper](https://github.com/pytorch/pytorch/tree/main/.ci/pytorch/win-test-helpers/installation-helpers) to install them. + +You can refer to the [build_pytorch.bat](https://github.com/pytorch/pytorch/blob/main/.ci/pytorch/win-test-helpers/build_pytorch.bat) script for some other environment variables configurations + + +```cmd +cmd + +:: Set the environment variables after you have downloaded and unzipped the mkl package, +:: else CMake would throw an error as `Could NOT find OpenMP`. +set CMAKE_INCLUDE_PATH={Your directory}\mkl\include +set LIB={Your directory}\mkl\lib;%LIB% + +:: Read the content in the previous section carefully before you proceed. +:: [Optional] If you want to override the underlying toolset used by Ninja and Visual Studio with CUDA, please run the following script block. +:: "Visual Studio 2019 Developer Command Prompt" will be run automatically. +:: Make sure you have CMake >= 3.12 before you do this when you use the Visual Studio generator. 
+set CMAKE_GENERATOR_TOOLSET_VERSION=14.27 +set DISTUTILS_USE_SDK=1 +for /f "usebackq tokens=*" %i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -version [15^,17^) -products * -latest -property installationPath`) do call "%i\VC\Auxiliary\Build\vcvarsall.bat" x64 -vcvars_ver=%CMAKE_GENERATOR_TOOLSET_VERSION% + +:: [Optional] If you want to override the CUDA host compiler +set CUDAHOSTCXX=C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.27.29110\bin\HostX64\x64\cl.exe + +python setup.py develop + +``` + +##### Adjust Build Options (Optional) + +You can adjust the configuration of cmake variables optionally (without building first), by doing +the following. For example, adjusting the pre-detected directories for CuDNN or BLAS can be done +with such a step. + +On Linux +```bash +export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"} +python setup.py build --cmake-only +ccmake build # or cmake-gui build +``` + +On macOS +```bash +export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"} +MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ python setup.py build --cmake-only +ccmake build # or cmake-gui build +``` + +### Docker Image + +#### Using pre-built images + +You can also pull a pre-built docker image from Docker Hub and run with docker v19.03+ + +```bash +docker run --gpus all --rm -ti --ipc=host pytorch/pytorch:latest +``` + +Please note that PyTorch uses shared memory to share data between processes, so if torch multiprocessing is used (e.g. +for multithreaded data loaders) the default shared memory segment size that container runs with is not enough, and you +should increase shared memory size either with `--ipc=host` or `--shm-size` command line options to `nvidia-docker run`. + +#### Building the image yourself + +**NOTE:** Must be built with a docker version > 18.06 + +The `Dockerfile` is supplied to build images with CUDA 11.1 support and cuDNN v8. 
+You can pass `PYTHON_VERSION=x.y` make variable to specify which Python version is to be used by Miniconda, or leave it +unset to use the default. + +```bash +make -f docker.Makefile +# images are tagged as docker.io/${your_docker_username}/pytorch +``` + +You can also pass the `CMAKE_VARS="..."` environment variable to specify additional CMake variables to be passed to CMake during the build. +See [setup.py](./setup.py) for the list of available variables. + +```bash +make -f docker.Makefile +``` + +### Building the Documentation + +To build documentation in various formats, you will need [Sphinx](http://www.sphinx-doc.org) and the +readthedocs theme. + +```bash +cd docs/ +pip install -r requirements.txt +``` +You can then build the documentation by running `make ` from the +`docs/` folder. Run `make` to get a list of all available output formats. + +If you get a katex error run `npm install katex`. If it persists, try +`npm install -g katex` + +> Note: if you installed `nodejs` with a different package manager (e.g., +`conda`) then `npm` will probably install a version of `katex` that is not +compatible with your version of `nodejs` and doc builds will fail. +A combination of versions that is known to work is `node@6.13.1` and +`katex@0.13.18`. To install the latter with `npm` you can run +```npm install -g katex@0.13.18``` + +### Previous Versions + +Installation instructions and binaries for previous PyTorch versions may be found +on [our website](https://pytorch.org/previous-versions). 
+ + +## Getting Started + +Three-pointers to get you started: +- [Tutorials: get you started with understanding and using PyTorch](https://pytorch.org/tutorials/) +- [Examples: easy to understand PyTorch code across all domains](https://github.com/pytorch/examples) +- [The API Reference](https://pytorch.org/docs/) +- [Glossary](https://github.com/pytorch/pytorch/blob/main/GLOSSARY.md) + +## Resources + +* [PyTorch.org](https://pytorch.org/) +* [PyTorch Tutorials](https://pytorch.org/tutorials/) +* [PyTorch Examples](https://github.com/pytorch/examples) +* [PyTorch Models](https://pytorch.org/hub/) +* [Intro to Deep Learning with PyTorch from Udacity](https://www.udacity.com/course/deep-learning-pytorch--ud188) +* [Intro to Machine Learning with PyTorch from Udacity](https://www.udacity.com/course/intro-to-machine-learning-nanodegree--nd229) +* [Deep Neural Networks with PyTorch from Coursera](https://www.coursera.org/learn/deep-neural-networks-with-pytorch) +* [PyTorch Twitter](https://twitter.com/PyTorch) +* [PyTorch Blog](https://pytorch.org/blog/) +* [PyTorch YouTube](https://www.youtube.com/channel/UCWXI5YeOsh03QvJ59PMaXFw) + +## Communication +* Forums: Discuss implementations, research, etc. https://discuss.pytorch.org +* GitHub Issues: Bug reports, feature requests, install issues, RFCs, thoughts, etc. +* Slack: The [PyTorch Slack](https://pytorch.slack.com/) hosts a primary audience of moderate to experienced PyTorch users and developers for general chat, online discussions, collaboration, etc. If you are a beginner looking for help, the primary medium is [PyTorch Forums](https://discuss.pytorch.org). If you need a slack invite, please fill this form: https://goo.gl/forms/PP1AGvNHpSaJP8to1 +* Newsletter: No-noise, a one-way email newsletter with important announcements about PyTorch. You can sign-up here: https://eepurl.com/cbG0rv +* Facebook Page: Important announcements about PyTorch. 
https://www.facebook.com/pytorch +* For brand guidelines, please visit our website at [pytorch.org](https://pytorch.org/) + +## Releases and Contributing + +Typically, PyTorch has three minor releases a year. Please let us know if you encounter a bug by [filing an issue](https://github.com/pytorch/pytorch/issues). + +We appreciate all contributions. If you are planning to contribute back bug-fixes, please do so without any further discussion. + +If you plan to contribute new features, utility functions, or extensions to the core, please first open an issue and discuss the feature with us. +Sending a PR without discussion might end up resulting in a rejected PR because we might be taking the core in a different direction than you might be aware of. + +To learn more about making a contribution to Pytorch, please see our [Contribution page](CONTRIBUTING.md). For more information about PyTorch releases, see [Release page](RELEASE.md). + +## The Team + +PyTorch is a community-driven project with several skillful engineers and researchers contributing to it. + +PyTorch is currently maintained by [Soumith Chintala](http://soumith.ch), [Gregory Chanan](https://github.com/gchanan), [Dmytro Dzhulgakov](https://github.com/dzhulgakov), [Edward Yang](https://github.com/ezyang), and [Nikita Shulga](https://github.com/malfet) with major contributions coming from hundreds of talented individuals in various forms and means. 
+A non-exhaustive but growing list needs to mention: [Trevor Killeen](https://github.com/killeent), [Sasank Chilamkurthy](https://github.com/chsasank), [Sergey Zagoruyko](https://github.com/szagoruyko), [Adam Lerer](https://github.com/adamlerer), [Francisco Massa](https://github.com/fmassa), [Alykhan Tejani](https://github.com/alykhantejani), [Luca Antiga](https://github.com/lantiga), [Alban Desmaison](https://github.com/albanD), [Andreas Koepf](https://github.com/andreaskoepf), [James Bradbury](https://github.com/jamesb93), [Zeming Lin](https://github.com/ebetica), [Yuandong Tian](https://github.com/yuandong-tian), [Guillaume Lample](https://github.com/glample), [Marat Dukhan](https://github.com/Maratyszcza), [Natalia Gimelshein](https://github.com/ngimel), [Christian Sarofeen](https://github.com/csarofeen), [Martin Raison](https://github.com/martinraison), [Edward Yang](https://github.com/ezyang), [Zachary Devito](https://github.com/zdevito). + +Note: This project is unrelated to [hughperkins/pytorch](https://github.com/hughperkins/pytorch) with the same name. Hugh is a valuable contributor to the Torch community and has helped with many things Torch and PyTorch. + +## License + +PyTorch has a BSD-style license, as found in the [LICENSE](LICENSE) file. 
diff --git a/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..5dbeadbac94dc57c6f7eb283e8e8b392cef2ad3d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/RECORD @@ -0,0 +1,12774 @@ +../../../bin/convert-caffe2-to-onnx,sha256=JAY9jxYUXPotTawAU4mfbAHcBorJqcPzgNAAOhpSkHQ,266 +../../../bin/convert-onnx-to-caffe2,sha256=AWQNxZB3eWU0rlx_iXf0pq5O7FYxq42egriw20JDHDc,266 +../../../bin/torchfrtrace,sha256=xNs3zbkguHaxn2p4h8GJPCEBUeWrcoRgK-nFWyljynQ,243 +../../../bin/torchrun,sha256=Yvb3Gzgzvd129OS6-KsuYABNTUdqFT9IGGaZ59Gg1AI,234 +functorch/_C.cpython-310-x86_64-linux-gnu.so,sha256=nT_osFedMn9wllqkB4B47IqcCnF0G8vO1ls9tpt5Uiw,320240 +functorch/__init__.py,sha256=NAwGN21zq-tccaF-ROtv-VWFoPdb7y9iuAt6Hy6QCtc,1037 +functorch/__pycache__/__init__.cpython-310.pyc,, +functorch/_src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +functorch/_src/__pycache__/__init__.cpython-310.pyc,, +functorch/_src/aot_autograd/__init__.py,sha256=SGo7gh6XGYcOxTGf5g-R8Y9AF95ICcJbQwQD6rFyrpQ,291 +functorch/_src/aot_autograd/__pycache__/__init__.cpython-310.pyc,, +functorch/_src/eager_transforms/__init__.py,sha256=kX_52fDvSX9YX9OAwo5bjvJtrxyjEBUJ1PueW8xgsuw,291 +functorch/_src/eager_transforms/__pycache__/__init__.cpython-310.pyc,, +functorch/_src/make_functional/__init__.py,sha256=b3y8s3KhtCqFB8lM4Pi48AwuztUt7NBK-VISZNJYYjw,235 +functorch/_src/make_functional/__pycache__/__init__.cpython-310.pyc,, +functorch/_src/vmap/__init__.py,sha256=k8r2Esz6tB5D7U_UA0_BCDaWoOmn8JNVrRqK7nG7_fM,467 +functorch/_src/vmap/__pycache__/__init__.cpython-310.pyc,, +functorch/compile/__init__.py,sha256=fZnNG56VBLfKlXMqX5Rj3tORQYLyxbyoA0rEoEBt3KM,756 +functorch/compile/__pycache__/__init__.cpython-310.pyc,, +functorch/dim/__init__.py,sha256=TnTKY_0spHZkgd5cVaJjpz8J3pTHWyp8-rOQwrXfQ1Q,4723 
+functorch/dim/__pycache__/__init__.cpython-310.pyc,, +functorch/dim/__pycache__/batch_tensor.cpython-310.pyc,, +functorch/dim/__pycache__/delayed_mul_tensor.cpython-310.pyc,, +functorch/dim/__pycache__/dim.cpython-310.pyc,, +functorch/dim/__pycache__/magic_trace.cpython-310.pyc,, +functorch/dim/__pycache__/op_properties.cpython-310.pyc,, +functorch/dim/__pycache__/reference.cpython-310.pyc,, +functorch/dim/__pycache__/tree_map.cpython-310.pyc,, +functorch/dim/__pycache__/wrap_type.cpython-310.pyc,, +functorch/dim/batch_tensor.py,sha256=DivqprUhdjkrwvNRL-NO1CQ30z5QuI2npkKwRGgR_cU,668 +functorch/dim/delayed_mul_tensor.py,sha256=B9pt_vOxrKeISz-MY6_qZBhnNKtrixs4GtUA792b7G0,2441 +functorch/dim/dim.py,sha256=_x1W_dPyhU10sD79Gr-v6_JKUTb62E74Dc_Aeoty6A8,3398 +functorch/dim/magic_trace.py,sha256=oUxIOV2TPg0eIStZDoGt03_l_T2vFxGohnhBtL6SB-w,1329 +functorch/dim/op_properties.py,sha256=GpW8Ylgq1YlMilQh6cgNQ66tAiN84WRyEjn_VkbJYFQ,6687 +functorch/dim/reference.py,sha256=OyipGpLydeHeDeDwhye7U46ZMwvza3xxdzNQOlsX_80,20348 +functorch/dim/tree_map.py,sha256=nYN6f98uIYQBq8o-0gx_Ad60GL71ZSnnaKw6evNQWD4,375 +functorch/dim/wrap_type.py,sha256=0V0QVkjtU4Q-wPnw9gpchfUx1yBUrAmnuEDZNrOimPk,1871 +functorch/einops/__init__.py,sha256=qNdomhBnsKNuNNlGsbqqipT9wYQkcxMuEQJKq4zhry8,59 +functorch/einops/__pycache__/__init__.cpython-310.pyc,, +functorch/einops/__pycache__/_parsing.cpython-310.pyc,, +functorch/einops/__pycache__/rearrange.cpython-310.pyc,, +functorch/einops/_parsing.py,sha256=jzsUTtyJgFnkeWxz6PLTdcsdgrVGdQmrUF5D5ws05TY,12260 +functorch/einops/rearrange.py,sha256=PTrNt3TIaoibiwnfrkUztOmqkzdqawslP1PE6f1X_xk,8041 +functorch/experimental/__init__.py,sha256=oKN9tnpkCih0kicct1MwkKhGao2Qh7aoc5hsnqLadVE,273 +functorch/experimental/__pycache__/__init__.cpython-310.pyc,, +functorch/experimental/__pycache__/control_flow.cpython-310.pyc,, +functorch/experimental/__pycache__/ops.cpython-310.pyc,, +functorch/experimental/control_flow.py,sha256=7QVY2HkJ6H20TCPdAEsLVPrh-6X5R54s42JloKxZ6yQ,233 
+functorch/experimental/ops.py,sha256=kDGcckdoYwOg9fS4JqvZnsIt8Ss9O6RGeasJSyi0OUY,57 +torch-2.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +torch-2.5.1.dist-info/LICENSE,sha256=Rq55jMsJIKaVn8t7gBgNicpvtlKQkjWw8DWlLOwihYQ,507602 +torch-2.5.1.dist-info/METADATA,sha256=w7gm5XnbD4jIYP2ULQPMliSrg6O--JN84liCehG1YZo,28312 +torch-2.5.1.dist-info/NOTICE,sha256=wsx78MrsdlLCtGCopHC-oWd_JB5KuOQx3zTPF_Wp_sA,23632 +torch-2.5.1.dist-info/RECORD,, +torch-2.5.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch-2.5.1.dist-info/WHEEL,sha256=oPTolxX39D9SX81DhGjP9ncZ9PHcmhO1jInNxQx689I,105 +torch-2.5.1.dist-info/entry_points.txt,sha256=xj50BO_q8NtdIVA-qixQJgGjH3XG542Y3hypb1gk5Rs,347 +torch-2.5.1.dist-info/top_level.txt,sha256=MsBcfJyMU15lW1efu5w7Tzd4MenrYHiuaixbHMfAoco,25 +torch/_C.cpython-310-x86_64-linux-gnu.so,sha256=m_8eZXaH3w6MecRXAMw1UYaGDfil4fadO0YBIR_zwYs,37857 +torch/_C/_VariableFunctions.pyi,sha256=-FCBbSvkXpo_8spkoQ0Q25ZA82iHCOegdORBis03T-c,1143553 +torch/_C/__init__.pyi,sha256=3yqkb0eBspYfAwQmbBw8E-CVMnTWrGNGMh_Pw24PrMU,407742 +torch/_C/_aoti.pyi,sha256=PSyfhxyETFo0uRNell-q6c7JQyJdmmHHm1XXteHH7Jw,589 +torch/_C/_autograd.pyi,sha256=LiwMknKO7vqP8cZZtM2-BZWi1WQybC8SXQzI4dwFSy4,4557 +torch/_C/_cpu.pyi,sha256=hNNpcqaNcX_LNa5BGWOx3_GMoEmEh3xJZ3wcLg-VVos,391 +torch/_C/_cudnn.pyi,sha256=FwKVMtP_qbhryc7MBN6zm3XIJoLMk8pWqvvQKJSx3O4,370 +torch/_C/_cusparselt.pyi,sha256=kD30gidilch4jKuIr7-ItTvDuEtt6L7vxL1l56sb_mw,32 +torch/_C/_distributed_autograd.pyi,sha256=ts-Y7ruLJe-UBRGnc2TbkqQM3jvQkQB5NTxN9ogP3Tc,918 +torch/_C/_distributed_c10d.pyi,sha256=Owx6cQ-HqjLHlwq3jC8CwXnMFilj2v5SAf1_l4T9bys,19921 +torch/_C/_distributed_rpc.pyi,sha256=0mlTwOqrF608gU4oUnIPc25nQ4zeJ6LWycHcuyfB3e8,6080 +torch/_C/_distributed_rpc_testing.pyi,sha256=2cUz7bIO5y9fKUzfafx6Uh84QNO10gtP3uJXAXmE8Dk,1017 +torch/_C/_functions.pyi,sha256=VFIzphSWy5ZmNDPu3lfyCaeiCwcY9PCoS18YoI0iw_0,324 
+torch/_C/_functorch.pyi,sha256=oyOPSPGwrxpySnqymbG7PACTw7WzDRvPXEcuM1TtW60,3256 +torch/_C/_instruction_counter.pyi,sha256=xjwkiHBgXbvN8Iw7UNVEvE2YDJKFUU1_EoeTS4prCV8,109 +torch/_C/_itt.pyi,sha256=6fhhHGYgreXbGka-VtqX9FjjPaSznfOmDHPVC171DII,169 +torch/_C/_lazy.pyi,sha256=JKDVp5NHjiNk45hiTQRzciOdWWjr4oYBWV6_4gb3Kzc,971 +torch/_C/_lazy_ts_backend.pyi,sha256=YfYcEssTgLqOHQqUDYg5ZgbdB_D52mG19VETBmh7yuc,326 +torch/_C/_monitor.pyi,sha256=60fuhstORKX_-dS0h_7AJE5zcG_yQdbzj0UB91wlmkg,1026 +torch/_C/_nn.pyi,sha256=vqkbOGTVO36_0RmF-VKEOXvMg1U9v4ajpTO_wzkCneA,4324 +torch/_C/_nvtx.pyi,sha256=04PqUeCT1UtaX9HgiOwshfu9WNHkXBLIWVPzQK_gz30,250 +torch/_C/_onnx.pyi,sha256=7xxwokJOBBQxnbox3lGLgMo9RkVGVgtfPBtsKcaGaTE,710 +torch/_C/_profiler.pyi,sha256=pWXR7K4GPKGWkd0VhWFEiVztvSqZT5YItR9BWk01BlA,6191 +torch/_C/_verbose.pyi,sha256=vMdQYMqABMqBFxYykp8_VD0QBaubolczBmCEv-UcA00,134 +torch/_VF.py,sha256=XIfh8pIjzvG0ySWcOScz4E067DFoT-8TkcmAG5oYn94,664 +torch/_VF.pyi,sha256=-FCBbSvkXpo_8spkoQ0Q25ZA82iHCOegdORBis03T-c,1143553 +torch/__config__.py,sha256=jmap9bxAICCG1tSYV7J62wHyG9tzIa_cYkTQVG8aU5c,580 +torch/__future__.py,sha256=yk9l_KWsfVIzUBx9cGr-OdtWmb-pI8ZhcROAm3a_FQw,3185 +torch/__init__.py,sha256=4bW_QVG9JGoh46xTGPbt_PLm7w_U8jO_t3omRpjsExk,95190 +torch/__pycache__/_VF.cpython-310.pyc,, +torch/__pycache__/__config__.cpython-310.pyc,, +torch/__pycache__/__future__.cpython-310.pyc,, +torch/__pycache__/__init__.cpython-310.pyc,, +torch/__pycache__/_appdirs.cpython-310.pyc,, +torch/__pycache__/_classes.cpython-310.pyc,, +torch/__pycache__/_compile.cpython-310.pyc,, +torch/__pycache__/_custom_ops.cpython-310.pyc,, +torch/__pycache__/_deploy.cpython-310.pyc,, +torch/__pycache__/_guards.cpython-310.pyc,, +torch/__pycache__/_jit_internal.cpython-310.pyc,, +torch/__pycache__/_linalg_utils.cpython-310.pyc,, +torch/__pycache__/_lobpcg.cpython-310.pyc,, +torch/__pycache__/_lowrank.cpython-310.pyc,, +torch/__pycache__/_meta_registrations.cpython-310.pyc,, 
+torch/__pycache__/_namedtensor_internals.cpython-310.pyc,, +torch/__pycache__/_ops.cpython-310.pyc,, +torch/__pycache__/_python_dispatcher.cpython-310.pyc,, +torch/__pycache__/_size_docs.cpython-310.pyc,, +torch/__pycache__/_sources.cpython-310.pyc,, +torch/__pycache__/_storage_docs.cpython-310.pyc,, +torch/__pycache__/_streambase.cpython-310.pyc,, +torch/__pycache__/_tensor.cpython-310.pyc,, +torch/__pycache__/_tensor_docs.cpython-310.pyc,, +torch/__pycache__/_tensor_str.cpython-310.pyc,, +torch/__pycache__/_torch_docs.cpython-310.pyc,, +torch/__pycache__/_utils.cpython-310.pyc,, +torch/__pycache__/_utils_internal.cpython-310.pyc,, +torch/__pycache__/_vmap_internals.cpython-310.pyc,, +torch/__pycache__/_weights_only_unpickler.cpython-310.pyc,, +torch/__pycache__/functional.cpython-310.pyc,, +torch/__pycache__/hub.cpython-310.pyc,, +torch/__pycache__/library.cpython-310.pyc,, +torch/__pycache__/overrides.cpython-310.pyc,, +torch/__pycache__/quasirandom.cpython-310.pyc,, +torch/__pycache__/random.cpython-310.pyc,, +torch/__pycache__/return_types.cpython-310.pyc,, +torch/__pycache__/serialization.cpython-310.pyc,, +torch/__pycache__/storage.cpython-310.pyc,, +torch/__pycache__/torch_version.cpython-310.pyc,, +torch/__pycache__/types.cpython-310.pyc,, +torch/__pycache__/version.cpython-310.pyc,, +torch/_appdirs.py,sha256=GrfSWQ8yf8Y6cnOlIP4KWTI_HgB_V4O-RJZlqtBsZTA,26168 +torch/_awaits/__init__.py,sha256=T0RT6TwpQbHV9RX23zQYck-XpJwWC1bH3ObNLeqq6yA,1652 +torch/_awaits/__pycache__/__init__.cpython-310.pyc,, +torch/_classes.py,sha256=Kj3ZPXLp8ZRQZwuJ86V7W1rfJFcJ9qSaVRu_rTPENL8,1721 +torch/_compile.py,sha256=Q88hFk5i07jtOgwCQfp8qMOUdFQV9IVVrSioBpdhEXY,1305 +torch/_custom_op/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_custom_op/__pycache__/__init__.cpython-310.pyc,, +torch/_custom_op/__pycache__/autograd.cpython-310.pyc,, +torch/_custom_op/__pycache__/functional.cpython-310.pyc,, +torch/_custom_op/__pycache__/impl.cpython-310.pyc,, 
+torch/_custom_op/autograd.py,sha256=MXuiQXSMJM69DcMJnmIRQe6eBGvX8aWORhy0U1tmSrY,11818 +torch/_custom_op/functional.py,sha256=yg380pZCBjSJPsg1qxZJ0GaBanjEs_XP6uhllksoIFI,7952 +torch/_custom_op/impl.py,sha256=2pTECsXWQQYvtnWheloWspti015zfayhUHqNqIuK7XI,26696 +torch/_custom_ops.py,sha256=jizwU0mH5w4_T2cEb88gGvwFw5uzoUZXYSwGWdDi_3Y,12822 +torch/_decomp/__init__.py,sha256=RWUuRaTlrBLoftIZFFnxvCo9ZVnm9LsNxLZlF1CPmQU,16904 +torch/_decomp/__pycache__/__init__.cpython-310.pyc,, +torch/_decomp/__pycache__/decompositions.cpython-310.pyc,, +torch/_decomp/__pycache__/decompositions_for_jvp.cpython-310.pyc,, +torch/_decomp/__pycache__/decompositions_for_rng.cpython-310.pyc,, +torch/_decomp/decompositions.py,sha256=Lx-7IYbU-324IQuii17ZMS0Yj0jMHItmg-hkuU1m1M8,172745 +torch/_decomp/decompositions_for_jvp.py,sha256=m_ZHSPLkhO643TbGC3aVuylmblW0TWNpgrUT5tFYkrs,11703 +torch/_decomp/decompositions_for_rng.py,sha256=DM0-j6pimdtLESd4fgOmGAuFdreUC6ot5VF_EYcxnok,9184 +torch/_deploy.py,sha256=g1HU2xJxIgt17gVzWEOErF9o9xPWatuDxyTMs0vhSCM,3457 +torch/_dispatch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_dispatch/__pycache__/__init__.cpython-310.pyc,, +torch/_dispatch/__pycache__/python.cpython-310.pyc,, +torch/_dispatch/python.py,sha256=rNjbRa61vO_vxkrlfGjaGddyHwj1A1b518xbLWbB6Ss,6373 +torch/_dynamo/__init__.py,sha256=A_WPkxFlJAZpvduhQgSdcqjq8TXeKw3rNA959B9A_tw,3151 +torch/_dynamo/__pycache__/__init__.cpython-310.pyc,, +torch/_dynamo/__pycache__/_trace_wrapped_higher_order_op.cpython-310.pyc,, +torch/_dynamo/__pycache__/bytecode_analysis.cpython-310.pyc,, +torch/_dynamo/__pycache__/bytecode_transformation.cpython-310.pyc,, +torch/_dynamo/__pycache__/cache_size.cpython-310.pyc,, +torch/_dynamo/__pycache__/callback.cpython-310.pyc,, +torch/_dynamo/__pycache__/code_context.cpython-310.pyc,, +torch/_dynamo/__pycache__/codegen.cpython-310.pyc,, +torch/_dynamo/__pycache__/compiled_autograd.cpython-310.pyc,, +torch/_dynamo/__pycache__/comptime.cpython-310.pyc,, 
+torch/_dynamo/__pycache__/config.cpython-310.pyc,, +torch/_dynamo/__pycache__/convert_frame.cpython-310.pyc,, +torch/_dynamo/__pycache__/create_parameter_op.cpython-310.pyc,, +torch/_dynamo/__pycache__/current_scope_id.cpython-310.pyc,, +torch/_dynamo/__pycache__/debug_utils.cpython-310.pyc,, +torch/_dynamo/__pycache__/decorators.cpython-310.pyc,, +torch/_dynamo/__pycache__/device_interface.cpython-310.pyc,, +torch/_dynamo/__pycache__/distributed.cpython-310.pyc,, +torch/_dynamo/__pycache__/eval_frame.cpython-310.pyc,, +torch/_dynamo/__pycache__/exc.cpython-310.pyc,, +torch/_dynamo/__pycache__/external_utils.cpython-310.pyc,, +torch/_dynamo/__pycache__/funcname_cache.cpython-310.pyc,, +torch/_dynamo/__pycache__/guards.cpython-310.pyc,, +torch/_dynamo/__pycache__/hooks.cpython-310.pyc,, +torch/_dynamo/__pycache__/logging.cpython-310.pyc,, +torch/_dynamo/__pycache__/mutation_guard.cpython-310.pyc,, +torch/_dynamo/__pycache__/output_graph.cpython-310.pyc,, +torch/_dynamo/__pycache__/profiler.cpython-310.pyc,, +torch/_dynamo/__pycache__/replay_record.cpython-310.pyc,, +torch/_dynamo/__pycache__/resume_execution.cpython-310.pyc,, +torch/_dynamo/__pycache__/side_effects.cpython-310.pyc,, +torch/_dynamo/__pycache__/source.cpython-310.pyc,, +torch/_dynamo/__pycache__/symbolic_convert.cpython-310.pyc,, +torch/_dynamo/__pycache__/tensor_version_op.cpython-310.pyc,, +torch/_dynamo/__pycache__/test_case.cpython-310.pyc,, +torch/_dynamo/__pycache__/test_minifier_common.cpython-310.pyc,, +torch/_dynamo/__pycache__/testing.cpython-310.pyc,, +torch/_dynamo/__pycache__/trace_rules.cpython-310.pyc,, +torch/_dynamo/__pycache__/types.cpython-310.pyc,, +torch/_dynamo/__pycache__/utils.cpython-310.pyc,, +torch/_dynamo/_trace_wrapped_higher_order_op.py,sha256=tlv1wnqulFU9KuO_-746pLU3otOMEG7_Fs0wYG4-BfM,5167 +torch/_dynamo/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_dynamo/backends/__pycache__/__init__.cpython-310.pyc,, 
+torch/_dynamo/backends/__pycache__/common.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/cudagraphs.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/debugging.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/distributed.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/inductor.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/onnxrt.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/registry.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/tensorrt.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/torchxla.cpython-310.pyc,, +torch/_dynamo/backends/__pycache__/tvm.cpython-310.pyc,, +torch/_dynamo/backends/common.py,sha256=cunEBQX9sN5njUqNVyE3Cr7ETf6_6SeDm4CeUBjYYWg,3952 +torch/_dynamo/backends/cudagraphs.py,sha256=Q3IYGqBYCeO7G23cD4ws8BEushOTKeX5fZE8K0_VbCI,8369 +torch/_dynamo/backends/debugging.py,sha256=B5tuV_QzUI8GVV8EP6BjAs_nYly4mRu3SVLKCzF9hFg,10815 +torch/_dynamo/backends/distributed.py,sha256=Ori1Cs-gUqF6AiptB-Eo6rw1NMnNTI4DZ39rO7eHD-8,25671 +torch/_dynamo/backends/inductor.py,sha256=HmZwyPZ5pW1xZv4kSpQYdthqeuAQ4oO8Hm9-EDnL03A,291 +torch/_dynamo/backends/onnxrt.py,sha256=p-xjUoLDCUsf3_ks4qGlrJipMyMkhO7MllYa985CpDQ,1541 +torch/_dynamo/backends/registry.py,sha256=HykfC_wnx9Rsr58Mj1r2gTb2O-oFkW1ENGNgtV7qRj0,3684 +torch/_dynamo/backends/tensorrt.py,sha256=oMtmZB2Op6yeZvFXskMDZUATwLmgRCg4xxh3OrcVqSY,406 +torch/_dynamo/backends/torchxla.py,sha256=mIMj0a3Zs2GjNfAWktDGzbJnB7oRkMvdgXK9qWFbYcM,1255 +torch/_dynamo/backends/tvm.py,sha256=Vpk0vf3jULyI1IiReWwr0crNcDdUJZ1pLEuImwY2cj4,6949 +torch/_dynamo/bytecode_analysis.py,sha256=ACKi0-RCZksyu1eR8Cb9cvecqCOnL1ki4vNiYjRYMF8,8746 +torch/_dynamo/bytecode_transformation.py,sha256=lYw68l-03wGTEfeCMX6T2UmctdWm0WpAk_eLwTmJQhk,54909 +torch/_dynamo/cache_size.py,sha256=XOIVv-k-Tdw4K5Fto0WH4dD_tWViCi6dv1Bbj55l13Q,7844 +torch/_dynamo/callback.py,sha256=OuDQ4Gr3J2LoZf9jTNxGRDr-jF1our9HvHA8ayhzZY8,2207 +torch/_dynamo/code_context.py,sha256=GncsTJWOn8lyrZoA2DqqSbO8aCtguaT5Icbg6yfEib4,726 
+torch/_dynamo/codegen.py,sha256=OTiI2PFwNQoC6eDAHW98ALwkfupNsZ9K4Ac1BbHyDUA,19439 +torch/_dynamo/compiled_autograd.py,sha256=BvXm8eIQHcATl-geCbXmO0mEU4sePMTsjGGs5Gi3vEY,20067 +torch/_dynamo/comptime.py,sha256=MR1_3j_TQaf2YLd3mH7hJyovg2a22w090irnPXgw-ww,13586 +torch/_dynamo/config.py,sha256=4qcWyQZJyP_dCg5T2YlhNwWrzgtRsqFzA2YiuDquszU,20789 +torch/_dynamo/convert_frame.py,sha256=RXUdCDf3FL9lYbPvCKrFDAr8QukwB7P-zFyd_U3krzM,46321 +torch/_dynamo/create_parameter_op.py,sha256=1z1YZdmTqTYbsiM19Zv7Yh8GA0yQV4GrtHpKxQtDBYU,2013 +torch/_dynamo/current_scope_id.py,sha256=aqb2iUMi4j0jzarvWC4ALOBG_SRjVtpVG0goo3jgJNw,638 +torch/_dynamo/debug_utils.py,sha256=I9BfDxXj5snX9WDVMk56bQ-eXXLylGw9TKvawGsimFs,27767 +torch/_dynamo/decorators.py,sha256=gQeQjkLlk3QJ4k8ccvMb-dlNPVgBuVNIK3C89wywqng,21578 +torch/_dynamo/device_interface.py,sha256=xkWRiQkWbXcmbZ7-1SB28mub0nnn0HhUEUecb5AL2WI,11683 +torch/_dynamo/distributed.py,sha256=SHNU8D7dQfHhWZfghph_GAh4SUSH3D9mFJ7hW-Q8eDk,594 +torch/_dynamo/eval_frame.py,sha256=Z-dV979EB3iBIT7oeVdMtFZPqOeCcvff-iNJlwbrCEs,64000 +torch/_dynamo/exc.py,sha256=3EjBpJTMuaNmW3Od0azrHMQMLSipOBSiX5LEX2UKsRs,14474 +torch/_dynamo/external_utils.py,sha256=LtLqD2LKkJrM_dRkqFJhFiETgXdrZ7BlK9noiLiPWOQ,3939 +torch/_dynamo/funcname_cache.py,sha256=GhDnnmN8eGDOKesLj7alcRm7HVd2jRh7jGx8b7fjbUQ,1759 +torch/_dynamo/guards.py,sha256=vS9UmJbKa71REMp79ErwHd2kvPwlAo2xdxZZUkUfsVE,116666 +torch/_dynamo/hooks.py,sha256=7vFrS736lPm_K8l1EPtIc8BIMoNAYfbQZWo62VQrOEM,292 +torch/_dynamo/logging.py,sha256=rvqSEObztO8YNryksL-UJzJCdGLxj72Mhm6cGHyTt6o,1596 +torch/_dynamo/mutation_guard.py,sha256=F5_FwJonfyf9ZsZZ_6iLZMZaSxjAoPJOgVaIRIcbECg,4224 +torch/_dynamo/output_graph.py,sha256=H_liU7jew1TFZimq4BffJmui_mboHFyTXdETIcbEU-o,89251 +torch/_dynamo/polyfills/__init__.py,sha256=DZpx7sDKNAycoixtBzP3ZF6rxDa53zdT6JMHZLFWNqI,4284 +torch/_dynamo/polyfills/__pycache__/__init__.cpython-310.pyc,, +torch/_dynamo/polyfills/__pycache__/builtins.cpython-310.pyc,, 
+torch/_dynamo/polyfills/__pycache__/functools.cpython-310.pyc,, +torch/_dynamo/polyfills/__pycache__/itertools.cpython-310.pyc,, +torch/_dynamo/polyfills/__pycache__/loader.cpython-310.pyc,, +torch/_dynamo/polyfills/__pycache__/os.cpython-310.pyc,, +torch/_dynamo/polyfills/__pycache__/sys.cpython-310.pyc,, +torch/_dynamo/polyfills/builtins.py,sha256=IlDaNDdVCXQmjhr-6Y5xQvpyh11xfzXK_b6CCOrsmlA,1050 +torch/_dynamo/polyfills/functools.py,sha256=DvKj6YYimbZ-B8NhJzICQ2tLunfajoGjp_1ASchi0HQ,85 +torch/_dynamo/polyfills/itertools.py,sha256=xvWkXqE6y0h2kPBOxKHzQCCpPCcX8Q0vprr1Wj8mNYE,2586 +torch/_dynamo/polyfills/loader.py,sha256=cPUJZwUh2h27WMx2LvPNVxMBNl9FVFj4x5FWtwipAI8,1180 +torch/_dynamo/polyfills/os.py,sha256=GiDwxYjoV6wajRo3SZxPpmCFDfT8rlrL8bnOO_MbXsM,978 +torch/_dynamo/polyfills/sys.py,sha256=eruG3oZf8sEp4jrrQKb-WtAzVkIfXSv8wmk3s6_Wk-c,79 +torch/_dynamo/profiler.py,sha256=3tGFggZkCy6ufL5EspbQwqD5qVUC86qACHVsnZhfMSc,4908 +torch/_dynamo/replay_record.py,sha256=0xt1GZ48ingaBLVJhGisVq87x2fv0EnW7iiTv414qw4,3337 +torch/_dynamo/repro/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_dynamo/repro/__pycache__/__init__.cpython-310.pyc,, +torch/_dynamo/repro/__pycache__/after_aot.cpython-310.pyc,, +torch/_dynamo/repro/__pycache__/after_dynamo.cpython-310.pyc,, +torch/_dynamo/repro/after_aot.py,sha256=lnONZnzk-Cy3WX7Ys3htTnYdP2L7fmL4dWr7v1Mk1UA,33134 +torch/_dynamo/repro/after_dynamo.py,sha256=WO9BZvIf4kgYvlxyXu77daPkooHW2VBYiyZY261zXiA,19591 +torch/_dynamo/resume_execution.py,sha256=x5SMr0LK66C6qem3aGU1onl3vlDa-rdwA60LeA4VHFE,28175 +torch/_dynamo/side_effects.py,sha256=z8rJU0W9zb2e_B9jyBiPiIP966jqCvYHETh5_F6rhi4,29313 +torch/_dynamo/source.py,sha256=plAuWeGe_UvS79TGASoSwPEnQs1zM9eK2FkOZgsAK4s,24982 +torch/_dynamo/symbolic_convert.py,sha256=N0RBHul7reaOb4lKcc5HAwTWdPJXfD7wqgV1DFO9Pnc,132903 +torch/_dynamo/tensor_version_op.py,sha256=4bwxWaQWtcYaN5I2bUUMpXT0E1Aj7BXb7JMhFuf1TCU,2128 
+torch/_dynamo/test_case.py,sha256=4OIJ8jvqGXCl3R_1oysyWb2D1WpivKzH3rljzPplC38,1993 +torch/_dynamo/test_minifier_common.py,sha256=MPW7xlUzSq8OfsX0kSgxC8VvyX_r1jDMG1zLK2ELDyg,10037 +torch/_dynamo/testing.py,sha256=RZqQYKL5lkUi85iWUOiW4j6MCmWO7RZJ2iEcP3_DhQw,11617 +torch/_dynamo/trace_rules.py,sha256=ukOK_StXTV1zNTLnmV7QTCkwbzK3Xfh2V5eCiDdbztU,140951 +torch/_dynamo/types.py,sha256=xDjB7mN3LPAi9eaaLVz-vXmQC26eg_4PeXJOqmLYDKo,2278 +torch/_dynamo/utils.py,sha256=W-cuftO_g3hhUVVMywf7fJ10ACyEIwxG7NU91_yaMOk,104079 +torch/_dynamo/variables/__init__.py,sha256=TctFE7HIUINHzA96udXC7gMWBPXwi3xJolTSGo0D0wE,4708 +torch/_dynamo/variables/__pycache__/__init__.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/base.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/builder.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/builtin.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/constant.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/ctx_manager.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/dicts.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/distributed.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/functions.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/higher_order_ops.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/iter.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/lazy.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/lists.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/misc.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/nn_module.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/optimizer.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/script_object.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/sdpa.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/tensor.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/torch.cpython-310.pyc,, +torch/_dynamo/variables/__pycache__/torch_function.cpython-310.pyc,, 
+torch/_dynamo/variables/__pycache__/user_defined.cpython-310.pyc,, +torch/_dynamo/variables/base.py,sha256=rqW7sta5aJhw9vU26Boxb3uLTYHI5n6-jjKgmWkv0iY,12384 +torch/_dynamo/variables/builder.py,sha256=WVTstcEb-7kaJaCQqqo0fVYAUhdnpCVQycbOxhCBA9s,122803 +torch/_dynamo/variables/builtin.py,sha256=Go5-K2jSgGCElwufLiYB5aN_2IIGuBHWrK3N0EB2ZXs,82224 +torch/_dynamo/variables/constant.py,sha256=Pp0BE442vjFgeYdpH0jyF7L-hJ4FkK7VhWCiOW0qoAk,9530 +torch/_dynamo/variables/ctx_manager.py,sha256=7qRqjiAkjtr9S63mYxXQNMg5WJVlkcg_7t2Ok8XaiIs,39620 +torch/_dynamo/variables/dicts.py,sha256=HE8TqtW5dcnvvne1Evj5S8iizp5PjvfQA30HVlwOsWI,37046 +torch/_dynamo/variables/distributed.py,sha256=02fMS7LJQ-i-JFW4WcBR_YtbYgAMcGczbK8uiL5HksA,14059 +torch/_dynamo/variables/functions.py,sha256=qeYO9iSHCPs0lQ9dHLeDdnBux8S7TfOggZE1G-z92sE,41423 +torch/_dynamo/variables/higher_order_ops.py,sha256=qwGpFkn9AVmnWBCXOpL5A3T5Q6C18iohZc6Ls-ZPUn4,83139 +torch/_dynamo/variables/iter.py,sha256=s1NLfpGznTz8_w3kghX8DE1YeX8iEk4EeyKV1SPTigM,16661 +torch/_dynamo/variables/lazy.py,sha256=v8-HDnzxJeWlt1QlDh383W89h8CPdAlFZU5Q4xfCq9I,5420 +torch/_dynamo/variables/lists.py,sha256=v1RYY4KiMHQZDnXVavOsnAke1JSBRuOlhfItARr26hw,33278 +torch/_dynamo/variables/misc.py,sha256=SpaLAbewHrCTBz33mWv2JVdCF2GSzNWYtzepnn7PiVY,61104 +torch/_dynamo/variables/nn_module.py,sha256=3P033DM9WEkCzNTEQVXPacU55Do_oz9X-NXFKRredkg,48954 +torch/_dynamo/variables/optimizer.py,sha256=zv4BAqFu81o5nvI1lhabeeyXVdf_HNu-hD664YSWJmA,14197 +torch/_dynamo/variables/script_object.py,sha256=zJXbnjHJPEM3LXVC94Z1Qmbn-h8l9hurWbGcOJfdeqo,2816 +torch/_dynamo/variables/sdpa.py,sha256=-_-e7bkzQElnhC3GGtPQIKHcbms_0kGc7NGSyx4vW18,3244 +torch/_dynamo/variables/tensor.py,sha256=vYQrleplNRRrqiQKqzojJzjgEs75nwjCx4lWcdU4hlo,51732 +torch/_dynamo/variables/torch.py,sha256=Vj-pdYtmWhlFky9jcxSv7M9rJ26ATW22B_CD_dupbTM,47648 +torch/_dynamo/variables/torch_function.py,sha256=6blJVX69ddSQ9w_XGHv84HPuVg76onM_HhJZEngnn_k,14397 
+torch/_dynamo/variables/user_defined.py,sha256=s58WXy8I-aI2EVLYECz0xtuutaufEpBT-chlDKwhDNQ,53926 +torch/_export/__init__.py,sha256=0RMK5w1fkmP7lIEiIx9NzuqX0lk4P28bElKTkeIdi6g,12109 +torch/_export/__pycache__/__init__.cpython-310.pyc,, +torch/_export/__pycache__/converter.cpython-310.pyc,, +torch/_export/__pycache__/error.cpython-310.pyc,, +torch/_export/__pycache__/non_strict_utils.cpython-310.pyc,, +torch/_export/__pycache__/pass_base.cpython-310.pyc,, +torch/_export/__pycache__/tools.cpython-310.pyc,, +torch/_export/__pycache__/utils.cpython-310.pyc,, +torch/_export/__pycache__/verifier.cpython-310.pyc,, +torch/_export/__pycache__/wrappers.cpython-310.pyc,, +torch/_export/converter.py,sha256=gktUlPVAIG2E8t3oFda5BcOMI1HcM8lkZlWewS4zqCk,63306 +torch/_export/db/__init__.py,sha256=a3XxW1RcNAPwEVaI2g11hpnJvSHxGUFsGmDAWfDnLP8,206 +torch/_export/db/__pycache__/__init__.cpython-310.pyc,, +torch/_export/db/__pycache__/case.cpython-310.pyc,, +torch/_export/db/__pycache__/gen_example.cpython-310.pyc,, +torch/_export/db/__pycache__/logging.cpython-310.pyc,, +torch/_export/db/case.py,sha256=5OJumEuLoIuQtNWttdlTQJ-M00yqL3v_tj4pX0olAYo,5022 +torch/_export/db/examples/__init__.py,sha256=PlziokXeStI1gWELwC7zX-T7ZViONPRitQ1uFWNAxL4,1648 +torch/_export/db/examples/__pycache__/__init__.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/assume_constant_result.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/autograd_function.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/class_method.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/cond_branch_class_method.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/cond_branch_nested_function.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/cond_branch_nonlocal_variables.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/cond_closed_over_variable.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/cond_operands.cpython-310.pyc,, 
+torch/_export/db/examples/__pycache__/cond_predicate.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/constrain_as_size_example.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/constrain_as_value_example.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/decorator.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dictionary.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_assert.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_constructor.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_if_guard.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_map.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_round.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_slicing.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/dynamic_shape_view.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/fn_with_kwargs.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/list_contains.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/list_unpack.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/model_attr_mutation.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/nested_function.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/null_context_manager.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/optional_input.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/pytree_flatten.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/scalar_output.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/specialized_attribute.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/static_for_loop.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/static_if.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/tensor_setattr.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/type_reflection_method.cpython-310.pyc,, 
+torch/_export/db/examples/__pycache__/unsupported_operator.cpython-310.pyc,, +torch/_export/db/examples/__pycache__/user_input_mutation.cpython-310.pyc,, +torch/_export/db/examples/assume_constant_result.py,sha256=MPGQemV_ymD07kXuOVQkizSxTaTlu4vr2bAP3xxE3M8,510 +torch/_export/db/examples/autograd_function.py,sha256=i7X61hQS_JQrjSLnLoqge7BhlupzW3v_ddBEH1Rc6Cs,578 +torch/_export/db/examples/class_method.py,sha256=KM6pkWdEc1SMybUR-O2hCN1tD9Vl0B22gPI2LgDiZZ4,499 +torch/_export/db/examples/cond_branch_class_method.py,sha256=PpspYrzN3LBl0CIuGjjZon6rGQeQ32YtaIqbpRDbFyc,1327 +torch/_export/db/examples/cond_branch_nested_function.py,sha256=HgN_wnbfzoATNkD7ksQSwdo6PVhiIP5RqneZvGSTxeQ,1302 +torch/_export/db/examples/cond_branch_nonlocal_variables.py,sha256=B_ROqnPdNhhy7zbP9cyqLUl1LgDgAYYGSr8EyNRiy-M,1841 +torch/_export/db/examples/cond_closed_over_variable.py,sha256=iSWSxhJhumU6CsRSzXUeW7YVxbgjf-C05EDc0Foi-a4,547 +torch/_export/db/examples/cond_operands.py,sha256=YZsfIIS6ubXITFKKl6tHkohzAjJ7Z9y-exkzKmr0n1I,846 +torch/_export/db/examples/cond_predicate.py,sha256=PKpI-GKAdVMDh11EDIpQ3JXvRMXNGuJTKXbGb_uyQjY,663 +torch/_export/db/examples/constrain_as_size_example.py,sha256=1wYblW56aQgBRiP1u2WXkfAYXhLYbKJ6kBNWX1sdIK8,637 +torch/_export/db/examples/constrain_as_value_example.py,sha256=LYSBa_XokesbD6K7KydnVq6r9veGr2nSdm74xlk7G3o,689 +torch/_export/db/examples/decorator.py,sha256=U5YTo_25VfE_Hy9uoD4eYN8wKSJGpOG0dAJ1TZjL1i4,480 +torch/_export/db/examples/dictionary.py,sha256=gSkKkS68Vw_cropGQ57i61jqTleFsLECIXP6pZMqLBE,404 +torch/_export/db/examples/dynamic_shape_assert.py,sha256=Ff1KN5JG62k6wYqR7GvQ8i8lf1jyvc-GNVFASVfOx5w,450 +torch/_export/db/examples/dynamic_shape_constructor.py,sha256=68CRA-j59c0dPBc5hOBsEBmWpWz0opzovyqGIaTfKuw,396 +torch/_export/db/examples/dynamic_shape_if_guard.py,sha256=VG6YmeK31p2jwLPG_Ci3VWmWUMQnJIp_IuijY_UY-dU,560 +torch/_export/db/examples/dynamic_shape_map.py,sha256=XN8_Et1EVYaxmdnACPkiDAVJZ3jkYRaj9U6mU44ZntM,454 
+torch/_export/db/examples/dynamic_shape_round.py,sha256=sMyW43DVXFtZa6jFO08XxVjyw06LcJ9oyuJH5jAsG4Q,525 +torch/_export/db/examples/dynamic_shape_slicing.py,sha256=Rowy_RfPf1kW1l_oCp71YVflt2ivhpP1u_HzTEupxQA,388 +torch/_export/db/examples/dynamic_shape_view.py,sha256=fEuSTQRghtJY8VcBi7qwiZTQXwd9ux8d1PRBYw6Qd6E,444 +torch/_export/db/examples/fn_with_kwargs.py,sha256=fEndpGN__D456vh3NAamoK1KdmtEyvmAsW0gZB0TExo,731 +torch/_export/db/examples/list_contains.py,sha256=aQsdZDUN4Rn94Waft7X30_Lpn4RDbEnmsY3hpvrfV2w,477 +torch/_export/db/examples/list_unpack.py,sha256=T9y8zvqEQ0SD_PDzBupW5EjS2CagaEEPKGcF4aYZrnU,592 +torch/_export/db/examples/model_attr_mutation.py,sha256=dRMK4qEV23My5bisv9EGuDnBhs9JwYqVjekAuYbGrrQ,662 +torch/_export/db/examples/nested_function.py,sha256=S-MXIlaFn4RGeA4GEFIEHG-pcM9WBznXN3E2k-sDlZo,491 +torch/_export/db/examples/null_context_manager.py,sha256=LM5a8DO8x0A0yaA0ZAhOVybcRIcpHy2fxCH0YoqH_2M,478 +torch/_export/db/examples/optional_input.py,sha256=Iol5gTORtXrleJWRCQaLlp11GPzTj5m3dYv2sV6uxZs,455 +torch/_export/db/examples/pytree_flatten.py,sha256=7ssTy3Dxb7FjNTTYKh7esnNyAT9vEjDf4w0HKQ5A4Iw,375 +torch/_export/db/examples/scalar_output.py,sha256=_CMPbP3nbTMGzfiUy3ACOdxxoL8_F1lIEya_JH4HTZ4,543 +torch/_export/db/examples/specialized_attribute.py,sha256=GiKqGiSSPwPIL-QPinKinHNEiQvRsptBx319j7FpRfA,520 +torch/_export/db/examples/static_for_loop.py,sha256=PZ37RURvfUv9KQb98k47LK7E7_2oAlEiNr-ekLB8jyo,412 +torch/_export/db/examples/static_if.py,sha256=uzqQHEytorjGRbxJQQxam6nJZLHcPIPw1uzMfj7KU8w,397 +torch/_export/db/examples/tensor_setattr.py,sha256=LXED12gZiYOvhZXBzThejfW2meNYI8U-8_aIVwfdaqI,337 +torch/_export/db/examples/type_reflection_method.py,sha256=CoXAARKtqSNciuA5V7Tkx0MVhq-h6c4khPRSnxFDjcM,461 +torch/_export/db/examples/unsupported_operator.py,sha256=v-niL6z0og2TuXmvgFWbHFxMP_m0ldu6NefmmW3ikl8,411 +torch/_export/db/examples/user_input_mutation.py,sha256=KJY_YR4w_nEZ66YVMmPsBKv6PfQ7GnQwkV12NJcUCxs,302 
+torch/_export/db/gen_example.py,sha256=YoR-ZOXBjcgEN2ypFHVINmHBETs9iYEWNytTmDhNUiA,462 +torch/_export/db/logging.py,sha256=Z9_l2YqpnXFBNJdcCQAqVqLyCRrjkKeJE0aEaxZbMoI,1616 +torch/_export/error.py,sha256=OjvFCTGZVLlBtbTvaL4xDbBAizynfhwO_2fONObaISk,1770 +torch/_export/non_strict_utils.py,sha256=wiJV2HIxX4ZgIrpPfFSqOlg3btoly3M9XqqXtB5dKYo,20479 +torch/_export/pass_base.py,sha256=SikSx3JaMlAsdD-gpLKYYn77QJOCqBQ1PxZH5JSuZ6Y,17654 +torch/_export/pass_infra/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_export/pass_infra/__pycache__/__init__.cpython-310.pyc,, +torch/_export/pass_infra/__pycache__/node_metadata.cpython-310.pyc,, +torch/_export/pass_infra/__pycache__/proxy_value.cpython-310.pyc,, +torch/_export/pass_infra/node_metadata.py,sha256=r0lYig_aW6VXdmVm9q4oGJywPgwt6fHuZJNfb3mCAtI,782 +torch/_export/pass_infra/proxy_value.py,sha256=i7ywxFZvzNTNXwoUUou0fW1ZfY3tNo_bA7WpVhftMyo,1167 +torch/_export/passes/__init__.py,sha256=78MzFjtaVabk1z2X12WMIlmtkjYaFmgNJlDCTCepNt8,88 +torch/_export/passes/__pycache__/__init__.cpython-310.pyc,, +torch/_export/passes/__pycache__/_node_metadata_hook.cpython-310.pyc,, +torch/_export/passes/__pycache__/add_runtime_assertions_for_constraints_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/collect_tracepoints_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/constant_folding.cpython-310.pyc,, +torch/_export/passes/__pycache__/functionalize_side_effectful_ops_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/lift_constants_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/remove_runtime_assertions.cpython-310.pyc,, +torch/_export/passes/__pycache__/replace_autocast_with_hop_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/replace_quantized_ops_with_standard_ops_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/replace_set_grad_with_hop_pass.cpython-310.pyc,, +torch/_export/passes/__pycache__/replace_view_ops_with_view_copy_ops_pass.cpython-310.pyc,, 
+torch/_export/passes/__pycache__/replace_with_hop_pass_util.cpython-310.pyc,, +torch/_export/passes/_node_metadata_hook.py,sha256=c_goZjJLIJ3xqcU_v6durFFgNM6yz05xPvGZmN9hsMQ,2433 +torch/_export/passes/add_runtime_assertions_for_constraints_pass.py,sha256=JP5NFgw_cRySJEr2-f7aIbfZkNvBwT7P-mIlzIQfAcc,9750 +torch/_export/passes/collect_tracepoints_pass.py,sha256=6PS15n0HL5pXDPeib6X78GPqA6XQu_aM7F6jZpIcxzk,4506 +torch/_export/passes/constant_folding.py,sha256=7WEeuHXOaz7U9aYD2Mf6MhBTTUbfNgM5nltIhLsgtaw,11016 +torch/_export/passes/functionalize_side_effectful_ops_pass.py,sha256=GnA_6u6UTHFjhv0J3hGrQiJhR2iSwQDyJb0s5QKVHjc,3272 +torch/_export/passes/lift_constants_pass.py,sha256=BYDEnkCLCiyhrcogSv7jv-Dr4WDOieGM3lLn2xooVxg,13750 +torch/_export/passes/remove_runtime_assertions.py,sha256=BnZOajU7kYkee43CiQA3DYJhiQJc_fV2di2PlV-5ckk,1067 +torch/_export/passes/replace_autocast_with_hop_pass.py,sha256=Ncuows3HA4EcEbeOBeOd4mb_TZ13RImmIdX3JjIZTvw,6666 +torch/_export/passes/replace_quantized_ops_with_standard_ops_pass.py,sha256=hM6SMM7jtcuJro_MvgLgGWIh_zwdnBniXjCzzqXOO3k,25678 +torch/_export/passes/replace_set_grad_with_hop_pass.py,sha256=oq7Tn7pGwBazb-CP9iJ0fv7L2nkwNdRXnZh3Gifewq4,3831 +torch/_export/passes/replace_view_ops_with_view_copy_ops_pass.py,sha256=syElAO2bUEPLgimmSnYeGMXyp43Blbfq_0NiZPofWIc,2439 +torch/_export/passes/replace_with_hop_pass_util.py,sha256=-UcN2dAGt72D4cg1ERiKwQjFFCeOyzpSjQT69qmLEvE,6996 +torch/_export/serde/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_export/serde/__pycache__/__init__.cpython-310.pyc,, +torch/_export/serde/__pycache__/aoti_schema.cpython-310.pyc,, +torch/_export/serde/__pycache__/dynamic_shapes.cpython-310.pyc,, +torch/_export/serde/__pycache__/schema.cpython-310.pyc,, +torch/_export/serde/__pycache__/schema_check.cpython-310.pyc,, +torch/_export/serde/__pycache__/serialize.cpython-310.pyc,, +torch/_export/serde/__pycache__/union.cpython-310.pyc,, 
+torch/_export/serde/aoti_schema.py,sha256=s55tAmExZdcXNfa-D78hJQxu1auF9hLKirBVlDQU398,241 +torch/_export/serde/dynamic_shapes.py,sha256=xBLSy3ONiZ-iMDVLULtvaWE1u1iNoHq6gWqICcGSekA,11339 +torch/_export/serde/schema.py,sha256=EJFMh9EAKHWokuM5SfpXPjh-blbG9Xdei0N9u-9XYgE,8454 +torch/_export/serde/schema.yaml,sha256=dXxAIroPsUldbSpLWaQjeMsyWJ1p16KOUXC6Kk_jpc8,7558 +torch/_export/serde/schema_check.py,sha256=ICCN7ZCvNyXiqM_5ES8SXAnyZoN8Q4qqZrEmHQZ82hI,10274 +torch/_export/serde/serialize.py,sha256=Dyqyf9a7dYD1BwBitarVhKxodtLL5x7qF4IofLSPq88,119089 +torch/_export/serde/union.py,sha256=1foTKUd72rsAj7GKfwpuBQ8Rg3lhPGA_m0TSfhcWI0w,2014 +torch/_export/tools.py,sha256=Rs4X8-0cZjjrxMwYM3l0SXFlv58JHf2Av88Lrxpa5Do,4558 +torch/_export/utils.py,sha256=NH1wKXd_3gk5W0ZA_5HIiXtIcv9PBoz4CafWQErTccU,35113 +torch/_export/verifier.py,sha256=NckzxWTRrLtC0tnhgyOiYMZLTH8pItbVprTzAYlhiW8,18340 +torch/_export/wrappers.py,sha256=1TbYmoANZk0JvGRTtw1Kc-9vimKqRZ4rQesIC0xrCFc,4169 +torch/_functorch/__init__.py,sha256=a3XxW1RcNAPwEVaI2g11hpnJvSHxGUFsGmDAWfDnLP8,206 +torch/_functorch/__pycache__/__init__.cpython-310.pyc,, +torch/_functorch/__pycache__/aot_autograd.cpython-310.pyc,, +torch/_functorch/__pycache__/apis.cpython-310.pyc,, +torch/_functorch/__pycache__/autograd_function.cpython-310.pyc,, +torch/_functorch/__pycache__/batch_norm_replacement.cpython-310.pyc,, +torch/_functorch/__pycache__/benchmark_utils.cpython-310.pyc,, +torch/_functorch/__pycache__/compile_utils.cpython-310.pyc,, +torch/_functorch/__pycache__/compilers.cpython-310.pyc,, +torch/_functorch/__pycache__/config.cpython-310.pyc,, +torch/_functorch/__pycache__/deprecated.cpython-310.pyc,, +torch/_functorch/__pycache__/eager_transforms.cpython-310.pyc,, +torch/_functorch/__pycache__/functional_call.cpython-310.pyc,, +torch/_functorch/__pycache__/fx_minifier.cpython-310.pyc,, +torch/_functorch/__pycache__/make_functional.cpython-310.pyc,, +torch/_functorch/__pycache__/partitioners.cpython-310.pyc,, 
+torch/_functorch/__pycache__/pyfunctorch.cpython-310.pyc,, +torch/_functorch/__pycache__/python_key.cpython-310.pyc,, +torch/_functorch/__pycache__/pytree_hacks.cpython-310.pyc,, +torch/_functorch/__pycache__/top_operators_github_usage.cpython-310.pyc,, +torch/_functorch/__pycache__/utils.cpython-310.pyc,, +torch/_functorch/__pycache__/vmap.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__init__.py,sha256=a3XxW1RcNAPwEVaI2g11hpnJvSHxGUFsGmDAWfDnLP8,206 +torch/_functorch/_aot_autograd/__pycache__/__init__.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/autograd_cache.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/collect_metadata_analysis.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/dispatch_and_compile_graph.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/functional_utils.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/input_output_analysis.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/jit_compile_runtime_wrappers.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/logging_utils.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/runtime_wrappers.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/schemas.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/subclass_utils.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/traced_function_transforms.cpython-310.pyc,, +torch/_functorch/_aot_autograd/__pycache__/utils.cpython-310.pyc,, +torch/_functorch/_aot_autograd/autograd_cache.py,sha256=L8380boofWWDyM39-5gmUSp2SsJ_zgHr38-kOj1FnI8,21890 +torch/_functorch/_aot_autograd/collect_metadata_analysis.py,sha256=OQbDYn3_pwnVL4GBLlQmIIgNgfagxoBXV9AiVohmR9M,39787 +torch/_functorch/_aot_autograd/dispatch_and_compile_graph.py,sha256=kZSCKBA-QpGSMq-g_7y3d9YutmxAErZuSu3vBB2v8hM,12295 +torch/_functorch/_aot_autograd/functional_utils.py,sha256=u4eDYBqr9euUuPWZ82v_eNG0__hRWIJEGQCvjN4nwe8,21820 
+torch/_functorch/_aot_autograd/input_output_analysis.py,sha256=Aa2lYfeJUzRUKd7F16eE0z0eBx6B0JcaBZjCdgt2qn0,20616 +torch/_functorch/_aot_autograd/jit_compile_runtime_wrappers.py,sha256=Gl6mKa3MWBYC3TspSwhjG4R9_o7VzU9Pq_EVYgjizOI,31880 +torch/_functorch/_aot_autograd/logging_utils.py,sha256=_P6rckasEoJzSPW3VOdJY56m7XZZXE0E5t56K-XZrSg,4654 +torch/_functorch/_aot_autograd/runtime_wrappers.py,sha256=kevUeUtT1Zyp6zSuKcixBoZdAKfrw_InUz-9PvKtpck,95988 +torch/_functorch/_aot_autograd/schemas.py,sha256=1fmX7YRZo6yqqRJ1b2rF_vmjxLerINqWvBYFOVe-Ihg,36404 +torch/_functorch/_aot_autograd/subclass_utils.py,sha256=A_V5LAgQZf6kcFdBTzq3e_fL2nyFwFG0mujpqEhJKa8,15486 +torch/_functorch/_aot_autograd/traced_function_transforms.py,sha256=nOLFwWaZ_GmpiPXwihc-7UR44SWvN1xl3rOmD6iANnI,42929 +torch/_functorch/_aot_autograd/utils.py,sha256=cdBmolouTZkCLHolyKaMn_ssznS2f15O-F3Poz5BJGQ,16765 +torch/_functorch/aot_autograd.py,sha256=RaREBVCgCv_9V4LGiMl9CjFkC5NBpenOWjnjkYsmr_Y,65737 +torch/_functorch/apis.py,sha256=l-xyTt2HcTuWpsumLPoQTN2_m8o6EyPEyOMFbTBPCo0,19119 +torch/_functorch/autograd_function.py,sha256=D4Y1zJToBXyGBYBuRju3WExhgPJknyIxfpXIDRtvrsE,27997 +torch/_functorch/batch_norm_replacement.py,sha256=D2dD2X5oEdLq53OfyvRhg8_6UqnhIhQb8XraVU3FIwM,909 +torch/_functorch/benchmark_utils.py,sha256=EQ2tfzx8JeQWx-ev7CJCvsnJ9RPump47oIitjOzlcj8,6278 +torch/_functorch/compile_utils.py,sha256=SwJDoEnbWyBdXfLLqmXrwyv98G6r3CrFW0-fhndzGe4,6087 +torch/_functorch/compilers.py,sha256=O-ofkZkhO-8Q5dhXvUA2rcST2V0w9dF3or8v9BV0Ld4,14046 +torch/_functorch/config.py,sha256=zyeYDqfPBmL698ijithlV5BD7vq4WLPXKpY92BYGqMc,9145 +torch/_functorch/deprecated.py,sha256=XVlS8_tc7PmxeCMS1vMH6tig-K7eiGufcRSYIZSlqBk,5210 +torch/_functorch/eager_transforms.py,sha256=z1IvZC281ymwLdElKk8N8B8s5yZ6QsjRBMMJ3X7PMq4,72226 +torch/_functorch/functional_call.py,sha256=Mm7_NTHcHBsLeSi4u_LUvm4mwK9_6SJ6agSjwxyyUOs,10560 +torch/_functorch/fx_minifier.py,sha256=JJV-ISDRyV3Afkng5jqkkxxEJFf6ocXONBBcyOe931M,17369 
+torch/_functorch/make_functional.py,sha256=01rlMo8kSARf-O7Zrd86od2oDBRjv0E1m1bS6Xn85V8,22787 +torch/_functorch/partitioners.py,sha256=-VrvwNSiKVCPygxwucJyFxEu2VLza0OhnHPPh3CG2lI,70392 +torch/_functorch/pyfunctorch.py,sha256=ttSJaNZpTTv7fZlHvLj8kvao1A0GQ8bMHjDyCSp1VLQ,10359 +torch/_functorch/python_key.py,sha256=ZeINkQZ4B4PAdoz-zLMNU72w6cVHzE1nObMikLI0oL4,442 +torch/_functorch/pytree_hacks.py,sha256=dWzTSKcqGhK4zHgmgJbyis2kzB69OH71GP_YCDBDbp0,698 +torch/_functorch/top_operators_github_usage.py,sha256=RzbWMwnnErzwFz0FTI9mzzlXx0THNZRP-DgjWbJyz5Y,21441 +torch/_functorch/utils.py,sha256=YOBqKYpTqBYN4W1Gb-VhG6RT51nH3zwMQdIs2sYNGNE,976 +torch/_functorch/vmap.py,sha256=TQa-G5njjLohDGQ1cMGZd8Tds3fbsc9V8e58xDYGLfU,18833 +torch/_guards.py,sha256=qijSsei47zRHoRc0UpUFGZkwVE_eIzxfDgab_Dj266M,31162 +torch/_higher_order_ops/__init__.py,sha256=NV1LO_qlUe_Os-o7rAxHwgihNMOociQ2bUnJc3t_T8w,389 +torch/_higher_order_ops/__pycache__/__init__.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/associative_scan.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/auto_functionalize.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/cond.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/effects.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/executorch_call_delegate.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/flex_attention.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/hints_wrap.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/map.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/out_dtype.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/run_const_graph.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/strict_mode.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/torchbind.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/triton_kernel_wrap.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/utils.cpython-310.pyc,, +torch/_higher_order_ops/__pycache__/while_loop.cpython-310.pyc,, 
+torch/_higher_order_ops/__pycache__/wrap.cpython-310.pyc,, +torch/_higher_order_ops/associative_scan.py,sha256=e1k36ERVcXB8olUY3RAp97VK6R408pRS3kIS_HmIFL0,13176 +torch/_higher_order_ops/auto_functionalize.py,sha256=Vt4hNHm6Dbwdx7ldbkHO4VRAK_JuDukpw7me4kXNBZE,25582 +torch/_higher_order_ops/cond.py,sha256=EJttVtG-XQYMst8qFcts-b68SV36S810Tjm-GUXB5Uo,20579 +torch/_higher_order_ops/effects.py,sha256=NPd7XHilQ_Cn01iFuQOQfR173pTVY3Gpu-h23oej_oQ,9596 +torch/_higher_order_ops/executorch_call_delegate.py,sha256=AVf1u294nVKQ0WJenVdxh-mJ1OSA7YiRNU4gcOn0ez8,5963 +torch/_higher_order_ops/flex_attention.py,sha256=VSIaVG0yxA1Dqxv3HLTuBCQkkc39PXrgjqm5rOkAjn0,36420 +torch/_higher_order_ops/hints_wrap.py,sha256=C0BF4lwLAL2e6-i3JGHdSbeQEP20FFka-gUJrAM_HGU,5328 +torch/_higher_order_ops/map.py,sha256=qA3rdajp8hNtPVeH6pxuYvmaE7PNGn_qVnBjrAwFMdo,8851 +torch/_higher_order_ops/out_dtype.py,sha256=o061STUzrPK7a224ZHfgSUYE1pKjBDV7KvIdovzy3_0,5578 +torch/_higher_order_ops/run_const_graph.py,sha256=yBaqSjurWK40rZTNSe9QUzKg1iCAdojBE00Ijz3kyA8,1863 +torch/_higher_order_ops/strict_mode.py,sha256=ttbblRlJrULmuWSaHFFVAIf-e-sNjZvTc82ELe6zRp4,2959 +torch/_higher_order_ops/torchbind.py,sha256=I5ch9E0lpgLPpYzvpfp6WwdYRrCDubCkiFC_pP0JGME,5412 +torch/_higher_order_ops/triton_kernel_wrap.py,sha256=dqhwfsnIi74Bmi_pUQNhDsHF8HjuxTAP-WSUSaZJNFE,40513 +torch/_higher_order_ops/utils.py,sha256=FkjD-1B71N7YMNcHlhEiU5vXNUfGNlnRytYh5tWej7c,13891 +torch/_higher_order_ops/while_loop.py,sha256=Q-jRyqvZ3lAn8S_RaF4s_nXc0AS-KVomuggbeQNxQPs,10460 +torch/_higher_order_ops/wrap.py,sha256=Q66lupgY3M9k81tuqYN6-0v3XlkxpvscuCnvDKPdYJQ,9765 +torch/_inductor/__init__.py,sha256=j0Ln0Y8ByUVoDPrlnJpdLutZ3T5Uu1Omlj40z892fgs,5390 +torch/_inductor/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/__pycache__/aoti_eager.cpython-310.pyc,, +torch/_inductor/__pycache__/async_compile.cpython-310.pyc,, +torch/_inductor/__pycache__/autotune_process.cpython-310.pyc,, +torch/_inductor/__pycache__/bounds.cpython-310.pyc,, 
+torch/_inductor/__pycache__/codecache.cpython-310.pyc,, +torch/_inductor/__pycache__/comm_analysis.cpython-310.pyc,, +torch/_inductor/__pycache__/comms.cpython-310.pyc,, +torch/_inductor/__pycache__/compile_fx.cpython-310.pyc,, +torch/_inductor/__pycache__/config.cpython-310.pyc,, +torch/_inductor/__pycache__/constant_folding.cpython-310.pyc,, +torch/_inductor/__pycache__/cpp_builder.cpython-310.pyc,, +torch/_inductor/__pycache__/cpu_vec_isa.cpython-310.pyc,, +torch/_inductor/__pycache__/cudagraph_trees.cpython-310.pyc,, +torch/_inductor/__pycache__/cudagraph_utils.cpython-310.pyc,, +torch/_inductor/__pycache__/debug.cpython-310.pyc,, +torch/_inductor/__pycache__/decomposition.cpython-310.pyc,, +torch/_inductor/__pycache__/dependencies.cpython-310.pyc,, +torch/_inductor/__pycache__/exc.cpython-310.pyc,, +torch/_inductor/__pycache__/extern_node_serializer.cpython-310.pyc,, +torch/_inductor/__pycache__/freezing.cpython-310.pyc,, +torch/_inductor/__pycache__/fx_utils.cpython-310.pyc,, +torch/_inductor/__pycache__/graph.cpython-310.pyc,, +torch/_inductor/__pycache__/hooks.cpython-310.pyc,, +torch/_inductor/__pycache__/index_propagation.cpython-310.pyc,, +torch/_inductor/__pycache__/inductor_prims.cpython-310.pyc,, +torch/_inductor/__pycache__/ir.cpython-310.pyc,, +torch/_inductor/__pycache__/jagged_lowerings.cpython-310.pyc,, +torch/_inductor/__pycache__/loop_body.cpython-310.pyc,, +torch/_inductor/__pycache__/lowering.cpython-310.pyc,, +torch/_inductor/__pycache__/metrics.cpython-310.pyc,, +torch/_inductor/__pycache__/mkldnn_ir.cpython-310.pyc,, +torch/_inductor/__pycache__/mkldnn_lowerings.cpython-310.pyc,, +torch/_inductor/__pycache__/ops_handler.cpython-310.pyc,, +torch/_inductor/__pycache__/optimize_indexing.cpython-310.pyc,, +torch/_inductor/__pycache__/pattern_matcher.cpython-310.pyc,, +torch/_inductor/__pycache__/quantized_lowerings.cpython-310.pyc,, +torch/_inductor/__pycache__/remote_cache.cpython-310.pyc,, 
+torch/_inductor/__pycache__/scheduler.cpython-310.pyc,, +torch/_inductor/__pycache__/select_algorithm.cpython-310.pyc,, +torch/_inductor/__pycache__/sizevars.cpython-310.pyc,, +torch/_inductor/__pycache__/subgraph_lowering.cpython-310.pyc,, +torch/_inductor/__pycache__/test_case.cpython-310.pyc,, +torch/_inductor/__pycache__/test_operators.cpython-310.pyc,, +torch/_inductor/__pycache__/utils.cpython-310.pyc,, +torch/_inductor/__pycache__/virtualized.cpython-310.pyc,, +torch/_inductor/__pycache__/wrapper_benchmark.cpython-310.pyc,, +torch/_inductor/aoti_eager.py,sha256=2keI2DrAxZ58v4WVgAJFikmp4UPHTlzjfQVlgQEQBNs,11098 +torch/_inductor/async_compile.py,sha256=br6TlUPC3ZvAs0VA9s3zm3ua_HLqVbuR4UYPagwuoKE,9868 +torch/_inductor/autoheuristic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/autoheuristic/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/autoheuristic/__pycache__/autoheuristic.cpython-310.pyc,, +torch/_inductor/autoheuristic/__pycache__/autoheuristic_utils.cpython-310.pyc,, +torch/_inductor/autoheuristic/__pycache__/learned_heuristic_controller.cpython-310.pyc,, +torch/_inductor/autoheuristic/__pycache__/learnedheuristic_interface.cpython-310.pyc,, +torch/_inductor/autoheuristic/artifacts/_MMRankingA100.py,sha256=s0OiPSoX46ULe5Rh9gc3HagaSzO86X13gS3MGjalsmQ,28044 +torch/_inductor/autoheuristic/artifacts/_MMRankingH100.py,sha256=7fj-Lo91a3HOCOqwgtTmY96P8mvECjWdsi8Ln3Wy1WM,30668 +torch/_inductor/autoheuristic/artifacts/_MixedMMA100.py,sha256=KeaL43QCc3KbSo7jeWo_bGcCYtOdwHrtE5P38k9f3Pk,7920 +torch/_inductor/autoheuristic/artifacts/_MixedMMH100.py,sha256=toH1RbatH3Nbky-oFhyjI_S_Mo4nYfPpVxwN42FouD8,7882 +torch/_inductor/autoheuristic/artifacts/_PadMMA100.py,sha256=KReKRxB8tDQNfBLalXvw5H0Lludtu4eEXelYHk52xbY,4931 +torch/_inductor/autoheuristic/artifacts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/autoheuristic/artifacts/__pycache__/_MMRankingA100.cpython-310.pyc,, 
+torch/_inductor/autoheuristic/artifacts/__pycache__/_MMRankingH100.cpython-310.pyc,, +torch/_inductor/autoheuristic/artifacts/__pycache__/_MixedMMA100.cpython-310.pyc,, +torch/_inductor/autoheuristic/artifacts/__pycache__/_MixedMMH100.cpython-310.pyc,, +torch/_inductor/autoheuristic/artifacts/__pycache__/_PadMMA100.cpython-310.pyc,, +torch/_inductor/autoheuristic/artifacts/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/autoheuristic/autoheuristic.py,sha256=urhC46bX2qWNX4SgyTvElYyZQ7mURbbaSDqQTDRRSRM,11946 +torch/_inductor/autoheuristic/autoheuristic_utils.py,sha256=xXtxLoUomnAWJj8eVlzCv3_cUzjYblKTgXZaQxBbI-8,11300 +torch/_inductor/autoheuristic/learned_heuristic_controller.py,sha256=LITDEw2m_U2OAg6ltX_YAqstVDrdqt8xCfaS_LeEx7k,4328 +torch/_inductor/autoheuristic/learnedheuristic_interface.py,sha256=__OlqM7879bwE1OLvVrKFPLfw4_kPivGdsWNcNCuJMQ,2819 +torch/_inductor/autotune_process.py,sha256=WK-EIPatq5aiIa3hk5P6pA1Po8gU1I6zF2fXHKq_Hdk,29587 +torch/_inductor/bounds.py,sha256=bE5taWXP4YZqnP_eFFYtImfsYZt2O9gXDbHsDTtqVwc,5794 +torch/_inductor/codecache.py,sha256=eZ33go93kPxtNoNEdvDBaOrrjF8eTTQC0LpOqMpfy_s,126953 +torch/_inductor/codegen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/codegen/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/aoti_hipify_utils.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/codegen_device_driver.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/common.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_gemm_template.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_micro_gemm.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_template.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_template_kernel.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_utils.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cpp_wrapper_cpu.cpython-310.pyc,, 
+torch/_inductor/codegen/__pycache__/cpp_wrapper_cuda.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/cuda_combined_scheduling.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/debug_utils.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/halide.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/memory_planning.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/multi_kernel.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/simd.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/triton.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/triton_combo_kernel.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/triton_split_scan.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/triton_utils.cpython-310.pyc,, +torch/_inductor/codegen/__pycache__/wrapper.cpython-310.pyc,, +torch/_inductor/codegen/aoti_hipify_utils.py,sha256=lRTDWgj8hsdyNstTOiV0T9jcosEohOELVUIjCwKbQVI,1296 +torch/_inductor/codegen/aoti_runtime/implementation.cpp,sha256=tDUF_wcnVyJ1vUp9IshGLWJTdX40fu_yIrKG2zY2jDQ,3063 +torch/_inductor/codegen/aoti_runtime/interface.cpp,sha256=kgdAcedjoKkBx6CB6rXvrc4W2qrvH9JzgqLTXrXHL_Q,12901 +torch/_inductor/codegen/codegen_device_driver.py,sha256=FSjRcZz7IkHXtWRYCyI16joMw3Ph415zWCyO9aV4wsw,3428 +torch/_inductor/codegen/common.py,sha256=-On6QQzDTnEkRCiZRhU87PX74koYEYbWXGzHRFEgFOY,76103 +torch/_inductor/codegen/cpp.py,sha256=OKEbSCiABbJB5qTAXoQ_4R8MiIUe9qyZXZGJTPm6W6U,197259 +torch/_inductor/codegen/cpp_gemm_template.py,sha256=KMuz7192qhuUgpNY8sAB78tB1lVm-pIp4oMBMRyhpsk,46120 +torch/_inductor/codegen/cpp_micro_gemm.py,sha256=ZuVLdsI10Lw21RYQvoxN_mZE_VGMesmgievOkdjg2Is,27881 +torch/_inductor/codegen/cpp_prefix.h,sha256=B5zY1n6-OlLgsdPd2IMfEnFDjtuQizJzz_cVqqSVTOg,32020 +torch/_inductor/codegen/cpp_template.py,sha256=GTgeVfyqjA6Y7EF949O_yHaIZ1-qvXVmpSgh4O3bnRE,4316 +torch/_inductor/codegen/cpp_template_kernel.py,sha256=58mSLa-u2SScoA67Kz4Qn6wrpSRq356obRR9lRM0834,15350 
+torch/_inductor/codegen/cpp_utils.py,sha256=f1lqOPiQkEEfq9iXR82bJdxNVxkvB28EN8CZfPW201k,32695 +torch/_inductor/codegen/cpp_wrapper_cpu.py,sha256=uRvyHUyANJk6RvJSSHAow6jm2QzaapEIjW1ypyuak-I,114477 +torch/_inductor/codegen/cpp_wrapper_cuda.py,sha256=vFkJJHwVIn7NRgzWAIXGI5VScwDyUrA0yICGVwsbJnM,15501 +torch/_inductor/codegen/cuda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/codegen/cuda/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cuda_cpp_scheduling.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cuda_env.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cuda_kernel.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cuda_template.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cutlass_epilogue_gen.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/cutlass_utils.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/device_op_overrides.cpython-310.pyc,, +torch/_inductor/codegen/cuda/__pycache__/gemm_template.cpython-310.pyc,, +torch/_inductor/codegen/cuda/cuda_cpp_scheduling.py,sha256=6WzFvWZoqXNRB2VpwzDTSOy3JioVdPVglQzlF5Slc8E,4312 +torch/_inductor/codegen/cuda/cuda_env.py,sha256=asixCmqKGVGqm8_6mmxn_d3V2fjEHgBnFkG8HjurV0s,1138 +torch/_inductor/codegen/cuda/cuda_kernel.py,sha256=FDuRW3AwK6YXGMP6LomR9Mi4OnlTpDJAHk-HrfHG2nM,13494 +torch/_inductor/codegen/cuda/cuda_template.py,sha256=0EOD6GMKEUL6U_hsfsSjN7woLVWzyxyMIm74tIXyq-M,8920 +torch/_inductor/codegen/cuda/cutlass_epilogue_gen.py,sha256=XaHvTvMKVx-ZR7QmGFzPeq_IFFR4OkX09meWYP_nCOo,14470 +torch/_inductor/codegen/cuda/cutlass_lib_extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/codegen/cuda/cutlass_lib_extensions/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/codegen/cuda/cutlass_lib_extensions/__pycache__/gemm_operation_extensions.cpython-310.pyc,, 
+torch/_inductor/codegen/cuda/cutlass_lib_extensions/gemm_operation_extensions.py,sha256=NDni3ToCsHJdjMbRT7LWaIBob463Uq4XW_1rhbNAJB0,9810 +torch/_inductor/codegen/cuda/cutlass_utils.py,sha256=xNmMulhaWEnYo_if22VkxWDpDtF5r-wF1C3xZMxOzOc,12061 +torch/_inductor/codegen/cuda/device_op_overrides.py,sha256=r1Fjm3aXSqxe-kn15TJ1G1U17hTce2RyWmuDbexLfBk,591 +torch/_inductor/codegen/cuda/gemm_template.py,sha256=8aYnTSvH2QOqnqj_BtqsifSfePmQSG1WQIIHSMzCRkQ,62478 +torch/_inductor/codegen/cuda_combined_scheduling.py,sha256=5rkfqa9OQSMkBQOI5j7dpsWly1NsUBJGfZ55MGQOTmU,3911 +torch/_inductor/codegen/debug_utils.py,sha256=BfD-2Z1iys9QyGl1_ltyUq8bl9FiMSIm9jR5jww1X_4,6646 +torch/_inductor/codegen/halide.py,sha256=n3QEas2C0JXVhBwGX0wUah-pb7EiIu_MVTCIAQDVo2Q,62274 +torch/_inductor/codegen/memory_planning.py,sha256=Mq4Xnh6395gECfKTT7y3tU5l7GXVGIkfiUsL0mJsL-Y,24920 +torch/_inductor/codegen/multi_kernel.py,sha256=vYU25LKgPf8UUiK4Wp4xI8-YDHaIvlTAo6erWNq5ToY,13497 +torch/_inductor/codegen/rocm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/codegen/rocm/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/ck_template.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/ck_universal_gemm_template.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/compile_command.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/rocm_benchmark_request.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/rocm_cpp_scheduling.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/rocm_kernel.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/rocm_template.cpython-310.pyc,, +torch/_inductor/codegen/rocm/__pycache__/rocm_template_buffer.cpython-310.pyc,, +torch/_inductor/codegen/rocm/ck_template.py,sha256=2tgNxbETSUJWJA1XiEdeHQuk3W948D9moiHMC4TLIM0,2963 +torch/_inductor/codegen/rocm/ck_universal_gemm_template.py,sha256=dpzubpPmWomYvyqZxpwIwWOYHko8TWIyWbSj3xTP8JU,15634 
+torch/_inductor/codegen/rocm/compile_command.py,sha256=klNs6xm6mmTIKpQZXXFwrK1UI1BAuQLOi1Gpp-81dQQ,3762 +torch/_inductor/codegen/rocm/rocm_benchmark_request.py,sha256=StGvgMjGIuBOv4Sbg3IDpDLjA_2M7z0sLvdxFjX737Y,4845 +torch/_inductor/codegen/rocm/rocm_cpp_scheduling.py,sha256=nLcsZh4DdmT7Dr-fynqaPR9IJfjusJ-RQ5-FhavabSQ,3773 +torch/_inductor/codegen/rocm/rocm_kernel.py,sha256=Qpp14OZnvdhZ_dHjmjtQSuYr8IFCVHJxgrBOmXq3_Bk,8226 +torch/_inductor/codegen/rocm/rocm_template.py,sha256=CsGWFD0LhPHrL9Ji5OlsCajU2Jw7yXMO4un6hrjX4K4,5789 +torch/_inductor/codegen/rocm/rocm_template_buffer.py,sha256=N_5QNxQhIXYOh0mJaSsLZazJg4wZQ8tBwFaq5RqPQFc,630 +torch/_inductor/codegen/simd.py,sha256=hcvCiVfuYU7gWh0gkhRIg8vAiNyX_F_SlLP_fi4BxI8,71926 +torch/_inductor/codegen/triton.py,sha256=R-3D7PK67ileLEZTHsfzACoGVMMApixgmRxxY6tOTWc,123627 +torch/_inductor/codegen/triton_combo_kernel.py,sha256=CW2grrfs0MxUCvOtqEw7UttRGQsktaTRzI9nnyOu4Hw,46800 +torch/_inductor/codegen/triton_split_scan.py,sha256=NgCm5x7HHC5DzflZzvfyXMNeK_YyUxDAQq-yCHzzobo,6495 +torch/_inductor/codegen/triton_utils.py,sha256=mccb7iahmUFPaN7ySrGjYbHOqxkbhhTi71BADbmbYZc,5791 +torch/_inductor/codegen/wrapper.py,sha256=59dKmfrEteGOpONftheHuNtBXKvaS0dtpnfi1-PJOaA,81224 +torch/_inductor/codegen/xpu/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/codegen/xpu/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/codegen/xpu/__pycache__/device_op_overrides.cpython-310.pyc,, +torch/_inductor/codegen/xpu/device_op_overrides.py,sha256=aewx5sDrT8iSswEiafQteP_oJn_r26ppc5rh4h7CZdQ,584 +torch/_inductor/comm_analysis.py,sha256=ggFRjtJx5xdJPCS8_c1HPjdQcwL_Qg87cXocHjs3FsI,8286 +torch/_inductor/comms.py,sha256=FTanhccY0TdtB-5G-ZQU5s20se5gnyh02GxYO4szqUo,23636 +torch/_inductor/compile_fx.py,sha256=7hFfnTp0mkP7TSm0gZFUHB5v4J_deiu-HZqZxkHoIyo,62309 +torch/_inductor/compile_worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+torch/_inductor/compile_worker/__main__.py,sha256=dSi2wwrmDenQ_5PG28nk6KlbCLejO35lcQmBrrnQoAE,1261 +torch/_inductor/compile_worker/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/compile_worker/__pycache__/__main__.cpython-310.pyc,, +torch/_inductor/compile_worker/__pycache__/subproc_pool.cpython-310.pyc,, +torch/_inductor/compile_worker/__pycache__/watchdog.cpython-310.pyc,, +torch/_inductor/compile_worker/subproc_pool.py,sha256=obV0O8q33jsz7wUkVB4YXTTTMiwBLdXR9Jd_AXwv6vk,10802 +torch/_inductor/compile_worker/watchdog.py,sha256=nq7pEJfYP9XF5TDE-7pNTSLDnMHZJOek24Znqzvmb-k,1246 +torch/_inductor/config.py,sha256=cf-wi1-RqyLIQ68rETm5AR8u_Lf32MaCXQDzm7aR_go,48001 +torch/_inductor/constant_folding.py,sha256=cI65kc0HNH-Hml8jT05PjDjH1XDAHGAv-PESLbnNzxM,12326 +torch/_inductor/cpp_builder.py,sha256=uUfd4LTZiLOrUAUbe4iTNSDNP6pWS0aJ9_TmyT3taCs,52312 +torch/_inductor/cpu_vec_isa.py,sha256=EOMjmkUBsoVfG3fg57PMvtEFr06pRfzyObHMXEwJx74,11838 +torch/_inductor/cudagraph_trees.py,sha256=eQBss-xRIgdV5OLI6CqLA1cFiDun27x24eqmS401y6I,97957 +torch/_inductor/cudagraph_utils.py,sha256=QRuOmh20BoGIcPgeQvwWvoLprjB5WzvrjisiEt2cYpE,10877 +torch/_inductor/debug.py,sha256=qxCsUfg0aW9vFQDQvTZJF7TjL3pE70TcuSjA8Z-OzKo,22388 +torch/_inductor/decomposition.py,sha256=9dh9gGkiAE8ima9ffPpSowv3iEadCvUoi8cm_OxNduc,31397 +torch/_inductor/dependencies.py,sha256=j01JsxYvZEfPvE7HbR0zMT88bEmKQYLz9uvutHUMOyg,26565 +torch/_inductor/exc.py,sha256=h0jrs2bkQiLcGCL-YkzfjFl6I1skzlh5npCi0PymjHg,2836 +torch/_inductor/extern_node_serializer.py,sha256=8U53laLRbh8yjqgC38lA8R-VWYrmm3MKQmwKnOHLswc,859 +torch/_inductor/freezing.py,sha256=e8dl9oFKxV2SyTPtiJ8_ttBi0H7XkAXgu48qvnOKkVk,10085 +torch/_inductor/fx_passes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/fx_passes/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/b2b_gemm.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/binary_folding.cpython-310.pyc,, 
+torch/_inductor/fx_passes/__pycache__/ddp_fusion.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/decompose_mem_bound_mm.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/dedupe_symint_uses.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/efficient_conv_bn_eval.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/freezing_patterns.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/fuse_attention.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/group_batch_fusion.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/joint_graph.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/micro_pipeline_tp.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/misc_patterns.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/mkldnn_fusion.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/numeric_utils.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/pad_mm.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/post_grad.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/pre_grad.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/quantization.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/reinplace.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/replace_random.cpython-310.pyc,, +torch/_inductor/fx_passes/__pycache__/split_cat.cpython-310.pyc,, +torch/_inductor/fx_passes/b2b_gemm.py,sha256=a44mb9ZjJPCvxKTYUxk6AMh2wQ8nY18ijdK7qQHVXiQ,24596 +torch/_inductor/fx_passes/binary_folding.py,sha256=v0H6w5aObryQeKyPQOQDqU186LDI9bFnn6YJOYhtPGg,10624 +torch/_inductor/fx_passes/ddp_fusion.py,sha256=mFmZ4eIp4zv2HcWOuYPzgXU3G649Ixcnr0glIRXy-Xc,21197 +torch/_inductor/fx_passes/decompose_mem_bound_mm.py,sha256=25NXii5ZvaTZdMniqPS6kW1ukZzUVOtibtNHDhJspL4,4844 +torch/_inductor/fx_passes/dedupe_symint_uses.py,sha256=tyFhbVXPo64f85PLn9mWfchHHILK0NnNj2auzLIg2X8,2452 +torch/_inductor/fx_passes/efficient_conv_bn_eval.py,sha256=jsMXLn0mtZgu1NnpdwF0K-curdzKlmV294h9LkVw9So,14014 
+torch/_inductor/fx_passes/freezing_patterns.py,sha256=uQTc3lmw9HJMRq08ET_qAySjN9KYGe1KMXQSuC0D5vs,6610 +torch/_inductor/fx_passes/fuse_attention.py,sha256=D68BeBvKD3ItwIu6Rtnu09enFkjgbSWqbCpw78m3ELg,29653 +torch/_inductor/fx_passes/group_batch_fusion.py,sha256=qQK1q0c1ihhUTzzVsScno2Yf3x0keZRfUGYKj-8Bdgw,52358 +torch/_inductor/fx_passes/joint_graph.py,sha256=g44euFWNtvRHiGPO7c-jhXeJUvj2SlZhKo8wpnPxWlY,25241 +torch/_inductor/fx_passes/micro_pipeline_tp.py,sha256=Uhl6vFSJbOMMk0V5PVJk2p26WwoZgT-nxrd0pRe-Nxs,28818 +torch/_inductor/fx_passes/misc_patterns.py,sha256=ziyx2K0GmkmnpASId-m77LiSRP83Xm6q1gRP8uys3Os,4771 +torch/_inductor/fx_passes/mkldnn_fusion.py,sha256=wbwSkukxjQam4GkhPSOVRKK1UFD7xkvYjKRr2vFBfcQ,49232 +torch/_inductor/fx_passes/numeric_utils.py,sha256=fVhJ8EqguMlQTiPsdgCmlbuIwjqWsU4LvHESafNMEMY,7221 +torch/_inductor/fx_passes/pad_mm.py,sha256=JcSq-VPpbghzTTg25zRmB6Q3CynLXZIQPiKLMxxQXpQ,27862 +torch/_inductor/fx_passes/post_grad.py,sha256=l-MHR2VlIfbxUIPKb7lA7X5vCz8mWRQFP9s6hVb-brE,42933 +torch/_inductor/fx_passes/pre_grad.py,sha256=HMlfp4vt5RLx2TF1RAk1csNLoSzssZSDEtnc120FzrE,29549 +torch/_inductor/fx_passes/quantization.py,sha256=x38669SRt6BnXEXLNrDPG4zSHcicY8J0wmDD31Pppv0,95130 +torch/_inductor/fx_passes/reinplace.py,sha256=WoAcHcpGrIbLncAl6JroxnsFM8cmSSCdWwsGTKs3-NU,26907 +torch/_inductor/fx_passes/replace_random.py,sha256=kAmO5rs-VvOI6mpWgf46ec4JKGFJscj2xvs3z7r8H08,4104 +torch/_inductor/fx_passes/serialized_patterns/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/fx_passes/serialized_patterns/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_1.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_10.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_11.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_12.cpython-310.pyc,, 
+torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_13.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_14.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_15.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_16.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_17.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_18.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_19.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_2.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_3.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_4.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_5.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_6.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_7.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_8.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/_sfdp_pattern_9.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/addmm_pattern.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/bmm_pattern.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/__pycache__/mm_pattern.cpython-310.pyc,, +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_1.py,sha256=mfiDYbifL-bBBMWhnJYSEWl_1-pLY9i0oBtrSp64V8k,11161 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_10.py,sha256=do1sh0dYNR59rvUYfx3VVssWO39IZR7747BG2SDwb3g,14200 
+torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_11.py,sha256=OtF9-4aPCqGomdvgLKAJAEnRx03hz3O6KVytXzM3ivM,13971 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_12.py,sha256=cK3LG2Q2YtHF4AC95fRF38am9ZlyLMp1Y-V-4XXVKGg,15241 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_13.py,sha256=riABFHI0voaPFScteVIoRtutwwEpguztjTuG-u97_Iw,7846 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_14.py,sha256=o9ST0KHnzKp6jQNc_4Ifqz4dXIkWOru2GmqZ15qFOUk,14307 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_15.py,sha256=dcky-P2Hky7E9GR70s82FgxiwYkNGLV12l_IsP76Tzk,16037 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_16.py,sha256=IVlXH1GGw1oEdfqGmJqADzI2V-RYRvy4ZwVbVVft7R8,43580 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_17.py,sha256=vQMlkIzW7B3jPbKZVal-HKr3luFEyoEzXuHjc2Cq5gI,17265 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_18.py,sha256=UvjClFasyH7-RPb4xZni4EEJ0cTBv3YfznAYfNXW_Ic,32720 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_19.py,sha256=73FaGLvS6u5X_GaJfwvVkGlPCb6fYYzuDNwJAigf5zE,14030 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_2.py,sha256=LvFtEWyMQ9laLxtXS_cd1OCrc1O1icTQ7yXiTQGriXk,11171 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_3.py,sha256=n14Di7_dlRvkxfK_50n2DYOEbjC-bqDzpCorJNEhbsw,12431 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_4.py,sha256=srq-S9xsoUIUvuLV-wXxikxLrqSGaS8BAqt_UfQh9eI,12395 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_5.py,sha256=xj5RejgSZy9Oh9sIuMKVHeeEWYFUAku9VmSt8AbPyTg,11397 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_6.py,sha256=TX8RkTQLt1EhePpSHkHcpzKAGSdgxJxVRNW9SjRMQF0,12625 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_7.py,sha256=O-cfVWDDfrPDcdJbd0sySeWAOIMaJeHXnSRVREpR20I,15420 +torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_8.py,sha256=cSfm0SGhnIl5_bQ99_k8Sq8WvivPlF-Cm346QmmDh0g,14188 
+torch/_inductor/fx_passes/serialized_patterns/_sfdp_pattern_9.py,sha256=CFn6CWUrkd7Iaut3sHsUQix4u9CUJyh0UGz39e866TQ,15428 +torch/_inductor/fx_passes/serialized_patterns/addmm_pattern.py,sha256=VyTxQS2yILWJNoWZ85QDqr7trPRy9dl_nmlyDFS69q0,1842 +torch/_inductor/fx_passes/serialized_patterns/bmm_pattern.py,sha256=qd_4tLdmx1o2QOpWYs7a2-RHItbEL3bjmhme7xzF--4,1256 +torch/_inductor/fx_passes/serialized_patterns/mm_pattern.py,sha256=-Qph-N1XH_S5sWph_D4_D5lsdZUkjOUVFdLaHY00WFo,1244 +torch/_inductor/fx_passes/split_cat.py,sha256=Fn0KJXUTvPgwuBjiv53US1ckfL1Bs0o31H68j17y8Q8,100985 +torch/_inductor/fx_utils.py,sha256=IkVl-p71GQ_vI5nCW2Z7bZHoa8l4F1cX5MgroWtOLyo,8916 +torch/_inductor/graph.py,sha256=VG0Zc1lr34laS8PKAva54Zqwkclh8NY0lo60ZyJ0zkA,81547 +torch/_inductor/hooks.py,sha256=lyRzyXmeul-bHwVVMsChEshWylPo43wnZcIlBbLDgqY,645 +torch/_inductor/index_propagation.py,sha256=iMpxBTVQ3SxYW9C8AbL4eGHLb-PusGUmkGMBcqFJgMM,12928 +torch/_inductor/inductor_prims.py,sha256=FpVK-7xYs0MxudStUL7b5yt8Lc_gzDVkniexr8vXA3A,5642 +torch/_inductor/ir.py,sha256=ngFm2xlwi-VTv8wiUsfpQ29f6sd6TPa-IRFY-o_pLCI,240835 +torch/_inductor/jagged_lowerings.py,sha256=FGLLyaZFimtIH4vHCSCkIe6Ao997j1Sd-fo6s9bBAto,8823 +torch/_inductor/kernel/__init__.py,sha256=H6IReWIOt8twLwq3VA3cggOOQd5_EDJ7o48hU5K0RAM,57 +torch/_inductor/kernel/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/bmm.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/conv.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/flex_attention.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/flex_decoding.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/mm.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/mm_common.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/mm_plus_mm.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/mm_scaled.cpython-310.pyc,, +torch/_inductor/kernel/__pycache__/unpack_mixed_mm.cpython-310.pyc,, +torch/_inductor/kernel/bmm.py,sha256=URjdLCYV9NsM0Knt2N_8ucAagnQtpBIVOLZWI4IKJUM,6810 
+torch/_inductor/kernel/conv.py,sha256=OuGcuNmwm6_4gRArIWbHkYi1HBQP6OmjYBQcgi1aDbM,20743 +torch/_inductor/kernel/flex_attention.py,sha256=LrSVzAjxmcBg_XoEpEs8DBzDyG54NMUNCWF2wZfkXqg,66550 +torch/_inductor/kernel/flex_decoding.py,sha256=IkCxuYjorqVBoVkaIJBVbg-qyvKYw52pby60YjymtUs,20302 +torch/_inductor/kernel/mm.py,sha256=SSX0tHdwSFyNkYAg87sTIMlB6L7kiunWqxyYAtJ0-Xk,25845 +torch/_inductor/kernel/mm_common.py,sha256=RjwaelsCGW_23f0L6BOlRkHpnsVJgRk30VaMWgC93vA,17273 +torch/_inductor/kernel/mm_plus_mm.py,sha256=abIajeOfckX2j0hBcFws0JnwBRMDppAwBs1OyA1Kmhw,7800 +torch/_inductor/kernel/mm_scaled.py,sha256=WsV9uayhdv_xtfAhEF2VOuVIvErOOYMwN9urKic5Nz0,10585 +torch/_inductor/kernel/unpack_mixed_mm.py,sha256=G6tIfdh0PhE6_o4236Ee3gAIHCffcDdfp7BvbLbmnoE,2962 +torch/_inductor/loop_body.py,sha256=IwwghxZrRk2dUJSJmACy6CrdFWSt85LAJkdFNpsa5gk,21828 +torch/_inductor/lowering.py,sha256=ulGBT8rfO9LtxEbx7_GHLDUxO7gqZN1T0Q5jEJzNdso,209697 +torch/_inductor/metrics.py,sha256=X7QYjaqL1sv13DZeeC9XvPJ3G_8QrHxfyTshG8baaTI,12970 +torch/_inductor/mkldnn_ir.py,sha256=mBjGU3oxIkQKoXeViHhqIsH89JiGzAZaztoZigdeIdI,59337 +torch/_inductor/mkldnn_lowerings.py,sha256=23WRf3NRsfyQmGOxJYOnqr42DNRqvtL09hXwTHgKItU,43927 +torch/_inductor/ops_handler.py,sha256=oLvy9VXUloBGKGuM3BfPtr6dbKYIn3d2fFe6WavN6Z8,31016 +torch/_inductor/optimize_indexing.py,sha256=qF-6r1GwU5Tlq8nSYZFeMymd9VKoR_dUoU-COkAxPtw,3950 +torch/_inductor/package/__init__.py,sha256=ubkupRpGNvZ2vAiNylY-j50Oa-78SSO0u1pV9kSACn8,48 +torch/_inductor/package/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/package/__pycache__/build_package.cpython-310.pyc,, +torch/_inductor/package/__pycache__/package.cpython-310.pyc,, +torch/_inductor/package/__pycache__/pt2_archive_constants.cpython-310.pyc,, +torch/_inductor/package/build_package.py,sha256=ZS3248m9MtGnCGG0YWrne2y7FDTIoF2ZWmKdxvIRzMg,329 +torch/_inductor/package/package.py,sha256=YK5YQdA5z7TIOgBZJgJSxGwFPFc4nJTBeT1uRJ8p5FE,8481 
+torch/_inductor/package/pt2_archive_constants.py,sha256=0fEVbGgiMHyDurzE6Tc5RdqDcuo4GpvecBpVmuVPeKs,545 +torch/_inductor/pattern_matcher.py,sha256=YbczcU7IVjXh2WAPY49UhHxuak7cJCb5FUB9jgzWf18,69657 +torch/_inductor/quantized_lowerings.py,sha256=ITfGpuHUtn6j0cUeSYsGdJf4Dh9g1wqjaFBxly2vet8,2989 +torch/_inductor/remote_cache.py,sha256=0JaCCEYfVcfMlDV2_1hGUzBgT5nVfixzeorVEL-H90I,5715 +torch/_inductor/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_inductor/runtime/__pycache__/__init__.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/autotune_cache.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/benchmarking.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/compile_tasks.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/coordinate_descent_tuner.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/halide_helpers.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/hints.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/runtime_utils.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/triton_helpers.cpython-310.pyc,, +torch/_inductor/runtime/__pycache__/triton_heuristics.cpython-310.pyc,, +torch/_inductor/runtime/autotune_cache.py,sha256=vBQ3LgKBehxuCMPNr7IlL0r2ayw5mGVMNCBL7IuP_GQ,7564 +torch/_inductor/runtime/benchmarking.py,sha256=IHkKhrYRkXsD4gFLsH4sI3RY1_hDnGKi1jmYgEiDiQs,7680 +torch/_inductor/runtime/compile_tasks.py,sha256=Estyti5vzu0VdZVEA3FqHhbTQmQqySNiE04cjLE-Y0I,2075 +torch/_inductor/runtime/coordinate_descent_tuner.py,sha256=IOQhUjyEdK4OgCaRQOVoEAW2OWZM-cccVzVlRdu4YWI,10021 +torch/_inductor/runtime/halide_helpers.py,sha256=w_c4sNXCKxXI5KXxSJtQ6c4fLyANfXTsUGJ_WF40I8c,3540 +torch/_inductor/runtime/hints.py,sha256=sZLZGg-Xhdf1727fGF08jt3WBNGoqn_yXEs6ZbS-wX0,5567 +torch/_inductor/runtime/runtime_utils.py,sha256=0mai0Pc6bZ3rQT8X1qz8_8OEwsoEYCb_Rnf8Y8CT1Cw,3949 +torch/_inductor/runtime/triton_helpers.py,sha256=u9QlBnncP9AJc-eK4wZE5VbBwaG4ah2QZUrDg-JPDHM,16121 
+torch/_inductor/runtime/triton_heuristics.py,sha256=SwfZz1dvBSQMZFG-eXVgNPj2tkrCyIu-69zp5XHcs-4,64338 +torch/_inductor/scheduler.py,sha256=M7OMzh_QeBl88iW-7Cql4xJrqpT0YPo3iICfxctVMgE,145902 +torch/_inductor/select_algorithm.py,sha256=DeBl16Wj2zy6O1HP8FTW9JnAtvpfDkoMHbnJNgXEvY4,61605 +torch/_inductor/sizevars.py,sha256=zoGTzrqXqLsIvTurWFTLdhZwdJaOemcY2XmZ5QOhYuU,34094 +torch/_inductor/subgraph_lowering.py,sha256=cXFaVmwiY5ZHSujGdOCHLgubry47rutfw2Hl-ixwOpw,4907 +torch/_inductor/test_case.py,sha256=5RPhaNLsO7fr4q4APNmlKG24cktUJuqhekjkRz-c9LY,994 +torch/_inductor/test_operators.py,sha256=PWf1zrubPdz6gHFuTa1wcDbjMoIGWkxCLWUlFjVukGo,816 +torch/_inductor/utils.py,sha256=AnpC86q7TrTJfTy2_7x4BrFMUL2HEVPu4TfapEwOjaU,63835 +torch/_inductor/virtualized.py,sha256=r_Rbnk2B-D9YX_1Z-XBzjzPQAULWG-JUb-u72-ssj8Q,12284 +torch/_inductor/wrapper_benchmark.py,sha256=7_9_yzPmnCsfSz3TJcrwxM9zELj-BIHuuSsJGGXzgE4,10449 +torch/_jit_internal.py,sha256=0hXbw5QiqjxQp2_foRBeCknkJEPrKgubAydmQVFvKJw,53643 +torch/_lazy/__init__.py,sha256=F0lAsONCnXzChhzI8m2QTyNg7WFrXHak0C38ZsPW2SI,1793 +torch/_lazy/__pycache__/__init__.cpython-310.pyc,, +torch/_lazy/__pycache__/closure.cpython-310.pyc,, +torch/_lazy/__pycache__/computation.cpython-310.pyc,, +torch/_lazy/__pycache__/config.cpython-310.pyc,, +torch/_lazy/__pycache__/debug.cpython-310.pyc,, +torch/_lazy/__pycache__/device_context.cpython-310.pyc,, +torch/_lazy/__pycache__/extract_compiled_graph.cpython-310.pyc,, +torch/_lazy/__pycache__/ir_cache.cpython-310.pyc,, +torch/_lazy/__pycache__/metrics.cpython-310.pyc,, +torch/_lazy/__pycache__/tensor_factory_functions.cpython-310.pyc,, +torch/_lazy/__pycache__/ts_backend.cpython-310.pyc,, +torch/_lazy/closure.py,sha256=6D49vdNnSafairB_HZo18IwSSppY1B-btsII3PCoOpU,5452 +torch/_lazy/computation.py,sha256=qGyb2-6Mk9HtK1ekYjC-RaRAFG8sUEAazCoO4BuCM1U,919 +torch/_lazy/config.py,sha256=sVrnixPoC3UnnFXsE0bGt5YiRapFAhb89lq1k_uh2-0,447 +torch/_lazy/debug.py,sha256=LOZyiVd9aFg1xYiUwq-FKA1jMxuGDyU0KcTpor81r1s,738 
+torch/_lazy/device_context.py,sha256=wXHSvwE2qmYdYlSiBF9o6gUks5t8I4icOoK1Wr0dzWg,657 +torch/_lazy/extract_compiled_graph.py,sha256=UoLsImv81ujKWhdepd9Tn9t8Om_PG71w0JxQSXCuEdc,8435 +torch/_lazy/ir_cache.py,sha256=HcW7N_L3ff-7_dDnpTsuh0ZQHWU-CSkawOriqQQCoL4,348 +torch/_lazy/metrics.py,sha256=XT4Y9Loj4sLSAjhIVBiZ6pKrUXFNd4yBi1YvFoCm7to,545 +torch/_lazy/tensor_factory_functions.py,sha256=_vbfXc_XMUD6dXa3dP-BHlwmc1nWhnZ5R1SkY9sIZ98,1368 +torch/_lazy/ts_backend.py,sha256=BfAAT0WhImXNRhQp3Pfbp1tLuKuy7UBt4OziWTEYi9o,163 +torch/_library/__init__.py,sha256=1Xif0f7qcsIsoqMvLXRLYxVMrTjNxm4xvfSx8uN4Frk,256 +torch/_library/__pycache__/__init__.cpython-310.pyc,, +torch/_library/__pycache__/autograd.cpython-310.pyc,, +torch/_library/__pycache__/custom_ops.cpython-310.pyc,, +torch/_library/__pycache__/fake_class_registry.cpython-310.pyc,, +torch/_library/__pycache__/fake_impl.cpython-310.pyc,, +torch/_library/__pycache__/infer_schema.cpython-310.pyc,, +torch/_library/__pycache__/simple_registry.cpython-310.pyc,, +torch/_library/__pycache__/triton.cpython-310.pyc,, +torch/_library/__pycache__/utils.cpython-310.pyc,, +torch/_library/autograd.py,sha256=6xEfLD4fI-ABL3W0h9VOAmKv4cEIbhg-boacscU49sg,8811 +torch/_library/custom_ops.py,sha256=ojt4A-7hmjUClUeyCGv2JlNLPZqszSGg6fxloV1D2Ls,34167 +torch/_library/fake_class_registry.py,sha256=RQeeDN0tF6WOl6c_X-5AVSj63Eud_bSPj2Nch9qrLZQ,12021 +torch/_library/fake_impl.py,sha256=b3qwalMnEUsr9HWUZ8lKei4AOh55uJQwGgPw0U5ZwUY,7863 +torch/_library/infer_schema.py,sha256=wlwIz8flPCs4vvlZj7quB-wmsiLy_bFjHfg-M8VWUCE,10892 +torch/_library/simple_registry.py,sha256=tLgDcxC-NYFYo_eLx9i7-24mWhk_tOmAumVjeRIXAuI,2638 +torch/_library/triton.py,sha256=hTDGNDPXHKB483On9gz2B8QNZfDx4Buu7nRW0HymYSw,9283 +torch/_library/utils.py,sha256=o0juWQW8rx81BMCxmZ7rdXhJRs_GspcdP-G8wLI6H5M,10610 +torch/_linalg_utils.py,sha256=z4b43SVKj2ibbk-F-lZft8gKxcAQrhRO9h_fwbP5jnU,5171 +torch/_lobpcg.py,sha256=qfMSK3hLa-6WnqK1TiwUnywptP7w1V1Wf7Rb2KV7C-c,43548 
+torch/_logging/__init__.py,sha256=BVwN4VV6WkrrehjU5XG0U4zQdjrc_tht179OQg69OEc,739 +torch/_logging/__pycache__/__init__.cpython-310.pyc,, +torch/_logging/__pycache__/_internal.cpython-310.pyc,, +torch/_logging/__pycache__/_registrations.cpython-310.pyc,, +torch/_logging/__pycache__/scribe.cpython-310.pyc,, +torch/_logging/__pycache__/structured.cpython-310.pyc,, +torch/_logging/_internal.py,sha256=WuvH4o4NJz3h6QSZO0ZBUopvZLT9dGzL1UsWg0ugkB4,41339 +torch/_logging/_registrations.py,sha256=oRzV-l_D5nZHoqAYE-jCvX9XP-mkLmHg9Oxse8YK1GQ,6424 +torch/_logging/scribe.py,sha256=Ukz7UvRJ0uqLHAVm8MPpYQakr2VP8y6zkYdm0Qk0ypw,2557 +torch/_logging/structured.py,sha256=d9uXMUDSCq3Z7wVMqY19R0Jjo4_4NC06O7RUYKIpkz0,1359 +torch/_lowrank.py,sha256=4fNhz1CrZP9o6eO4dmf-4qx5-ZZ6HSjdC1GFQ1SXupU,10561 +torch/_meta_registrations.py,sha256=TNLPaOTI-ElXTmZYaDRU1rkciXHDQDfJlMmCH8yCPm8,203287 +torch/_namedtensor_internals.py,sha256=EsJQ1IOY0FviEofxv3oCEctNUuQ_83vGoDxdZMT3XqE,5290 +torch/_numpy/__init__.py,sha256=bpeDB3XOafjktNj4KOvVeKopfQzI_munhA4k89ZdlW8,556 +torch/_numpy/__pycache__/__init__.cpython-310.pyc,, +torch/_numpy/__pycache__/_binary_ufuncs_impl.cpython-310.pyc,, +torch/_numpy/__pycache__/_casting_dicts.cpython-310.pyc,, +torch/_numpy/__pycache__/_dtypes.cpython-310.pyc,, +torch/_numpy/__pycache__/_dtypes_impl.cpython-310.pyc,, +torch/_numpy/__pycache__/_funcs.cpython-310.pyc,, +torch/_numpy/__pycache__/_funcs_impl.cpython-310.pyc,, +torch/_numpy/__pycache__/_getlimits.cpython-310.pyc,, +torch/_numpy/__pycache__/_ndarray.cpython-310.pyc,, +torch/_numpy/__pycache__/_normalizations.cpython-310.pyc,, +torch/_numpy/__pycache__/_reductions_impl.cpython-310.pyc,, +torch/_numpy/__pycache__/_ufuncs.cpython-310.pyc,, +torch/_numpy/__pycache__/_unary_ufuncs_impl.cpython-310.pyc,, +torch/_numpy/__pycache__/_util.cpython-310.pyc,, +torch/_numpy/__pycache__/fft.cpython-310.pyc,, +torch/_numpy/__pycache__/linalg.cpython-310.pyc,, +torch/_numpy/__pycache__/random.cpython-310.pyc,, 
+torch/_numpy/_binary_ufuncs_impl.py,sha256=h_xCtCyRAdTEB2T_r0AZ25G7u9IM3PRPr1mVv8Cr0qY,1871 +torch/_numpy/_casting_dicts.py,sha256=EmT0GYUz7Ii1iT1prWZgsahqtZZ069I1O0T2pzhSlOE,42478 +torch/_numpy/_dtypes.py,sha256=vA1enP8uKq8exmJPc4z-BlgCMS4GJYY4nuTkZBFzGgA,10326 +torch/_numpy/_dtypes_impl.py,sha256=0Kr3Ykf9BS0Lz-UhDyNuvV72T0VDf2lYgZgMdtPRpvM,5907 +torch/_numpy/_funcs.py,sha256=vT_F_qnfL3CJoZr_R1NEHvETRCnIoEOrNGXVP5o-5aM,2097 +torch/_numpy/_funcs_impl.py,sha256=62rLgWByVo-7IEOw6y_EfV4mpsezJtO5Mcs0I9WpdhE,59200 +torch/_numpy/_getlimits.py,sha256=QRUbPnrehERJA9Wk01SdUaF1F2FhksfgpewYN6SmtgE,269 +torch/_numpy/_ndarray.py,sha256=TeHKdlLSq1FoY9RCH4SntPTB366KRvy5pXxKj6bYWwk,16634 +torch/_numpy/_normalizations.py,sha256=s2H3P_X612bygUNSPrnaSJ8hKwD7nucv2WDKFA-Kq8Y,8249 +torch/_numpy/_reductions_impl.py,sha256=kRfjvIs-8OMgrX0riDZ_8p8vKtFtRs1usCWbBUBvxDI,11800 +torch/_numpy/_ufuncs.py,sha256=CJoWG7TWKRZ2tM7JV3qsZXpAYlB2iRzfogs7721xGl0,8366 +torch/_numpy/_unary_ufuncs_impl.py,sha256=IO2kPjYhNoKCyu_9NcMFQF57le1U9VjFmqQFbsWA3Jc,1161 +torch/_numpy/_util.py,sha256=uUCtxtI1k7zTKkuVzitV6ktcCMgDvgVSY4DXBlLhnZQ,7557 +torch/_numpy/fft.py,sha256=lqeN-889bRRT8VjzgAaXoK9aAz8wdQdoiUSVN-oIhRQ,2805 +torch/_numpy/linalg.py,sha256=DV3U74reWhgh8vO-JasLZ7AO0sJ1lmh94sBozXrQMjc,5582 +torch/_numpy/random.py,sha256=yWr3GUftkX9_FZ10phn_V5SWy1gbzpZpnSqVQgqlhwo,4650 +torch/_numpy/testing/__init__.py,sha256=t5Re9c4lijwKoegYKS74CFviLCfSDSWZMvw78ohKYss,375 +torch/_numpy/testing/__pycache__/__init__.cpython-310.pyc,, +torch/_numpy/testing/__pycache__/utils.cpython-310.pyc,, +torch/_numpy/testing/utils.py,sha256=VqWx-Dt-Lxk2YpUsfDt_0mpWa7ZDKjT1aTdVEbjy7pc,76373 +torch/_ops.py,sha256=ffnnmYtsI0P0rlRwp7IdRYI2up6NJwNcFKZwRBMIAow,55360 +torch/_prims/__init__.py,sha256=i-M1DBLEWoZ11d3NO9ySf9S_heEsexyJTHhZ4HcbwAI,86364 +torch/_prims/__pycache__/__init__.cpython-310.pyc,, +torch/_prims/__pycache__/context.cpython-310.pyc,, +torch/_prims/__pycache__/debug_prims.cpython-310.pyc,, 
+torch/_prims/__pycache__/executor.cpython-310.pyc,, +torch/_prims/__pycache__/rng_prims.cpython-310.pyc,, +torch/_prims/context.py,sha256=4_CjeHUStaIQ531YveKgRHw1dOaDiMS7pO27dM2LNrc,5735 +torch/_prims/debug_prims.py,sha256=kPvBHAnYylT2_k5BYj5iHWjbsTP5D5dZgRTzvKyS45o,1889 +torch/_prims/executor.py,sha256=BLqWj1R_rjEl1ePTtjb5aLItbUPN8DD00-VBSLaKxiw,1663 +torch/_prims/rng_prims.py,sha256=ZJAQy-yPMasdEDIn7sFHQfH-RpoBoixkUGH_CBtYIM0,11528 +torch/_prims_common/__init__.py,sha256=D_xBCN6BUozlQGcZSM3wbg2a33QkyVBXQRmBQM8xGhU,65506 +torch/_prims_common/__pycache__/__init__.cpython-310.pyc,, +torch/_prims_common/__pycache__/wrappers.cpython-310.pyc,, +torch/_prims_common/wrappers.py,sha256=h6xVnT9mnuzjJY39rUgtPrhiKoVZgHmuD7Hce5KyV_M,16430 +torch/_python_dispatcher.py,sha256=GJXb9hbGZqYODYLON8rJ8NbctPaX0F25kGdYQmEPMwk,7135 +torch/_refs/__init__.py,sha256=WvDLETPFICf5pnvApzkDbgyrojO77eiNFWo2D3PVG_4,211415 +torch/_refs/__pycache__/__init__.cpython-310.pyc,, +torch/_refs/__pycache__/_conversions.cpython-310.pyc,, +torch/_refs/__pycache__/fft.cpython-310.pyc,, +torch/_refs/_conversions.py,sha256=BkNyamBLOQLlKy6mB4BSdPRjAKP1ghZ5xcyQDkUs-_s,3533 +torch/_refs/fft.py,sha256=LT_TNhKbnR65U28y6RC6UrVhIugTxWHkHdu0639KMr8,17953 +torch/_refs/linalg/__init__.py,sha256=Ez-B8lHqw_Ncai9jZXTSn-z7sPQa1jlXv-At1wDdSuU,10502 +torch/_refs/linalg/__pycache__/__init__.cpython-310.pyc,, +torch/_refs/nn/__init__.py,sha256=QjSRul0GklLvoKJ8hThMpCTBjilOW8e72oW9Z3tT-HE,50 +torch/_refs/nn/__pycache__/__init__.cpython-310.pyc,, +torch/_refs/nn/functional/__init__.py,sha256=q8Z78GJ4t_0TRl9aGEhYpfIEiBrPo580khX5wQLlac8,42157 +torch/_refs/nn/functional/__pycache__/__init__.cpython-310.pyc,, +torch/_refs/special/__init__.py,sha256=s57LwYwpJ8NHPEG4ETIkvXgpvjI3e6GiwaytmqrO23Q,6789 +torch/_refs/special/__pycache__/__init__.cpython-310.pyc,, +torch/_size_docs.py,sha256=a9eJ9LdmD6fxUucakpXOitwcvUh7MstBnLUiyJInpo4,908 +torch/_sources.py,sha256=JiN2_s9ljGqWXqYWNTI0Nhi7J9HCa9GvUavkYb6yVzo,4454 
+torch/_storage_docs.py,sha256=qk7arVcSHSVvsKgZQYtTt7jHuwF52QvbLz34pW96iz4,1377 +torch/_streambase.py,sha256=XA5sIHyX4jNjwuKgdCLSkZMJg-OAQCEbA8R3HWC_PW4,1108 +torch/_strobelight/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_strobelight/__pycache__/__init__.cpython-310.pyc,, +torch/_strobelight/__pycache__/cli_function_profiler.cpython-310.pyc,, +torch/_strobelight/__pycache__/compile_time_profiler.cpython-310.pyc,, +torch/_strobelight/cli_function_profiler.py,sha256=aMD5ISlMJAbPvI6kWtinABMnFVhCL3ZxStj7orpjJls,11477 +torch/_strobelight/compile_time_profiler.py,sha256=RB_sTcBUfFFR_OFySGv4ADHPOyX2DKHFJZ0iF3NHAMM,6197 +torch/_subclasses/__init__.py,sha256=7NDSoV4xw-GxCRtHasX_MFFLe_S1-1BHRFvoDcgE4J0,375 +torch/_subclasses/__pycache__/__init__.cpython-310.pyc,, +torch/_subclasses/__pycache__/_fake_tensor_utils.cpython-310.pyc,, +torch/_subclasses/__pycache__/fake_impls.cpython-310.pyc,, +torch/_subclasses/__pycache__/fake_tensor.cpython-310.pyc,, +torch/_subclasses/__pycache__/fake_utils.cpython-310.pyc,, +torch/_subclasses/__pycache__/functional_tensor.cpython-310.pyc,, +torch/_subclasses/__pycache__/meta_utils.cpython-310.pyc,, +torch/_subclasses/__pycache__/schema_check_mode.cpython-310.pyc,, +torch/_subclasses/_fake_tensor_utils.py,sha256=h9fb_EmwqPPZU_uGuwp7VKBEva14UytfOIPgwPjT0to,8512 +torch/_subclasses/fake_impls.py,sha256=yz4UoIQxdVxSWgc6m9h21dyKJAR1rwHbF8guC5YPrnE,32565 +torch/_subclasses/fake_tensor.py,sha256=sos1J1IdHz75kkhX1SvYbtGMsgBzmV2oPP4xJYMcDFE,94551 +torch/_subclasses/fake_utils.py,sha256=Ev01N3o0Vq8JXRSEqzOSCgiqYvDpJRrKDraibrpX-_0,7388 +torch/_subclasses/functional_tensor.py,sha256=_GB3_vYt1PO3wC6sxlSrTe1WPr6MGKDLEpqWkOXLf3M,36147 +torch/_subclasses/meta_utils.py,sha256=DJo0LOqc_DY_eTXGm8emyg3loDqsHgyCRBHxGqSlIS0,76148 +torch/_subclasses/schema_check_mode.py,sha256=8PLCWZovaeaD_HwP-UADADQulVQSwJqwfhSu8NvGIBM,8655 +torch/_tensor.py,sha256=Gq4Y-a6SUZzCwfaW2JEyehlLedykxQiKYcpOFTLfjtw,64095 
+torch/_tensor_docs.py,sha256=jMixqlFmVramLt7-vf2S280hrz05ZYVdjoUnz1IQB6o,142642 +torch/_tensor_str.py,sha256=qh_7whSuOMSfwrJsbgyFkf9C2FW9Phuv5zGmu3_sfBk,26980 +torch/_torch_docs.py,sha256=DiTi2tXxMt-tCCcvzryYXaCdG4skIuj6qvHxlN8Lcg0,420987 +torch/_utils.py,sha256=EM2FSEObqu6FIYW_HzQoyqCkiBF4lfk9TBpmx1nEoso,36971 +torch/_utils_internal.py,sha256=f-6t3GQBZpWH8xabVXv08AkRKyb_FCrk6oe9HJDu5-Y,11744 +torch/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/_vendor/__pycache__/__init__.cpython-310.pyc,, +torch/_vendor/packaging/__init__.py,sha256=EhCMuCSz60IgQJ93b_4wJyAoHpU9J-uddG4QaMT0Pu4,496 +torch/_vendor/packaging/__pycache__/__init__.cpython-310.pyc,, +torch/_vendor/packaging/__pycache__/_structures.cpython-310.pyc,, +torch/_vendor/packaging/__pycache__/version.cpython-310.pyc,, +torch/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +torch/_vendor/packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236 +torch/_vmap_internals.py,sha256=lWdrkQFNC_aidmKUSjtaD_HTErKlr4Cu6OrxytSKOKk,9464 +torch/_weights_only_unpickler.py,sha256=6joGFZIlSzfpdUJR0ka3gakq0L8XvMnuen0OIQdztCw,16161 +torch/amp/__init__.py,sha256=OuEdgK_zzuRWo5_vfM_VhTooVAm1BXTBkUWxLkY6zlU,181 +torch/amp/__pycache__/__init__.cpython-310.pyc,, +torch/amp/__pycache__/autocast_mode.cpython-310.pyc,, +torch/amp/__pycache__/grad_scaler.cpython-310.pyc,, +torch/amp/autocast_mode.py,sha256=9foGwV08q6nH6htpTYg7uyWliyXa1yqA4lrwJm0StMY,22079 +torch/amp/grad_scaler.py,sha256=60vJH9qYFbpAIG4tu2j5hFuFYi_uOpJhHyJ7WEWyB4A,30391 +torch/ao/__init__.py,sha256=EAzMQqTA846YhRXQQXdYgOZz9cVeJkhxW4diWQBXLG8,427 +torch/ao/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/__init__.py,sha256=vLzhDJU0TjahMIHiTa7AuSAYj2-0xQI62RFsZoL_W1o,528 +torch/ao/nn/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/__init__.py,sha256=NoXiZE3HP-YyIkiDngjkIROoZy_MzcI2sqeZDWGmmH0,949 +torch/ao/nn/intrinsic/__pycache__/__init__.cpython-310.pyc,, 
+torch/ao/nn/intrinsic/modules/__init__.py,sha256=FlHZWKH0Xumwd5xBvZgyeGkbYehn1oDCUnAqZ_c4WN4,655 +torch/ao/nn/intrinsic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/modules/__pycache__/fused.cpython-310.pyc,, +torch/ao/nn/intrinsic/modules/fused.py,sha256=8y9XVWBq2JV0yOhxkHr6wurXcSrjumyPF9WOEhjITTQ,9739 +torch/ao/nn/intrinsic/qat/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/intrinsic/qat/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/qat/modules/__init__.py,sha256=OIDHW4Q4Mjd_Y_eR1UWDLuPOVEIJOJJj5MbbaQRdI1A,547 +torch/ao/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-310.pyc,, +torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-310.pyc,, +torch/ao/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/ao/nn/intrinsic/qat/modules/conv_fused.py,sha256=Sqtq1o6qLDfgbfRAPD1QZyThIghCAcwqTXDh-VUYP_E,31254 +torch/ao/nn/intrinsic/qat/modules/linear_fused.py,sha256=BbJVioM3GtqUv-IztpsattWsK8bfVIehxqorB3PXCPA,6417 +torch/ao/nn/intrinsic/qat/modules/linear_relu.py,sha256=uutsUVhfy4tuxWtiTNampU_jQY5qtkpOqvFbwHY5jRA,1685 +torch/ao/nn/intrinsic/quantized/__init__.py,sha256=AyOx7RavGaehtAWX6dEUBQcrl23h2_S3Q9ZAWYimCLQ,236 +torch/ao/nn/intrinsic/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/dynamic/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/dynamic/modules/__init__.py,sha256=kbhh2dL33XSuVEAZNtSHMmt1papX7az6WlVALevMgZA,70 +torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/dynamic/modules/linear_relu.py,sha256=Rd5UpqoAW1EZKV9U_O6vfHuny7pi496YipAqjef1cnA,2000 
+torch/ao/nn/intrinsic/quantized/modules/__init__.py,sha256=8aofpYeXIA8WQaao5pvOjWtLnf4rmL9xlJO9hZ8aras,409 +torch/ao/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_add.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/ao/nn/intrinsic/quantized/modules/bn_relu.py,sha256=gcKTbETyammm9FcQ3GtaPpWsS3TerD8L3tdfk3sz3ko,3231 +torch/ao/nn/intrinsic/quantized/modules/conv_add.py,sha256=N1SsxDj7iONNW9jD5Lh_HT-qVX372WQ8MBYPBtKhFZ4,4328 +torch/ao/nn/intrinsic/quantized/modules/conv_relu.py,sha256=8SgAgTtc0c41HQ-u1FBkzD0gpzQ26lGgVi-9NaE2g24,8370 +torch/ao/nn/intrinsic/quantized/modules/linear_relu.py,sha256=SYMSU0JlnJkmBfY-PTzEmgexYdM201hR9gLkVfOUKdI,6754 +torch/ao/nn/qat/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/qat/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/qat/dynamic/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/qat/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/qat/dynamic/modules/__init__.py,sha256=_5hfV0E5b71TgYVQPrPF9QZNrnw5XPuZppEwj0QItaM,50 +torch/ao/nn/qat/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/qat/dynamic/modules/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/qat/dynamic/modules/linear.py,sha256=1ChEKvIgqA5CBvplJtsR8XrepbZ2_TNJtprKybVVOgc,1008 +torch/ao/nn/qat/modules/__init__.py,sha256=RmKZ7d0ds96EIRSqAGGTrDwuXVWerE_S_NCWuEuX_20,228 +torch/ao/nn/qat/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/qat/modules/__pycache__/conv.cpython-310.pyc,, +torch/ao/nn/qat/modules/__pycache__/embedding_ops.cpython-310.pyc,, +torch/ao/nn/qat/modules/__pycache__/linear.cpython-310.pyc,, 
+torch/ao/nn/qat/modules/conv.py,sha256=3m4-sWbmXlETXg48f9L4L-lfnqWfESG6E_EBfc18JWw,9593 +torch/ao/nn/qat/modules/embedding_ops.py,sha256=V56bIJnqj2Y5aATojcSemQYSrx_5VMHTD0bXxGuoNXM,7815 +torch/ao/nn/qat/modules/linear.py,sha256=JHD4Gp0UmhSKqLC38YHKmJVvuPO5rTVRU767TyAB5nw,3050 +torch/ao/nn/quantizable/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/quantizable/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantizable/modules/__init__.py,sha256=N6niR-YsfqqP_5a8ksCJ8BL-Ol1Mch_412g-I4Z8VeQ,145 +torch/ao/nn/quantizable/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantizable/modules/__pycache__/activation.cpython-310.pyc,, +torch/ao/nn/quantizable/modules/__pycache__/rnn.cpython-310.pyc,, +torch/ao/nn/quantizable/modules/activation.py,sha256=Aa1-6xFj2EOqRCfFGDWhRDb4H5AuMlDcXHxxbZgaX-Y,23142 +torch/ao/nn/quantizable/modules/rnn.py,sha256=QK6vA918SivDLmmmsEttOfCOJbpmBlkHcuSGl0ILHzI,17718 +torch/ao/nn/quantized/__init__.py,sha256=NtHecsYVtg9e1IhTCdvoXSV-oQEXRQnt54dZlScPo6I,686 +torch/ao/nn/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/__pycache__/functional.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/__init__.py,sha256=M0iylhjuqtPcbO2pAVdDg9n9e1ET359nQ9txOEErmMo,37 +torch/ao/nn/quantized/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/modules/__init__.py,sha256=VzgogVUFAj-yE8a5Vmjiq5TV3SP51t0wXslabLVPITw,413 +torch/ao/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/modules/__pycache__/conv.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/modules/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-310.pyc,, +torch/ao/nn/quantized/dynamic/modules/conv.py,sha256=ofA_r1xptsP7p5s1I1Uykb8Qm01t5oznFevJhES6qDg,17907 +torch/ao/nn/quantized/dynamic/modules/linear.py,sha256=I0U8Jf6tiOpBgeBTWJ9elbJGwGvWIKbyavhAtIMivFg,6333 
+torch/ao/nn/quantized/dynamic/modules/rnn.py,sha256=uDsJ2VBXW87mYXXoX9OAunfkgFbTm2Bh7sBG2tOtLSY,51339 +torch/ao/nn/quantized/functional.py,sha256=_mTFrs4h6MBjSKd5WVCtTubThCfISgAb3svT29FjlMA,29595 +torch/ao/nn/quantized/modules/__init__.py,sha256=vFYtEc5icLCdS6a_I7dJKANpFSmowEehAdgKXrYxX60,4521 +torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/activation.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/conv.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/functional_modules.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/normalization.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/rnn.cpython-310.pyc,, +torch/ao/nn/quantized/modules/__pycache__/utils.cpython-310.pyc,, +torch/ao/nn/quantized/modules/activation.py,sha256=OLHZVC8UAmCsDtVcnjvkiOIsCIaJu9hluWqC9mW-gbE,11591 +torch/ao/nn/quantized/modules/batchnorm.py,sha256=dGqNkPw-8OjPSAgDOXGzxWxZjhTmJl3FbVWZJKh0R38,4375 +torch/ao/nn/quantized/modules/conv.py,sha256=JRk8E_Sf-U4qGRWmk6gX2FqKIwU_7HAnuHS2oKGCoF4,42558 +torch/ao/nn/quantized/modules/dropout.py,sha256=5nPYMBmjOyEzOZj0lz_CtUmJASXrrEr63YJnVytPTQs,806 +torch/ao/nn/quantized/modules/embedding_ops.py,sha256=lUU9dMDMj1pejePlRd4_XQVL8BR8W9EuQ1jdf5q66X0,14502 +torch/ao/nn/quantized/modules/functional_modules.py,sha256=2J24nYGkO6UrGxzshMGRYxXMk0KmuTTn0CN74FfIKHM,9246 +torch/ao/nn/quantized/modules/linear.py,sha256=93W_dJigvvDu15G14Bkx8PPrdMumHmthDAokch3D59U,13632 +torch/ao/nn/quantized/modules/normalization.py,sha256=CxDQtLZpAMg3dsEDUdaiCymebZpVwM8gp7BJybFRrLw,9538 +torch/ao/nn/quantized/modules/rnn.py,sha256=2SKlwNQtry8QYzFRo9jguMiqlBDXVjFzhgxYQ8ellVU,1817 
+torch/ao/nn/quantized/modules/utils.py,sha256=rB3XTOd-17-iDGSmTUqcM-gd913629uTTjwdjAPIy64,4695 +torch/ao/nn/quantized/reference/__init__.py,sha256=hu3hyozMJtkc86Bu1v8tmyayuxAdeDSv0sUiOeNAr_s,284 +torch/ao/nn/quantized/reference/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__init__.py,sha256=YS8MYKlqBPoA2UEBcS-MgbOzZ8_HDp-7ET4BC7KtCBk,494 +torch/ao/nn/quantized/reference/modules/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__pycache__/conv.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__pycache__/rnn.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__pycache__/sparse.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/__pycache__/utils.cpython-310.pyc,, +torch/ao/nn/quantized/reference/modules/conv.py,sha256=4MmNPnyWGV8MHY9cwDEp0EGvA38_g3NXQlCpfUdOhWQ,15245 +torch/ao/nn/quantized/reference/modules/linear.py,sha256=GNat94vlH2O01OuiIU6Hr2xGPKoaRePixtyUyEKXw14,2238 +torch/ao/nn/quantized/reference/modules/rnn.py,sha256=kMmt0diZHlG5fIk0PTDWbCi6fEp6i4f2LREWBYNRNoM,29591 +torch/ao/nn/quantized/reference/modules/sparse.py,sha256=guwRYTl1fvugzVb-b_uMa_P8u_0swdP7U_vvEGA0kHA,4679 +torch/ao/nn/quantized/reference/modules/utils.py,sha256=19OzQaJy7d73e9s0cKivB8zoLqQM6PwEBFlpeFlQr0c,15236 +torch/ao/nn/sparse/__init__.py,sha256=PfB-tgPOelyV_0eb_ipJK4LzPHwB5Z-wfJXeE_O3AK4,24 +torch/ao/nn/sparse/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/sparse/quantized/__init__.py,sha256=xO8RdXEcj7ggZh7-f2bReVBRO-F6PNBZjb5DBPwV52w,168 +torch/ao/nn/sparse/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/ao/nn/sparse/quantized/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/sparse/quantized/__pycache__/utils.cpython-310.pyc,, +torch/ao/nn/sparse/quantized/dynamic/__init__.py,sha256=lYDGtZ8rNR56jJLk6s5WOJ65E7qqQ_LFICpuD76NWYI,57 +torch/ao/nn/sparse/quantized/dynamic/__pycache__/__init__.cpython-310.pyc,, 
+torch/ao/nn/sparse/quantized/dynamic/__pycache__/linear.cpython-310.pyc,, +torch/ao/nn/sparse/quantized/dynamic/linear.py,sha256=NC70dgKcus60JaiTpjLaJrXsdoy1bxBKH3W9mGzkJ7s,6326 +torch/ao/nn/sparse/quantized/linear.py,sha256=63EyBgp_ZOy__rXzUXkL53qhr02PvOcSbt9XqlGRFvw,9040 +torch/ao/nn/sparse/quantized/utils.py,sha256=W0MoHdPgrHp-P95ZK-MF5dVGgLYlyUX87AEGbqo3yTc,1873 +torch/ao/ns/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/ns/__pycache__/__init__.cpython-310.pyc,, +torch/ao/ns/__pycache__/_numeric_suite.cpython-310.pyc,, +torch/ao/ns/__pycache__/_numeric_suite_fx.cpython-310.pyc,, +torch/ao/ns/_numeric_suite.py,sha256=Ess9o_TneCCEMK_cK15tu_n2ihULt8fcc7x_9IIhX20,20028 +torch/ao/ns/_numeric_suite_fx.py,sha256=6dK6XwBP60lS6uK9usOl7XfUYlyEXJoxre4K4zBDsA8,41520 +torch/ao/ns/fx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/ns/fx/__pycache__/__init__.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/graph_matcher.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/graph_passes.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/mappings.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/n_shadows_utils.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/ns_types.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/pattern_utils.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/qconfig_multi_mapping.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/utils.cpython-310.pyc,, +torch/ao/ns/fx/__pycache__/weight_utils.cpython-310.pyc,, +torch/ao/ns/fx/graph_matcher.py,sha256=CGYyODDIEWJoG9naXW-BNYbCOwOO71tYyJA1Yij6_T4,19284 +torch/ao/ns/fx/graph_passes.py,sha256=JfQuHb6paEfmD04pcGe-w5iQN_bs5VsA6uFT59GPNnE,44425 +torch/ao/ns/fx/mappings.py,sha256=Na60TYMV4qsrLUHT24ZsWOa-q6dt0nhicWJF_s2qoM0,18260 +torch/ao/ns/fx/n_shadows_utils.py,sha256=TZblJvibFNjRGkDKHBSeEvpOMG5_w0cMBWB4M1neyXQ,51288 +torch/ao/ns/fx/ns_types.py,sha256=QS5Z_8o_hT3DzOko0kQcKHR1w9m3YMhnQMIPe8cKpFc,2339 +torch/ao/ns/fx/pattern_utils.py,sha256=kouXEpppszIeOseZ06qOor8i3NtlPapRSj9fKEU0q4c,8393 
+torch/ao/ns/fx/qconfig_multi_mapping.py,sha256=0WlNI3ezH5fu6DVF12os3qQ4koCJRG7f-jv6X54hGQQ,10195 +torch/ao/ns/fx/utils.py,sha256=H3AQorK3N9l0whco1wHTjLKDvjJtH8oVoOspiPlk6E0,20673 +torch/ao/ns/fx/weight_utils.py,sha256=pkC06fV1q1YfQV9MlC50zT0CIEYxbgic2cioUia3LvA,11191 +torch/ao/pruning/__init__.py,sha256=dp2CA7CO_FRHTE7E0Ft495uR5IlUi8HsnG8ujLsz6OA,640 +torch/ao/pruning/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/__pycache__/_mappings.cpython-310.pyc,, +torch/ao/pruning/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/pruning/_experimental/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/activation_sparsifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/pruning/_experimental/activation_sparsifier/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/activation_sparsifier/__pycache__/activation_sparsifier.cpython-310.pyc,, +torch/ao/pruning/_experimental/activation_sparsifier/activation_sparsifier.py,sha256=SQBZwgRsNIi2gWMPECYGEfiHtjxhU-oDpdUWDuGu0Zs,19017 +torch/ao/pruning/_experimental/data_scheduler/__init__.py,sha256=q_95mAMpHldGWwLAgYS-F07ReGKDLIAZLsP7BtlmgTE,92 +torch/ao/pruning/_experimental/data_scheduler/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_scheduler/__pycache__/base_data_scheduler.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_scheduler/base_data_scheduler.py,sha256=dhGRwHDd261cEpqU3b22IBBbg_Lx5T9YzSgoKVIzmWc,7609 +torch/ao/pruning/_experimental/data_sparsifier/__init__.py,sha256=9ktAif-dttGBmiw745tN2WtUTOTZch0E_HA4cE1bfTo,174 +torch/ao/pruning/_experimental/data_sparsifier/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/__pycache__/base_data_sparsifier.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/__pycache__/data_norm_sparsifier.cpython-310.pyc,, 
+torch/ao/pruning/_experimental/data_sparsifier/__pycache__/quantization_utils.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/base_data_sparsifier.py,sha256=dISKwbvGf8VCI7NXZvw4lcqf5gl3bswpScweThJ4Si4,13363 +torch/ao/pruning/_experimental/data_sparsifier/data_norm_sparsifier.py,sha256=DLzn4WcXM01u9rQEsXGW80RSXOwjPiIlbq6aAs7AKc4,7746 +torch/ao/pruning/_experimental/data_sparsifier/lightning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/pruning/_experimental/data_sparsifier/lightning/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/__pycache__/_data_sparstity_utils.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/__pycache__/data_sparsity.cpython-310.pyc,, +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/_data_sparstity_utils.py,sha256=Vtb__BJ_qrCy7TBisG4BM8qNXzDYrl-xNDVLbHCsYIY,1636 +torch/ao/pruning/_experimental/data_sparsifier/lightning/callbacks/data_sparsity.py,sha256=5xakcT8hZLE-SwAhc0mNeTdAchJijWdYcjI1YpyJU_w,6617 +torch/ao/pruning/_experimental/data_sparsifier/quantization_utils.py,sha256=ID2sfmm80YzcC-S886GRGmu9jCzXS6RHGZL-voPm72A,5898 +torch/ao/pruning/_experimental/pruner/FPGM_pruner.py,sha256=OSakliVFwWe_NtFJsPQ0GDSCI25NdlYpGc_VsNSF0vk,3416 +torch/ao/pruning/_experimental/pruner/__init__.py,sha256=abfUoG48Kc6ddQZmFAkYjQH5J8Hae8xJiqxz89MTgDE,260 +torch/ao/pruning/_experimental/pruner/__pycache__/FPGM_pruner.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/base_structured_sparsifier.cpython-310.pyc,, 
+torch/ao/pruning/_experimental/pruner/__pycache__/lstm_saliency_pruner.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/match_utils.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/parametrization.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/prune_functions.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/__pycache__/saliency_pruner.cpython-310.pyc,, +torch/ao/pruning/_experimental/pruner/base_structured_sparsifier.py,sha256=pClt81eSzjxyaOf_HHk432Vwwv9HLark25VtcsO-Bec,10945 +torch/ao/pruning/_experimental/pruner/lstm_saliency_pruner.py,sha256=xbNRyMqNQzHlfDa-P1mp0ACXvCw4eIUb3_fKIEQmG30,2141 +torch/ao/pruning/_experimental/pruner/match_utils.py,sha256=Q5ZSFhXiaJxwQdE7ZYIB-E1fEaR_jiER53eMSg4v5yE,1999 +torch/ao/pruning/_experimental/pruner/parametrization.py,sha256=2B0P-ow2zk2ITLq4xutBIG9taMUzNFpi_qjet5knIxo,1844 +torch/ao/pruning/_experimental/pruner/prune_functions.py,sha256=6SQa12y_PuUvgMTm1j8d69ntQQr3gH5jtwQ7scrMOkw,19029 +torch/ao/pruning/_experimental/pruner/saliency_pruner.py,sha256=i60set06ICiSY3_G3_4NErJ0hHtwCSYDEHeZK6zc-vs,1400 +torch/ao/pruning/_mappings.py,sha256=O9aJePj2X0nV76KTcSU93V7GA8dxEzfr5G9QvjyWOqQ,597 +torch/ao/pruning/scheduler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/pruning/scheduler/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/scheduler/__pycache__/base_scheduler.cpython-310.pyc,, +torch/ao/pruning/scheduler/__pycache__/cubic_scheduler.cpython-310.pyc,, +torch/ao/pruning/scheduler/__pycache__/lambda_scheduler.cpython-310.pyc,, +torch/ao/pruning/scheduler/base_scheduler.py,sha256=RpKDJ5BGMs1ecYpQvGEVQC3PtwCh8glq--1_PNt5NWg,6526 +torch/ao/pruning/scheduler/cubic_scheduler.py,sha256=CVOqMeHZKtxMD7lMfjDzzEzMpqTUWOTKdcKix_2PuZQ,3844 +torch/ao/pruning/scheduler/lambda_scheduler.py,sha256=rgurOdXgFepvjQe_FtCoP-lwc3pogSxhhaikbzNKQk0,2116 +torch/ao/pruning/sparsifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+torch/ao/pruning/sparsifier/__pycache__/__init__.cpython-310.pyc,, +torch/ao/pruning/sparsifier/__pycache__/base_sparsifier.cpython-310.pyc,, +torch/ao/pruning/sparsifier/__pycache__/nearly_diagonal_sparsifier.cpython-310.pyc,, +torch/ao/pruning/sparsifier/__pycache__/utils.cpython-310.pyc,, +torch/ao/pruning/sparsifier/__pycache__/weight_norm_sparsifier.cpython-310.pyc,, +torch/ao/pruning/sparsifier/base_sparsifier.py,sha256=PvH1D8aNRxOpSPg7TZG3okv0A59dXks5lURGdm8aezQ,13712 +torch/ao/pruning/sparsifier/nearly_diagonal_sparsifier.py,sha256=Sm3we1tAWuODq3Og0GYMY6hI_7wWfvsM65ovzI7em5Q,2227 +torch/ao/pruning/sparsifier/utils.py,sha256=U_mnjGvx_Pv31Mgdc8wSTOfR9VKFZjy39rfppycCgbg,4814 +torch/ao/pruning/sparsifier/weight_norm_sparsifier.py,sha256=LTKAxmNDr_KPA4tW-U0SmLHQrYf7jkAvaYqgSgiTVgA,9261 +torch/ao/quantization/__init__.py,sha256=8k0ehNMzm3VGkr0QvgMjoO3781o0i9lD5soBAd3A3cU,7004 +torch/ao/quantization/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/__pycache__/_correct_bias.cpython-310.pyc,, +torch/ao/quantization/__pycache__/_equalize.cpython-310.pyc,, +torch/ao/quantization/__pycache__/_learnable_fake_quantize.cpython-310.pyc,, +torch/ao/quantization/__pycache__/fake_quantize.cpython-310.pyc,, +torch/ao/quantization/__pycache__/fuse_modules.cpython-310.pyc,, +torch/ao/quantization/__pycache__/fuser_method_mappings.cpython-310.pyc,, +torch/ao/quantization/__pycache__/observer.cpython-310.pyc,, +torch/ao/quantization/__pycache__/qconfig.cpython-310.pyc,, +torch/ao/quantization/__pycache__/qconfig_mapping.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quant_type.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quantization_mappings.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quantize.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quantize_fx.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quantize_jit.cpython-310.pyc,, +torch/ao/quantization/__pycache__/quantize_pt2e.cpython-310.pyc,, 
+torch/ao/quantization/__pycache__/stubs.cpython-310.pyc,, +torch/ao/quantization/__pycache__/utils.cpython-310.pyc,, +torch/ao/quantization/_correct_bias.py,sha256=dm5thzdvCLkGTYDbaMkksUxxtfnIaJgu1NrbanSy53w,5442 +torch/ao/quantization/_equalize.py,sha256=zQQflfjiWZwq7QNX2p2ZbpYsxN1KKS_8kL5Ko7H15tE,9519 +torch/ao/quantization/_learnable_fake_quantize.py,sha256=cdxZmbWaFXg9w1GGgtUqICpYJ2UXZiDgjz6SXT4m3Ww,7908 +torch/ao/quantization/backend_config/__init__.py,sha256=lnyxe_DTaYkdoSLhxNlUcMPlck2SAngmI8GNcYDHI4M,915 +torch/ao/quantization/backend_config/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/_common_operator_config_utils.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/_qnnpack_pt2e.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/backend_config.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/executorch.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/fbgemm.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/native.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/observation_type.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/onednn.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/qnnpack.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/tensorrt.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/utils.cpython-310.pyc,, +torch/ao/quantization/backend_config/__pycache__/x86.cpython-310.pyc,, +torch/ao/quantization/backend_config/_common_operator_config_utils.py,sha256=5-aJ_U_OWaRi0RD1M_8owxGOsUMlACqdIvL0bxqfDjM,27727 +torch/ao/quantization/backend_config/_qnnpack_pt2e.py,sha256=o87OrQ5hSN_falw7d1zoxdYe4yV_lafCCbfw-AqzCig,6478 +torch/ao/quantization/backend_config/backend_config.py,sha256=8DGErtqKvSwW_aB9s6YNQ5NlhYMEjRcHiFOTbXnz4Gs,31483 
+torch/ao/quantization/backend_config/executorch.py,sha256=L69rOCnKETa-zb6kT40nrvjRH9IpYA8w9gpwoYlQF6w,17028 +torch/ao/quantization/backend_config/fbgemm.py,sha256=dCsGDn-PbASye1CPf_9poNM6eOMN10_NUFTQat95-0Q,4208 +torch/ao/quantization/backend_config/native.py,sha256=xKBjjC-uZ8qZsbHluftqC6Tn9vo_6ivcsVYFWLFFQtQ,8242 +torch/ao/quantization/backend_config/observation_type.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/quantization/backend_config/onednn.py,sha256=Uy-eKE5ewtXn8CZ56t3Q-AtJIf87aS2gRQ5qHQoSpbw,19252 +torch/ao/quantization/backend_config/qnnpack.py,sha256=PkPTzCS-Q3p8MiUq6mytJlFt-Zox7AO2kxbTg5Qx-AU,5400 +torch/ao/quantization/backend_config/tensorrt.py,sha256=Drb7vD3gWVGnSa41C6rYVR7po0qA420Nu3hoOH483-A,3021 +torch/ao/quantization/backend_config/utils.py,sha256=xJAbVaczsit7k1cwkZjujo66VBbHoqpWJBwYeoWb4ds,12508 +torch/ao/quantization/backend_config/x86.py,sha256=4geWLr6mkFhSEYiXFXpkB8NegS05EXhv2fYHTpajNyg,3869 +torch/ao/quantization/fake_quantize.py,sha256=z4ddOOy7n7Pl5SjA6ojutXrb65i3O-VhaSa8x-RJmiI,22769 +torch/ao/quantization/fuse_modules.py,sha256=E12u2bpKF5GiMPwNINu6WfNPo5VWgWDE14xfV_Vxj78,6862 +torch/ao/quantization/fuser_method_mappings.py,sha256=XTNBZ_p51y6SgbUGY7RPECN6OA-Q-OnIag_e2488nns,10440 +torch/ao/quantization/fx/__init__.py,sha256=65h6iR_5XARcYpGS8A2qRza_2y7cvqCowqkwwziYz8M,81 +torch/ao/quantization/fx/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/_decomposed.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/_equalize.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/_lower_to_native_backend.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/convert.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/custom_config.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/fuse.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/fuse_handler.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/graph_module.cpython-310.pyc,, 
+torch/ao/quantization/fx/__pycache__/lower_to_fbgemm.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/lower_to_qnnpack.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/lstm_utils.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/match_utils.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/pattern_utils.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/prepare.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/qconfig_mapping_utils.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/quantize_handler.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/tracer.cpython-310.pyc,, +torch/ao/quantization/fx/__pycache__/utils.cpython-310.pyc,, +torch/ao/quantization/fx/_decomposed.py,sha256=58g8U2VT3h5y_xjmEsfY3hB46IrA0SZIRKyGRSRGYLI,41315 +torch/ao/quantization/fx/_equalize.py,sha256=wiAU09BGWY8Q16h-5hckIx3frySbX-b0zgXjqYZqdBA,37838 +torch/ao/quantization/fx/_lower_to_native_backend.py,sha256=wHu4_4dc9r_iFyqBJ27QSX-n6KIQPlD70VuKIfGE3h4,53182 +torch/ao/quantization/fx/_model_report/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/quantization/fx/_model_report/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/fx/_model_report/__pycache__/detector.cpython-310.pyc,, +torch/ao/quantization/fx/_model_report/__pycache__/model_report.cpython-310.pyc,, +torch/ao/quantization/fx/_model_report/__pycache__/model_report_observer.cpython-310.pyc,, +torch/ao/quantization/fx/_model_report/__pycache__/model_report_visualizer.cpython-310.pyc,, +torch/ao/quantization/fx/_model_report/detector.py,sha256=o5KxMfTscQ2pyZ2zRS9uILNaQph1Lz7V3oNxV1LmvP4,76422 +torch/ao/quantization/fx/_model_report/model_report.py,sha256=pKwoA07vSmQLmUlhqR99avVLAjetFiWZchPSzsuG1uc,29507 +torch/ao/quantization/fx/_model_report/model_report_observer.py,sha256=Ft8eZAF7NCprEFdxwa35h8efXI3-q2OivU7FE6KP2Ow,12058 +torch/ao/quantization/fx/_model_report/model_report_visualizer.py,sha256=t1Nf233eL4v9ey9BYZNxOBYWnzsGNHKtDYKBx33QFOA,32675 
+torch/ao/quantization/fx/convert.py,sha256=fYrTkYAo0cC0cHNIhr3GFZL-0Y0_48COASWmxAMSuiU,57380 +torch/ao/quantization/fx/custom_config.py,sha256=U_ovUZlWGa-Ls6zNKmxgDKKs0rlNibqsOAugflkHKOY,21860 +torch/ao/quantization/fx/fuse.py,sha256=P3fIdFXT3xdHUJB4jsVRtl4Q-LiHwW-SiWDMR--aIAg,7275 +torch/ao/quantization/fx/fuse_handler.py,sha256=kJanP32TE-iI9UYSLGuvVjUZwRNf8xwvcv-pwcaDqOw,4719 +torch/ao/quantization/fx/graph_module.py,sha256=eYTHcbjM6mys25LpIlCBmaHcdk8_n67_w1UwNwmQEMM,6625 +torch/ao/quantization/fx/lower_to_fbgemm.py,sha256=y38V3OPQT0VNYMUUmmspmTgyHqEA8jQvm61YkMT1lfc,556 +torch/ao/quantization/fx/lower_to_qnnpack.py,sha256=7N785si10wpZuKheJuvKlvqPThD35MpWEdzTj_9V0Ew,559 +torch/ao/quantization/fx/lstm_utils.py,sha256=P0pIkVCo-E7I_lCNRUBY9kxfG9uBRugoKxNvF8aGg74,9262 +torch/ao/quantization/fx/match_utils.py,sha256=hC_BKPz7NCyJcu_LiP5vNpZbq9kWgkK-zVWu3CpCnko,8863 +torch/ao/quantization/fx/pattern_utils.py,sha256=4VnEGRJ1Shnav9vCIVBTr2iSxHIuTtUr516weuCUEn0,3671 +torch/ao/quantization/fx/prepare.py,sha256=hqgcpkkNZPoyxeyRuIkHvqYVJM2j8Irg1wf5wrLX99U,87766 +torch/ao/quantization/fx/qconfig_mapping_utils.py,sha256=wJ-SCtAnomlJL4BtuDX90FNZ-mjqrxDx6ZzL0N0o314,15397 +torch/ao/quantization/fx/quantize_handler.py,sha256=SqJ2GGOtQkNJNxXrYK0Whw8k7GbpYR-d42l03wTrAss,7298 +torch/ao/quantization/fx/tracer.py,sha256=39ChI4RKd_8QwNNXUe3nw8_s1zJ9vVb7LrIEmAjKOTo,1694 +torch/ao/quantization/fx/utils.py,sha256=LBKGogY8TdsBRqxS7dltQu8uGR5YJbDT7gafxbSD1GU,37600 +torch/ao/quantization/observer.py,sha256=EuZOMdW-2sVeX3wNj_rZtAGxIv5uLKBupo803rXy7BI,66606 +torch/ao/quantization/pt2e/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/ao/quantization/pt2e/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/_numeric_debugger.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/duplicate_dq_pass.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/export_utils.cpython-310.pyc,, 
+torch/ao/quantization/pt2e/__pycache__/graph_utils.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/port_metadata_pass.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/prepare.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/qat_utils.cpython-310.pyc,, +torch/ao/quantization/pt2e/__pycache__/utils.cpython-310.pyc,, +torch/ao/quantization/pt2e/_numeric_debugger.py,sha256=kqumKsZrJExW95Dh4oInvwFBv1qHfbRIcoKH1WS7E-Q,8008 +torch/ao/quantization/pt2e/duplicate_dq_pass.py,sha256=JHeTrZppUg8G7BJRrwo9g0Def6sbT3Ga170Afl4wvvM,3122 +torch/ao/quantization/pt2e/export_utils.py,sha256=cAA4T0XMiZVSKCTE5bH_1OF8I18vo9uy-xH91sEk5ao,7379 +torch/ao/quantization/pt2e/graph_utils.py,sha256=ZpvkXkNL1jqkvOXXNQ9ymZ5oizOYEfSd6a49AL8M110,3939 +torch/ao/quantization/pt2e/port_metadata_pass.py,sha256=VOEsMFlNqUZXeX__yo45XoohxAg0S0Z0L2x6jfIoR6o,9085 +torch/ao/quantization/pt2e/prepare.py,sha256=2vFJF4AzXiNTmLsMic72CS7C3JZf6FRniOamOPTxBhA,21408 +torch/ao/quantization/pt2e/qat_utils.py,sha256=8kLeZwEfxLeMzvqRIrFuG91KsXhMSK-JYEg8WRbbt4I,35579 +torch/ao/quantization/pt2e/representation/__init__.py,sha256=Srf_T8fMTpFi64ZAgQom0S1A4CLFfKrtwPB_edoMu78,110 +torch/ao/quantization/pt2e/representation/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/pt2e/representation/__pycache__/rewrite.cpython-310.pyc,, +torch/ao/quantization/pt2e/representation/rewrite.py,sha256=HXXHxAmiGApCvLHC2hdh7dCCWpv_a0dAdcXRqay8NUI,27475 +torch/ao/quantization/pt2e/utils.py,sha256=LNk5kOOzmKEjvfzqKkcVtzRv4AcOzGuD5wbdyS7Qgvk,23524 +torch/ao/quantization/qconfig.py,sha256=ZdLW6_3Mh2nxsgLqY5sGlh6KR5sBJ9QzyIHua0Z-SVA,24295 +torch/ao/quantization/qconfig_mapping.py,sha256=-TkN_DakfsPAeqsrfWujebyriBsMXjErcm1SA3Z36ZE,14828 +torch/ao/quantization/quant_type.py,sha256=fY7JDEr_PzBFymxy9fQmnRIFKiQoL2QoKJtxZKSJsoc,760 +torch/ao/quantization/quantization_mappings.py,sha256=UXFwyjLaw8A1SnFzrabKE-x9vU4mt5Y32uJXPpnqhnA,13858 
+torch/ao/quantization/quantize.py,sha256=uUeSi-yGtV_SslRtlTF7PEW5szi1jOrLVwhF2C9zq-E,30599 +torch/ao/quantization/quantize_fx.py,sha256=gFH1tsm-hwAA8jAOGtV90RiDwVnAv-trdppIZdzmsgo,32260 +torch/ao/quantization/quantize_jit.py,sha256=Zf0_hkq6JOf4JHWqtlt3N2zLMmVsNptLMWpI8pnyTak,14602 +torch/ao/quantization/quantize_pt2e.py,sha256=Cab37tpcIEgdgisRaENTapT652tifdx5ymULBZwOB8A,9257 +torch/ao/quantization/quantizer/__init__.py,sha256=JCNfOoUz3U_LyPipiVeO4o0LRncgqc6kiuv5lAF2i7E,455 +torch/ao/quantization/quantizer/__pycache__/__init__.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/composable_quantizer.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/embedding_quantizer.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/quantizer.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/utils.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/x86_inductor_quantizer.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/xnnpack_quantizer.cpython-310.pyc,, +torch/ao/quantization/quantizer/__pycache__/xnnpack_quantizer_utils.cpython-310.pyc,, +torch/ao/quantization/quantizer/composable_quantizer.py,sha256=umny-owvuqwfLGyV9CVQYubOIyj9fG_m9WFxcaB0snQ,2993 +torch/ao/quantization/quantizer/embedding_quantizer.py,sha256=8_lNH8T7d_6Yol0MhuQiNbcEejb-LussQhkzauYFzOs,3486 +torch/ao/quantization/quantizer/quantizer.py,sha256=OVrmvlBI3C4v_J4uMrugCjn9K1tD2QuwScoxs1FzaCA,5665 +torch/ao/quantization/quantizer/utils.py,sha256=05e3-3h-wGWWTYUnXWhd7T0oSg2wiaxumGYPV5aPikE,3249 +torch/ao/quantization/quantizer/x86_inductor_quantizer.py,sha256=H5m5VbKnmLTNN_mbu_PUgxP5vPV5kip19u5lCaA7UE4,66106 +torch/ao/quantization/quantizer/xnnpack_quantizer.py,sha256=490shvDMrxGEd820weJ7fSnxN-cnfzw-fXBGTD1K9X0,15747 +torch/ao/quantization/quantizer/xnnpack_quantizer_utils.py,sha256=nC46uG8H2mlv9XWXJMlbMJGDazY9NwnM8OarKNqZGAo,39216 +torch/ao/quantization/stubs.py,sha256=_UcX_udS8DIeTLbaIthvFlyjkgiKYpQmQvUrJ__OTaI,2040 
+torch/ao/quantization/utils.py,sha256=Pzs0r5ntGRZKIr8JJ0qH2nclxPmizE9nSTIQnqMNjuo,27911 +torch/autograd/__init__.py,sha256=mPhtKV44b2R60jTsY-NstLXWrMPfaWVGwDvWK_Z6dQM,25094 +torch/autograd/__pycache__/__init__.cpython-310.pyc,, +torch/autograd/__pycache__/anomaly_mode.cpython-310.pyc,, +torch/autograd/__pycache__/forward_ad.cpython-310.pyc,, +torch/autograd/__pycache__/function.cpython-310.pyc,, +torch/autograd/__pycache__/functional.cpython-310.pyc,, +torch/autograd/__pycache__/grad_mode.cpython-310.pyc,, +torch/autograd/__pycache__/gradcheck.cpython-310.pyc,, +torch/autograd/__pycache__/graph.cpython-310.pyc,, +torch/autograd/__pycache__/profiler.cpython-310.pyc,, +torch/autograd/__pycache__/profiler_legacy.cpython-310.pyc,, +torch/autograd/__pycache__/profiler_util.cpython-310.pyc,, +torch/autograd/__pycache__/variable.cpython-310.pyc,, +torch/autograd/_functions/__init__.py,sha256=sdKJj6Dia1vNRSolyufzRKdn5qHkgCBCUI3OBm-PGW0,36 +torch/autograd/_functions/__pycache__/__init__.cpython-310.pyc,, +torch/autograd/_functions/__pycache__/tensor.cpython-310.pyc,, +torch/autograd/_functions/__pycache__/utils.cpython-310.pyc,, +torch/autograd/_functions/tensor.py,sha256=Ak95MjJnF4gQJw9V44srNX-NuU1wdoQtgfhVJl-NLeU,2190 +torch/autograd/_functions/utils.py,sha256=P-0ZjGaBNWvC_GPa97alBYOybjs_GY4XxnbaI_zenwU,2057 +torch/autograd/anomaly_mode.py,sha256=4yheU1PJy-LWTzYjZjPgCCH52tBfyW78C-F6xqmN_8U,4951 +torch/autograd/forward_ad.py,sha256=A15bdSa7H0wprjX06TplPF9EDtH8jgnc-5aUzcc83GU,7681 +torch/autograd/function.py,sha256=gXfOpuWVsqkrEWh_4dsEvACGYFtL1ecGayboCBLb4N4,33273 +torch/autograd/functional.py,sha256=Kh0Pj0jlhjysRQO1neLnEsVspO1b98iK2wJBs0bG47k,52287 +torch/autograd/grad_mode.py,sha256=3mFoSn9z6zYcD0fCIVmiv8Z99veN36XxmASbBlTx9Kc,13001 +torch/autograd/gradcheck.py,sha256=3BUlrAMr-6iX8knGQw8kFKxxTyjnanw4JJBYW2MVeUY,90652 +torch/autograd/graph.py,sha256=cSAZCneLQptBeem6ZsPyCkPEDQXoJwiO5o_Bjpy8yE8,29979 
+torch/autograd/profiler.py,sha256=gJXcbJk_ZbJzG4fY8l7W0W_oPB1RMPtxzvlEmKBcBWw,47325 +torch/autograd/profiler_legacy.py,sha256=88GAar3tnGYc4mz-R9C9CUHUV8xtRZXVPQEvqv6om38,11544 +torch/autograd/profiler_util.py,sha256=STflJmKyk-fpC3Qfpr9Zit-yWxqDEkzUYO7v7vNPOzo,40013 +torch/autograd/variable.py,sha256=N0cAiO8ZPZ6rtRtU-hJYH_6NVt-zun16CmO_KrZV7-0,391 +torch/backends/__init__.py,sha256=mhmRgY-atauW5wgnwUwvTHPqzoZaxKqFjBLFFKuLYpA,1751 +torch/backends/__pycache__/__init__.cpython-310.pyc,, +torch/backends/_coreml/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/backends/_coreml/__pycache__/__init__.cpython-310.pyc,, +torch/backends/_coreml/__pycache__/preprocess.cpython-310.pyc,, +torch/backends/_coreml/preprocess.py,sha256=yQgZAYZBg5uzL-kkZSlVAT7pxsZEzJfYfRYESDlz_gc,4248 +torch/backends/_nnapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/backends/_nnapi/__pycache__/__init__.cpython-310.pyc,, +torch/backends/_nnapi/__pycache__/prepare.cpython-310.pyc,, +torch/backends/_nnapi/__pycache__/serializer.cpython-310.pyc,, +torch/backends/_nnapi/prepare.py,sha256=-_BIyTSWh4yruonhQyYiSH8BdIoA-qzFn12cxSJclzU,6565 +torch/backends/_nnapi/serializer.py,sha256=RjvdYPHnq_SLZneaSk8tT1CO9iYSjIZKjhH2FBZX9mQ,82935 +torch/backends/cpu/__init__.py,sha256=mSjDKAku8EPXNHxNMgHgli8gnAuX-4Kene95aqF5w2o,299 +torch/backends/cpu/__pycache__/__init__.cpython-310.pyc,, +torch/backends/cuda/__init__.py,sha256=X9TNSWVVXyjt8i5tVu-A6MUwBF8QboHMROxZKUpjrgA,16448 +torch/backends/cuda/__pycache__/__init__.cpython-310.pyc,, +torch/backends/cudnn/__init__.py,sha256=bMwIEv7Nb-4-qLEkQ87AuIBqH6Wv7k-Tpq2U2G-kC0s,6603 +torch/backends/cudnn/__pycache__/__init__.cpython-310.pyc,, +torch/backends/cudnn/__pycache__/rnn.cpython-310.pyc,, +torch/backends/cudnn/rnn.py,sha256=FO3LenxuwfEy6KVlQAs8SmAxrOLrMr8kNxJ7PKf2kRE,2061 +torch/backends/cusparselt/__init__.py,sha256=qD-zoJvCAVLnlc65BqQt0XRSPgpmeF14hzxA4pz68vM,820 
+torch/backends/cusparselt/__pycache__/__init__.cpython-310.pyc,, +torch/backends/mha/__init__.py,sha256=6cNW5xOJZ1jTo_RN_rrXk8orFkhxMi4utP--JYtw7Zo,716 +torch/backends/mha/__pycache__/__init__.cpython-310.pyc,, +torch/backends/mkl/__init__.py,sha256=IxRqKwR_STL0QASEkBXrVjt-t-mS4SuOZB2ku5eluY8,1782 +torch/backends/mkl/__pycache__/__init__.cpython-310.pyc,, +torch/backends/mkldnn/__init__.py,sha256=iyBgJBnZMEKlcMYgYLIFcM7J2ABda4k6FHuN8uq8oHE,3157 +torch/backends/mkldnn/__pycache__/__init__.cpython-310.pyc,, +torch/backends/mps/__init__.py,sha256=pu9IZfa4iMvb1wQzb2HcOfVn08dbVQj3OTAw436I2lc,1642 +torch/backends/mps/__pycache__/__init__.cpython-310.pyc,, +torch/backends/nnpack/__init__.py,sha256=6cApdM4SVAbns4TuVAuFI9sXskhu7piYVsmP4rqpdOA,837 +torch/backends/nnpack/__pycache__/__init__.cpython-310.pyc,, +torch/backends/openmp/__init__.py,sha256=h6ebEMpGQavTuGZ3eJVK4JMUMMUgTYcrmvHxh_Fcgy8,157 +torch/backends/openmp/__pycache__/__init__.cpython-310.pyc,, +torch/backends/opt_einsum/__init__.py,sha256=AQS42SAjIIkRiV1X_LB4kouLxcnZDg3g6lnDIa_bBE4,3452 +torch/backends/opt_einsum/__pycache__/__init__.cpython-310.pyc,, +torch/backends/quantized/__init__.py,sha256=nqfl7kWwLy4yRxNWTln5RiailNpFy2mIvqNaJP41_P0,1885 +torch/backends/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/backends/xeon/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/backends/xeon/__pycache__/__init__.cpython-310.pyc,, +torch/backends/xeon/__pycache__/run_cpu.cpython-310.pyc,, +torch/backends/xeon/run_cpu.py,sha256=kahC0s6XNlzJxtAKbV8rrYZgyT0DnDn24jeD8SHTBOE,37567 +torch/backends/xnnpack/__init__.py,sha256=BNL7CNyTlaTNxu4JVDE8VZgiu16mRO6JpZeQ0Ns3Nr8,702 +torch/backends/xnnpack/__pycache__/__init__.cpython-310.pyc,, +torch/bin/protoc,sha256=M5CHOy2lbBOXrew3KPFYjFHhgvFbEj07TU8kjTHB9No,5330888 +torch/bin/protoc-3.13.0.0,sha256=M5CHOy2lbBOXrew3KPFYjFHhgvFbEj07TU8kjTHB9No,5330888 +torch/bin/torch_shm_manager,sha256=Dg3zQDukdI85K4zF61ZdFTtO3lyTh0L1fxVl9SQM8Ws,80456 
+torch/compiler/__init__.py,sha256=f3Qe5Qxk0U8TBEQeWIJ2WT7fHIWSAYg8Zq8MGVlXXtg,12159 +torch/compiler/__pycache__/__init__.cpython-310.pyc,, +torch/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/contrib/__pycache__/__init__.cpython-310.pyc,, +torch/contrib/__pycache__/_tensorboard_vis.cpython-310.pyc,, +torch/contrib/_tensorboard_vis.py,sha256=qACmgpnhedP3UNkhkQ4wWIFRIJD6RJTiiXBE2JMi1Pw,5943 +torch/cpu/__init__.py,sha256=BQEw_kCttb-VCcuJnG3A0jAPWmaPZfxi5_b8VoHPDU8,4379 +torch/cpu/__pycache__/__init__.cpython-310.pyc,, +torch/cpu/amp/__init__.py,sha256=E_HPtKk9IO3AW-gyneT7K5wtjnFZHx9GkvH424D_kb8,72 +torch/cpu/amp/__pycache__/__init__.cpython-310.pyc,, +torch/cpu/amp/__pycache__/autocast_mode.cpython-310.pyc,, +torch/cpu/amp/__pycache__/grad_scaler.cpython-310.pyc,, +torch/cpu/amp/autocast_mode.py,sha256=K5VNaqoZMb_ZvyxVYvqmcddx2FNNXWJ3QWoyrHYYhI0,1521 +torch/cpu/amp/grad_scaler.py,sha256=hT5iBOSV285sW4X7s00Mfg7Pi_eJ-sKCS5BT1qSrz-U,958 +torch/cuda/__init__.py,sha256=ynzdmnZrfsVDP60QA7DgyB90QH2x9FLFgV8loR52ODs,53831 +torch/cuda/__pycache__/__init__.cpython-310.pyc,, +torch/cuda/__pycache__/_gpu_trace.cpython-310.pyc,, +torch/cuda/__pycache__/_memory_viz.cpython-310.pyc,, +torch/cuda/__pycache__/_sanitizer.cpython-310.pyc,, +torch/cuda/__pycache__/_utils.cpython-310.pyc,, +torch/cuda/__pycache__/comm.cpython-310.pyc,, +torch/cuda/__pycache__/error.cpython-310.pyc,, +torch/cuda/__pycache__/gds.cpython-310.pyc,, +torch/cuda/__pycache__/graphs.cpython-310.pyc,, +torch/cuda/__pycache__/jiterator.cpython-310.pyc,, +torch/cuda/__pycache__/memory.cpython-310.pyc,, +torch/cuda/__pycache__/nccl.cpython-310.pyc,, +torch/cuda/__pycache__/nvtx.cpython-310.pyc,, +torch/cuda/__pycache__/profiler.cpython-310.pyc,, +torch/cuda/__pycache__/random.cpython-310.pyc,, +torch/cuda/__pycache__/sparse.cpython-310.pyc,, +torch/cuda/__pycache__/streams.cpython-310.pyc,, +torch/cuda/__pycache__/tunable.cpython-310.pyc,, 
+torch/cuda/_gpu_trace.py,sha256=Xoewdq6BtoNF5PI-wKafc3PRKHb2Na1MHbgQi-Xr8YI,2383 +torch/cuda/_memory_viz.py,sha256=VN33YC95l1RAswOG_pyK4KmmLDT0xaf64sYxmlu53kg,24931 +torch/cuda/_sanitizer.py,sha256=JbgRvhkaew7ter3Z71YmtHQUbSJyyQ6T3Gb5PYBcC2Y,22429 +torch/cuda/_utils.py,sha256=_lWqK6eEa4-odb_CBYKyWjUY-iVr3vZwWHNqeTS9mVM,1597 +torch/cuda/amp/__init__.py,sha256=yWSy9txcjVNn_uuZjKdCodbaY6LgX0hok9LujNSzXUY,267 +torch/cuda/amp/__pycache__/__init__.cpython-310.pyc,, +torch/cuda/amp/__pycache__/autocast_mode.cpython-310.pyc,, +torch/cuda/amp/__pycache__/common.cpython-310.pyc,, +torch/cuda/amp/__pycache__/grad_scaler.cpython-310.pyc,, +torch/cuda/amp/autocast_mode.py,sha256=lQjFh0vpiPbm-7SnM1EwhMGNJs8Xg4F_WGD1ydybnL8,2819 +torch/cuda/amp/common.py,sha256=rPnt6OLsROsahVbMyX5w5Fl3R4G6VUiSl5uioKuDEFg,230 +torch/cuda/amp/grad_scaler.py,sha256=WTG3BGVvo4ifizDYTNTq18lRp_CkqVbYS7Kv5N9n6ag,1073 +torch/cuda/comm.py,sha256=DjabyiIf7WmKgD84HmP4_XYd3Cau_7X_E7dcW1UAUjg,344 +torch/cuda/error.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/cuda/gds.py,sha256=A9c1tySQsiSWfYv-d9hNbfM4w0IEgHCnIZX7t8HYhhc,4438 +torch/cuda/graphs.py,sha256=sHTC_VzGvRZFzEkZC0kl1yy435U6kFdR9H7IKlFy_Nk,22196 +torch/cuda/jiterator.py,sha256=PepIcsj9xyvhuHbG8g6fVoPGg1c-W8fPPsxLd6BSRG8,6834 +torch/cuda/memory.py,sha256=hQ4xqHxFwehj5MyN0eFf0mmjgA5KmUelNRiJE64uE50,39869 +torch/cuda/nccl.py,sha256=Z_1NOA-o9-pWG6KwEDMgIhBtXOE7UtPdEX94cLP8xHU,4550 +torch/cuda/nvtx.py,sha256=l7WU-1dih-ABd1HJmF2-WE7FICwRia7EfVNq8Gjmb9Q,2443 +torch/cuda/profiler.py,sha256=pf33aSDw7WVqCTuI6rWbV3IlU5mAuUx46-vHRkqHsCw,2401 +torch/cuda/random.py,sha256=bwC9zIqQRINcLiS8DUiDZ8Bf-01VrN9ZdbKvXa_rIjw,5287 +torch/cuda/sparse.py,sha256=H912FRisikGM9SSVDQQq-YX5TNIilBYBZs1AwmIv3GQ,67 +torch/cuda/streams.py,sha256=jA1XtBd2nMQOFLmKU-BP4sES2X4h7-A0ivl5vPxN0gQ,8414 +torch/cuda/tunable.py,sha256=4GQza0imWhbjbMVI7_rxZSncBDmYK79mWEsQakRdW7A,10211 +torch/distributed/__init__.py,sha256=vVOIjpK35Y1QfHFTAeoSjfsg_heKM-n79pVepAy4Cyk,4965 
+torch/distributed/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/__pycache__/_checkpointable.cpython-310.pyc,, +torch/distributed/__pycache__/_composable_state.cpython-310.pyc,, +torch/distributed/__pycache__/_functional_collectives.cpython-310.pyc,, +torch/distributed/__pycache__/_functional_collectives_impl.cpython-310.pyc,, +torch/distributed/__pycache__/_state_dict_utils.cpython-310.pyc,, +torch/distributed/__pycache__/argparse_util.cpython-310.pyc,, +torch/distributed/__pycache__/c10d_logger.cpython-310.pyc,, +torch/distributed/__pycache__/collective_utils.cpython-310.pyc,, +torch/distributed/__pycache__/constants.cpython-310.pyc,, +torch/distributed/__pycache__/device_mesh.cpython-310.pyc,, +torch/distributed/__pycache__/distributed_c10d.cpython-310.pyc,, +torch/distributed/__pycache__/launch.cpython-310.pyc,, +torch/distributed/__pycache__/logging_handlers.cpython-310.pyc,, +torch/distributed/__pycache__/remote_device.cpython-310.pyc,, +torch/distributed/__pycache__/rendezvous.cpython-310.pyc,, +torch/distributed/__pycache__/run.cpython-310.pyc,, +torch/distributed/__pycache__/utils.cpython-310.pyc,, +torch/distributed/_checkpointable.py,sha256=QWmnj_7x_MnnCib0kv-jT82E5grgH2C_gcVfrw-wYJQ,1286 +torch/distributed/_composable/__init__.py,sha256=m_X5LTwtmai9gMvopIlzj-N32YkCV1aTokhVNgUf8LU,162 +torch/distributed/_composable/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_composable/__pycache__/checkpoint_activation.cpython-310.pyc,, +torch/distributed/_composable/__pycache__/contract.cpython-310.pyc,, +torch/distributed/_composable/__pycache__/fully_shard.cpython-310.pyc,, +torch/distributed/_composable/__pycache__/replicate.cpython-310.pyc,, +torch/distributed/_composable/checkpoint_activation.py,sha256=vs7EcKU-B4Zpvo9teGloJ2RkaEkWUS_mrA09zcwv-L4,4550 +torch/distributed/_composable/contract.py,sha256=1gDWea7T7S9KvAXCcQNu4EBn0_n-GoMVTusmUbZ64vI,9546 
+torch/distributed/_composable/fsdp/__init__.py,sha256=sIx93iGzCKEai-Pk4xlLrxxcFs6tCBtzuYdhdmf9ZfY,156 +torch/distributed/_composable/fsdp/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_api.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_collectives.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_common.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_init.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_param.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_param_group.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/_fsdp_state.cpython-310.pyc,, +torch/distributed/_composable/fsdp/__pycache__/fully_shard.cpython-310.pyc,, +torch/distributed/_composable/fsdp/_fsdp_api.py,sha256=AUD1HMRxW8PYfPqNt131KG9F-u2FzMJIkdAWIuuluSU,3636 +torch/distributed/_composable/fsdp/_fsdp_collectives.py,sha256=-4XF7wN78f1_3K8fgQMwtHnb_ZhBkQY5HonOPGfTz4o,19588 +torch/distributed/_composable/fsdp/_fsdp_common.py,sha256=npQLECYhN6THt80X3ZOhK3OcZ5p1uIfflzofPBlfjBw,4946 +torch/distributed/_composable/fsdp/_fsdp_init.py,sha256=QGqb3M-NuBEXcdbukTPOqnWzMpWANMI-4Z8RP1J2MTQ,6776 +torch/distributed/_composable/fsdp/_fsdp_param.py,sha256=ZVCf33omYjWHCoCVJ2jgBWwvOd1fhjcIKHcdaoxTh5s,34797 +torch/distributed/_composable/fsdp/_fsdp_param_group.py,sha256=2Rsz7qPmJ2pnC7cp-E2Pgl0stEN2MmL6Q85zsq-Nc_o,27587 +torch/distributed/_composable/fsdp/_fsdp_state.py,sha256=psOc8JH676WUDE5LUq2KcnLopDZCDXiOE6cMCCZykPw,16767 +torch/distributed/_composable/fsdp/fully_shard.py,sha256=jr5Auins6E_UDqLO1CNYEy62Qa4Tf7aM6YXcaOBvS8Q,19543 +torch/distributed/_composable/fully_shard.py,sha256=07Oaaje8onf65CAvKgh27NEfCEeCZfcxfuAJLayT6g0,5225 +torch/distributed/_composable/replicate.py,sha256=462oW2YaYxa7hQN8zwBOJd1n23tqPT6TD-ZlWJDOBlk,9333 +torch/distributed/_composable_state.py,sha256=LV7Lb610CsRZ7SxFp7mLz-5K0cwu7LP5aKil_ouDKsg,1124 
+torch/distributed/_functional_collectives.py,sha256=AkHnZF4xUo11HQSVNbi2y4Gqpq3ew7GR9AG2zp4dDic,42330 +torch/distributed/_functional_collectives_impl.py,sha256=kpPK6wsdncY-w-TkPVJezWBEjGfUYMPgtyB3T4dM37o,3229 +torch/distributed/_shard/__init__.py,sha256=9mrjpti4iifWb8jnGoZypHH0ovdZg3A2KQnxg6BACKI,87 +torch/distributed/_shard/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/_utils.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/api.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/common_op_utils.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/metadata.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/op_registry_utils.cpython-310.pyc,, +torch/distributed/_shard/__pycache__/sharder.cpython-310.pyc,, +torch/distributed/_shard/_utils.py,sha256=QQLpCihQ51J7kvRwQ2JHVK2FrCHqJLu70jYflQ8WbLE,1061 +torch/distributed/_shard/api.py,sha256=A1hRT4iD5mEqqFScDl6crTy_IHLGf3yyl-C80-83THQ,12395 +torch/distributed/_shard/checkpoint/__init__.py,sha256=kILV09-2Glss17itgmv1SbBq7hp2Y7HLUugCZRGYCFE,584 +torch/distributed/_shard/checkpoint/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/common_op_utils.py,sha256=i1pNW63w07q_g1TWw-kFxgQk60mQhLYbuYjZXLg_T5k,2179 +torch/distributed/_shard/metadata.py,sha256=fyCWeU2WqC1JHJQTfcBsIrJG8zWb4HWBYeUZecdCb_Y,2221 +torch/distributed/_shard/op_registry_utils.py,sha256=aK7aBTzgR298AvchxXB1VNdeIT6UCO5emnOoOadjM-I,1031 +torch/distributed/_shard/sharded_optim/__init__.py,sha256=f9x5lss59UIuzBIGCzqKCIieVLwkX0IsWAdo7vcIpm8,1849 +torch/distributed/_shard/sharded_optim/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/sharded_optim/__pycache__/api.cpython-310.pyc,, +torch/distributed/_shard/sharded_optim/api.py,sha256=O_tb_HJ8h_TyHUV-NAN4K1YVaNxAVGqO5F_y-CLVceU,4245 +torch/distributed/_shard/sharded_tensor/__init__.py,sha256=lOJn8hXNON9IjrKWfIwCzwln8c_nngOhlvAXMm2Ffoc,19260 +torch/distributed/_shard/sharded_tensor/__pycache__/__init__.cpython-310.pyc,, 
+torch/distributed/_shard/sharded_tensor/__pycache__/api.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/logger.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/logging_handlers.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/metadata.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/reshard.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/shard.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/__pycache__/utils.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__init__.py,sha256=NW4TXyw5RGFCbWfikCuxxCOrWuVIUovX06jI8PgcqNM,498 +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/_common.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/binary_cmp.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/init.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/misc_ops.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/__pycache__/tensor_ops.cpython-310.pyc,, +torch/distributed/_shard/sharded_tensor/_ops/_common.py,sha256=QtHsy0BJrgpmgLTvLNtOAQ0E-EK7KW6nQwKd3KK78k0,4196 +torch/distributed/_shard/sharded_tensor/_ops/binary_cmp.py,sha256=CkiqLbNQRCughKRkxJJ-AiNavPQ0zlysR6cpjDD_uU8,2754 +torch/distributed/_shard/sharded_tensor/_ops/init.py,sha256=Ny1FXXrRJ4xWIHae7Hk-66YLZTHykllEdVPcuL4akFs,5486 +torch/distributed/_shard/sharded_tensor/_ops/misc_ops.py,sha256=lp_SXuhFUagN_5bxfmZVeqY0SCMhgnGeLGxLq38qvsg,497 +torch/distributed/_shard/sharded_tensor/_ops/tensor_ops.py,sha256=p2jSxGb29roANyx527ZnztUZyZQrvZxfumzxg3IFrsk,7728 +torch/distributed/_shard/sharded_tensor/api.py,sha256=WGb4hvo2YuS8AEw1iPW1B6BVkVyurYfyG6JL648f8CQ,52106 +torch/distributed/_shard/sharded_tensor/logger.py,sha256=OtID66csbTNKNVB307nS6CAL5EFSCd00JVkMWP8smic,1115 
+torch/distributed/_shard/sharded_tensor/logging_handlers.py,sha256=atN4o8Q5hENaz5nAt_oApbO4JLkMr4XAz9zFvGQMKwU,389 +torch/distributed/_shard/sharded_tensor/metadata.py,sha256=PccEPCTMsZ8UTBKpVfeVtgk_LdEKp-4t-jXrd515Agw,3022 +torch/distributed/_shard/sharded_tensor/reshard.py,sha256=RTMZxgotSUBWMLjIgSeb3dRQ2GQj_SHFjVkFfFnBJRc,10805 +torch/distributed/_shard/sharded_tensor/shard.py,sha256=iwuIKAHnqoHNPrHevLop0QYOv5qSNezIF-DIUa3Lrpo,2393 +torch/distributed/_shard/sharded_tensor/utils.py,sha256=34tYEie-45nevMSlzbWM6Y76OaWP_wV3r0JViVb9x1I,9493 +torch/distributed/_shard/sharder.py,sha256=nErlvDuMU29IUB6oWB295FaXYj8H42FuDQJXdepvQ1E,901 +torch/distributed/_shard/sharding_plan/__init__.py,sha256=9w-j8bY8VJ0dGT_ueJR2yTfzuech180Xkq0XELn1tKs,47 +torch/distributed/_shard/sharding_plan/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/sharding_plan/__pycache__/api.cpython-310.pyc,, +torch/distributed/_shard/sharding_plan/api.py,sha256=VxE8ZHg0d3Oqu-ycwg2KJdXb9pCI2q2iq69fL_XbI8M,3661 +torch/distributed/_shard/sharding_spec/__init__.py,sha256=hih75sKTSXPxdyUOS24Vl6K3Q3ht-74l33uAjM4I2Kw,291 +torch/distributed/_shard/sharding_spec/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/__pycache__/_internals.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/__pycache__/api.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/__pycache__/chunk_sharding_spec.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/_internals.py,sha256=uXJOMnwptKkmij39Rzol6Eeqnjv0PqP2qKW5Ut5Y-xc,7995 +torch/distributed/_shard/sharding_spec/api.py,sha256=aoDb-7KMTfJmZ03rlhlwZyv4mhDurqmljdaVcGtMxW8,9833 +torch/distributed/_shard/sharding_spec/chunk_sharding_spec.py,sha256=HKmEpZu6rF48bK7W1L4Ice1bbZyK86UFVYCXE_NmlJQ,8945 +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/__pycache__/__init__.cpython-310.pyc,, 
+torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/__pycache__/_common.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/__pycache__/embedding.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/__pycache__/embedding_bag.cpython-310.pyc,, +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/_common.py,sha256=5I4n8vJ2TEFRQnyGglsgiI4IMhWJtiu0e_7Z7fiU0bU,13029 +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/embedding.py,sha256=vCSV8WXu8DSO9BQon8hNE8mf4xiqbTAvNv6o_3jpj-U,11209 +torch/distributed/_shard/sharding_spec/chunk_sharding_spec_ops/embedding_bag.py,sha256=MbwwHTcGrkbIrzEKoEZ7HWLY9N4fOiO6GMHjVmRQhZU,18325 +torch/distributed/_sharded_tensor/__init__.py,sha256=FBxS-XUyzsCTAVl6aOpNcTHIiRloihL6-v-KwT7Uz8I,615 +torch/distributed/_sharded_tensor/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_sharding_spec/__init__.py,sha256=PR7htIt_o1lIdWq_dvH3aXMHA22fj59dOpLT15vx3zE,646 +torch/distributed/_sharding_spec/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_state_dict_utils.py,sha256=Wq6sLxsF7nEvylCjnh52DWp8VluCSoqQJKCUlplHeG0,26396 +torch/distributed/_symmetric_memory/__init__.py,sha256=T6WEG28hkqdnJZa6M9eLMS5q2Y_xapWCONbci4RaD9o,34243 +torch/distributed/_symmetric_memory/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_tensor/__init__.py,sha256=ugiA22sfcEubBtHDk67pE4mj5Vmp0e4yWVStwOjJaIw,968 +torch/distributed/_tensor/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/_tensor/__pycache__/api.cpython-310.pyc,, +torch/distributed/_tensor/__pycache__/placement_types.cpython-310.pyc,, +torch/distributed/_tensor/api.py,sha256=ErftfbUVGCDavz7FfrQ0rEVlfYC0XmcTiWyjb4IZ1BY,300 +torch/distributed/_tensor/placement_types.py,sha256=pvi_dHBNCsZl1C-tAM9sdfYOiKb0Jwr3SqyAWMKgfo8,384 +torch/distributed/_tools/__init__.py,sha256=izvlaus-_E9m9rCMU8rp5UDqw55LGkTvQeL0ULqx5Xs,208 +torch/distributed/_tools/__pycache__/__init__.cpython-310.pyc,, 
+torch/distributed/_tools/__pycache__/fsdp2_mem_tracker.cpython-310.pyc,, +torch/distributed/_tools/__pycache__/mem_tracker.cpython-310.pyc,, +torch/distributed/_tools/__pycache__/memory_tracker.cpython-310.pyc,, +torch/distributed/_tools/__pycache__/mod_tracker.cpython-310.pyc,, +torch/distributed/_tools/__pycache__/runtime_estimator.cpython-310.pyc,, +torch/distributed/_tools/fsdp2_mem_tracker.py,sha256=RS_CzFYBzKCjNpXRRLtI4xHQlQ6uLN0yvP9x075oE_A,26016 +torch/distributed/_tools/mem_tracker.py,sha256=6of80n4yx0PM1VFwLjs0F06lb485ltu7rcffrs8ZT_4,42110 +torch/distributed/_tools/memory_tracker.py,sha256=923bbeT4E8kmXKbx_-wlm0EAq6O6-GQNYiTvIQfh1EQ,11492 +torch/distributed/_tools/mod_tracker.py,sha256=avAeIQIZWBuHv0qGJs1CZRSSNGbApMLrZcU7GVfQZmA,9542 +torch/distributed/_tools/runtime_estimator.py,sha256=0Il5iFBRuBVf-H5c7c57-Kk5tudfpaQ0ch6Fm330FQY,21168 +torch/distributed/algorithms/__init__.py,sha256=e5HJMsdSDkPOxNWYUh9YvO26GDODG5PBedTP2-Y16Nw,43 +torch/distributed/algorithms/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/__pycache__/join.cpython-310.pyc,, +torch/distributed/algorithms/_checkpoint/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/algorithms/_checkpoint/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/_checkpoint/__pycache__/checkpoint_wrapper.cpython-310.pyc,, +torch/distributed/algorithms/_checkpoint/checkpoint_wrapper.py,sha256=1TrZ35GgGezHFy0Wry6bpUt_NySeeoFvLx-MPOLu72w,12285 +torch/distributed/algorithms/_comm_hooks/__init__.py,sha256=LkI4VBMJ6_1KGG3Nz0bPxGFOO-ZjfBj-38FNROlJt8Q,131 +torch/distributed/algorithms/_comm_hooks/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/_comm_hooks/__pycache__/default_hooks.cpython-310.pyc,, +torch/distributed/algorithms/_comm_hooks/default_hooks.py,sha256=6C4jVnO8ncKtSUg8-RKTCSKbNpy1rtOUjS1QSbbML-E,7643 +torch/distributed/algorithms/_optimizer_overlap/__init__.py,sha256=zuKlfE0DcQCZm0av9HrJZfXH6R5AzV2K3xrEAAgoVsk,52 
+torch/distributed/algorithms/_optimizer_overlap/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/_optimizer_overlap/__pycache__/optimizer_overlap.cpython-310.pyc,, +torch/distributed/algorithms/_optimizer_overlap/optimizer_overlap.py,sha256=HqbCTVDhQh01ATPcOVVMDOzQC-yz9ocpYwv6w2qyWBY,3783 +torch/distributed/algorithms/_quantization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/algorithms/_quantization/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/_quantization/__pycache__/quantization.cpython-310.pyc,, +torch/distributed/algorithms/_quantization/quantization.py,sha256=2tNy-iPauktflIPNYk401kaO8DEqpAFb_ehWc3F0y80,5610 +torch/distributed/algorithms/ddp_comm_hooks/__init__.py,sha256=dJBDKPeI4qsoX1s2brunjzHedyVGpgbjxL7DT7tyREQ,3597 +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/ddp_zero_hook.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/debugging_hooks.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/default_hooks.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/mixed_precision_hooks.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/optimizer_overlap_hooks.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/post_localSGD_hook.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/powerSGD_hook.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/__pycache__/quantization_hooks.cpython-310.pyc,, +torch/distributed/algorithms/ddp_comm_hooks/ddp_zero_hook.py,sha256=1mwYaadpSNwaPf2dDuasZLff4xjZ69uMJCBQ_wk-SqA,19646 +torch/distributed/algorithms/ddp_comm_hooks/debugging_hooks.py,sha256=X49n0X3TnX650I-DeU-XtL_ssKjdYsn0RYyhB2Ndcms,1115 
+torch/distributed/algorithms/ddp_comm_hooks/default_hooks.py,sha256=xOprmHXwaS7-9tWb6KH4z6Mus6MYnFsc7SnEz0JSHPQ,8624 +torch/distributed/algorithms/ddp_comm_hooks/mixed_precision_hooks.py,sha256=stSL1UqCAPWibwSnkT-uN_PDw7W3jHhUdcNqmqtSLpQ,3405 +torch/distributed/algorithms/ddp_comm_hooks/optimizer_overlap_hooks.py,sha256=26bwQbtQkC7k0deL7LeXvj2EzC7LeDN0gPDJnJxAAHc,6123 +torch/distributed/algorithms/ddp_comm_hooks/post_localSGD_hook.py,sha256=QstL3X3P0c5WQa5pd9QL-Hlx_hy5znrZcfYd048PF_Q,5124 +torch/distributed/algorithms/ddp_comm_hooks/powerSGD_hook.py,sha256=oU2woy5LwH7_80mQ3zAAzLc6sHyh500w6kpTwszUnFE,40339 +torch/distributed/algorithms/ddp_comm_hooks/quantization_hooks.py,sha256=Ce1gjdaZljeWe5PmiVIhn4g3B0_jeKOghQRGHxapJcc,8224 +torch/distributed/algorithms/join.py,sha256=x-4jxBLteF2mVG3OWehBvuB7L-QXsAvntuam5M1C2BQ,13406 +torch/distributed/algorithms/model_averaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/algorithms/model_averaging/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/algorithms/model_averaging/__pycache__/averagers.cpython-310.pyc,, +torch/distributed/algorithms/model_averaging/__pycache__/hierarchical_model_averager.cpython-310.pyc,, +torch/distributed/algorithms/model_averaging/__pycache__/utils.cpython-310.pyc,, +torch/distributed/algorithms/model_averaging/averagers.py,sha256=1DZVt4W335zYEVX0Z-9PeIaKfdNUzEJAPtym69LcX60,5236 +torch/distributed/algorithms/model_averaging/hierarchical_model_averager.py,sha256=Bj9F7XdxLaEdE09donzh4Hv4RMGlyBUIRSiOdTUfy7w,9773 +torch/distributed/algorithms/model_averaging/utils.py,sha256=LxGaOuf0VOcUEQ80OeSqwAH4iUSq6O3uhWDa8vmV2DA,3122 +torch/distributed/argparse_util.py,sha256=NFxjw2asYt06aFffcu6rhJpC3FFthwKLu9wvWEm_qQU,3903 +torch/distributed/autograd/__init__.py,sha256=XZ0DochOG4HofBislMoYDkesBinL1HCCSyPRlOotv3I,1647 +torch/distributed/autograd/__pycache__/__init__.cpython-310.pyc,, 
+torch/distributed/c10d_logger.py,sha256=KhW28Dx3Yc81VeIu7GeWNE0NnrhqwVbBp_uFL9UKMB8,3338 +torch/distributed/checkpoint/__init__.py,sha256=tFaRRe6lAKQZHoIGHvNGf7iJJHDmRwlNu1FzHKYp8Qk,594 +torch/distributed/checkpoint/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_checkpointer.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_dedup_save_plans.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_dedup_tensors.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_fsspec_filesystem.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_nested_dict.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_sharded_tensor_utils.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_storage_utils.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_traverse.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/_version.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/api.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/default_planner.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/filesystem.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/format_utils.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/logger.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/logging_handlers.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/metadata.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/optimizer.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/planner.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/planner_helpers.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/resharding.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/staging.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/state_dict.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/state_dict_loader.cpython-310.pyc,, 
+torch/distributed/checkpoint/__pycache__/state_dict_saver.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/stateful.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/storage.cpython-310.pyc,, +torch/distributed/checkpoint/__pycache__/utils.cpython-310.pyc,, +torch/distributed/checkpoint/_checkpointer.py,sha256=PMd53KDgEEwnaWclZyCPcNHXEO001R7ZpqH7zlqHDOs,3668 +torch/distributed/checkpoint/_dedup_save_plans.py,sha256=vxGmetrI_DpK9WrotSVPmARCfoxt-Ctzh0kFcPp3pkE,2338 +torch/distributed/checkpoint/_dedup_tensors.py,sha256=_xzkanVCgegSCb7qp0kct8OiIevEsHqSaP1SdFL2Iro,2003 +torch/distributed/checkpoint/_fsspec_filesystem.py,sha256=XTEEKe2ZGJWMpdfhqZa0AX_MGhb1VX7tyodfNr6UPUY,4247 +torch/distributed/checkpoint/_nested_dict.py,sha256=Wmy0VhjeBAYLvjFIe0L-Oq6vGcVdL38PNNmA8ibvqCQ,2304 +torch/distributed/checkpoint/_sharded_tensor_utils.py,sha256=HZOdtN7SH2D79rgdm2g7BDEacEwTxkAJWZhNtSVv6O0,4144 +torch/distributed/checkpoint/_storage_utils.py,sha256=n_KJFyQ18Vr1moCfiWznJ57u_IFSHnFScpJgewjsNoY,1421 +torch/distributed/checkpoint/_traverse.py,sha256=UayHOKmtxmitQLIDHU6Mss9a10bf1oak-V5QAE_BAAc,6907 +torch/distributed/checkpoint/_version.py,sha256=SVGnUOT2LrrOFXySPqUstbulvBXbaUFSyx4e-hEkcH4,122 +torch/distributed/checkpoint/api.py,sha256=rgvp3qykv_l7v42mbyMDO7bfyn7zFHwIOmrPcBe8Nek,1450 +torch/distributed/checkpoint/default_planner.py,sha256=dEfWrluOuJoyHrKCdYeWRmckL4F2v3zUp1uB5Dz4gS8,20201 +torch/distributed/checkpoint/filesystem.py,sha256=F9-jIjCarxo-8xf1dn9wAMxf3B41bCNsKZqB-20B6Ds,25810 +torch/distributed/checkpoint/format_utils.py,sha256=5jc5nCxJ4GgxamqyuEABqDxujKgXhYe6rb2sVVsN0qs,10246 +torch/distributed/checkpoint/logger.py,sha256=m7iBYH2UBQxVzpLwxt92TVzk7Gisqi1Swp9fxXhkLPY,3211 +torch/distributed/checkpoint/logging_handlers.py,sha256=n2LWmU-hbTZ5jJwEKBKkZJiqjzIYaAkCBdTbc_XhAi8,244 +torch/distributed/checkpoint/metadata.py,sha256=qzkwhLpdwdKiRFzmdsliuBkwFq0taC9Hr2L-V5QW1RY,5529 
+torch/distributed/checkpoint/optimizer.py,sha256=cfBQeDwFzp76YuORUjQ3hOnGYchg4Kt-suNsJfxc9bE,13144 +torch/distributed/checkpoint/planner.py,sha256=JdyJea3ZA5ckhTzSpbhq5cyIPZZVo3efApY3u3tPBbY,14908 +torch/distributed/checkpoint/planner_helpers.py,sha256=N76M-52IdUFcr477ZRWzHNqvVaaoMTnvTFEFTAHjsMY,12968 +torch/distributed/checkpoint/resharding.py,sha256=1pM5fwlGSOWyxrEUTd9SnVT7DzAXNUMHKmxv6x7ImrQ,2326 +torch/distributed/checkpoint/staging.py,sha256=weyJOXgKN6jYGFjo_oIaSLMpx_tswc02IUvlfXthOrI,4899 +torch/distributed/checkpoint/state_dict.py,sha256=oTjaKY6m1-j2-TRToinGQ08KqnfzHsggTGl_JMKzQFI,52332 +torch/distributed/checkpoint/state_dict_loader.py,sha256=9f5WfcDfa2vyxBZeZh5XNF8y_M0Wz86ZB8VZVYExgpU,11627 +torch/distributed/checkpoint/state_dict_saver.py,sha256=IrDG5pZ2PkGSQqoUwUmtmg6jK3hpt0dhINZcApNud0U,12683 +torch/distributed/checkpoint/stateful.py,sha256=wr5Iby0bbQyvnS6ONSZC-B74Ay4X32xypTadZC2sFKw,1067 +torch/distributed/checkpoint/storage.py,sha256=njO7cOf6cEywBh4F1fq_HV3vWFyTJyHrNBWIW5RYUyI,9731 +torch/distributed/checkpoint/utils.py,sha256=PBpi3Ciuhe6hB5FC2e_TP8c7XbT4lOkTNn8k5y_D2OI,14451 +torch/distributed/collective_utils.py,sha256=2JIphZeRwqZXI14Q6n9oVl4sGox7-nbCeNe3RecJA4U,7286 +torch/distributed/constants.py,sha256=L80KqHvVxI65aUYs1wLIeQnfiOFdQpo6JfcrAJvDDM8,1229 +torch/distributed/device_mesh.py,sha256=iwvHnIx9Jj1KiiFLpvJjiNWGHtNmKm1XAboOCoHtKFc,44530 +torch/distributed/distributed_c10d.py,sha256=i7XH_kp1J4GuQnKCJZqmfLSiiw3cJxJ40NkDxfx94co,193084 +torch/distributed/elastic/__init__.py,sha256=QMPilFK2QkDj895i4fqrtE5bzyO55GJkV4t4EM_1Qzw,3654 +torch/distributed/elastic/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/__pycache__/control_plane.cpython-310.pyc,, +torch/distributed/elastic/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/elastic/agent/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/agent/server/__init__.py,sha256=p6OmZ3UC2RN276H3JLUZm0ygZ1GBWRcOOq4OQwozTo4,1401 
+torch/distributed/elastic/agent/server/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/agent/server/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/agent/server/__pycache__/health_check_server.cpython-310.pyc,, +torch/distributed/elastic/agent/server/__pycache__/local_elastic_agent.cpython-310.pyc,, +torch/distributed/elastic/agent/server/api.py,sha256=uSBFw2DkppqFFPetti8psE3CX6U5W4YBoUVwdjSZr-Y,35983 +torch/distributed/elastic/agent/server/health_check_server.py,sha256=XDdLOQE5JE7U35Vyl2OcPd9FwX74kjvSBDFbNF05Qc8,1679 +torch/distributed/elastic/agent/server/local_elastic_agent.py,sha256=bAMdJSQ6IBOn7hE68YKADv3jlDdPVe_3DaXpMcm4RP0,16418 +torch/distributed/elastic/control_plane.py,sha256=ISwOnaX8Mte7SKbfYo_Zjh81L_6uA0UvUG-eLlVNeQw,1168 +torch/distributed/elastic/events/__init__.py,sha256=pOazpJ5sTV_gVX-HbGMv-Q9z6OlRnPY4KVHVkVUTu1I,5371 +torch/distributed/elastic/events/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/events/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/events/__pycache__/handlers.cpython-310.pyc,, +torch/distributed/elastic/events/api.py,sha256=6aiCkUZWsuyXWwb53Wuj1kRPGrCyi7NdU8FDdvppXSA,3253 +torch/distributed/elastic/events/handlers.py,sha256=MZLZ2QRkTo6n_XZq-9NY8cSPjOjJoPyfs3grPOLigIc,580 +torch/distributed/elastic/metrics/__init__.py,sha256=REox7KmfMq3DTEMul6rZgtOsoQAaQh5sJSbUBda0RO0,4880 +torch/distributed/elastic/metrics/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/metrics/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/metrics/api.py,sha256=H1vdFL5cZlKmlGR4ReSPL900qZztNUTZdRMjKt4ts84,5681 +torch/distributed/elastic/multiprocessing/__init__.py,sha256=CP1qNwma3a-nGtJZHvLm9A-HtnUvGINRzOw72r6KjhI,7417 +torch/distributed/elastic/multiprocessing/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/__pycache__/redirects.cpython-310.pyc,, 
+torch/distributed/elastic/multiprocessing/__pycache__/tail_log.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/api.py,sha256=8NKt6n14IVcJ-HYmPTqzESJG5jBQKI50UUA4Lg7n0_s,33753 +torch/distributed/elastic/multiprocessing/errors/__init__.py,sha256=xRvcIXynjTYOsCupPm-DcOaZ1-Pe8GSqe3-HDeaqNz0,14351 +torch/distributed/elastic/multiprocessing/errors/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/errors/__pycache__/error_handler.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/errors/__pycache__/handlers.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/errors/error_handler.py,sha256=X2y8pxDqyOYpsBHx11gaCQ2Zk4Kaqo9eRZPvph2dFeY,6609 +torch/distributed/elastic/multiprocessing/errors/handlers.py,sha256=VtcYAyN_QiqCXmR_QwEM8sXNUGAa1XcAa23pd4vD_NU,475 +torch/distributed/elastic/multiprocessing/redirects.py,sha256=areeeQwpnChPbp0uUJ7i-cyc7w8KEqn8FtyVIT57OJ0,2764 +torch/distributed/elastic/multiprocessing/subprocess_handler/__init__.py,sha256=pz2OzruD8OBIFFxiI1_TLy0Bd8LDBKNLowzW1_jZUSs,523 +torch/distributed/elastic/multiprocessing/subprocess_handler/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/subprocess_handler/__pycache__/handlers.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/subprocess_handler/__pycache__/subprocess_handler.cpython-310.pyc,, +torch/distributed/elastic/multiprocessing/subprocess_handler/handlers.py,sha256=80xkU6CU2Yij5OPpPtzLZn2t81HBkGPU6VnNu7EUAKM,787 +torch/distributed/elastic/multiprocessing/subprocess_handler/subprocess_handler.py,sha256=n68Tn-wCQQ_cOcy-8h8eJnk_4tI3iiZfeEI0vN0iCaw,2449 +torch/distributed/elastic/multiprocessing/tail_log.py,sha256=LXysjyseeaed5nqma8_l9SMDQT6sPTM7L08tVX-YBmE,4959 +torch/distributed/elastic/rendezvous/__init__.py,sha256=v_AqGtNzcANKXq99SXUpcGjfJ_8fHkwPyG_unHHEYbA,6242 +torch/distributed/elastic/rendezvous/__pycache__/__init__.cpython-310.pyc,, 
+torch/distributed/elastic/rendezvous/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/c10d_rendezvous_backend.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/dynamic_rendezvous.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/etcd_rendezvous.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/etcd_rendezvous_backend.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/etcd_server.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/etcd_store.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/registry.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/static_tcp_rendezvous.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/__pycache__/utils.cpython-310.pyc,, +torch/distributed/elastic/rendezvous/api.py,sha256=ulADJ7YMSxUiw4sQPqb4fgYBBq5ZhTJh14jyMqw4TOY,12792 +torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py,sha256=13wAqC7ub0V3Hudg2u58pewgRGBz04p-yW7vmuRN9bs,10907 +torch/distributed/elastic/rendezvous/dynamic_rendezvous.py,sha256=HwbjI_9eqQXYFTxpjHplQI3aWNlRKEDIV0b0L1TxfRw,48122 +torch/distributed/elastic/rendezvous/etcd_rendezvous.py,sha256=xUhePywna0tFEYKMXd7ZVfU1ZZ7Jj2kzoP-ZvhdfZYg,43455 +torch/distributed/elastic/rendezvous/etcd_rendezvous_backend.py,sha256=KwjQEVtgJT1uHL0SW7HQ-eYwQeS0R-185GSiLZXLOOk,7437 +torch/distributed/elastic/rendezvous/etcd_server.py,sha256=I-G9eR5HXzXXyRYJZNLFft49G5yxEtEME9S7u9uJh48,8437 +torch/distributed/elastic/rendezvous/etcd_store.py,sha256=pJ2SC8THsDvAw0i1GPKVj-hqNckd2kJBvGtNmtWlpSU,6986 +torch/distributed/elastic/rendezvous/registry.py,sha256=ilyP2AfwBqk-XfSVkx9z47up8_mx0QG30ZzCWHj9haA,2236 +torch/distributed/elastic/rendezvous/static_tcp_rendezvous.py,sha256=lx_9W3SwGVYqFINdYjlJ6ettZq0WSvZVsPB_CDFjsf0,3665 +torch/distributed/elastic/rendezvous/utils.py,sha256=Y9K8OYdFyTnUP_fp5QNYUfXf9S4cxPckXnMgbgIpH6s,8403 
+torch/distributed/elastic/timer/__init__.py,sha256=IQ79GKrQX_UKab_Lbp0uqMuLgb2QzDQgd9EX7NJO5aU,1750 +torch/distributed/elastic/timer/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/timer/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/timer/__pycache__/debug_info_logging.cpython-310.pyc,, +torch/distributed/elastic/timer/__pycache__/file_based_local_timer.cpython-310.pyc,, +torch/distributed/elastic/timer/__pycache__/local_timer.cpython-310.pyc,, +torch/distributed/elastic/timer/api.py,sha256=Cm9liKaSA1u1gy_yIOrv_toQzFLi3-XKfC8hV4bYaqQ,9612 +torch/distributed/elastic/timer/debug_info_logging.py,sha256=0_StL9NAQdbjMFQmOZAQU68xu8l3ajB0q4q6Vf-6Ruk,640 +torch/distributed/elastic/timer/file_based_local_timer.py,sha256=f8u5vFw6YaqjYUI5EmGg00czN5waLT9_dZnIcSbgqnk,14919 +torch/distributed/elastic/timer/local_timer.py,sha256=tVrX8z1X_M_T2xNFCPvNfqqsb2SrGWN5aBPArKL8RLc,4309 +torch/distributed/elastic/utils/__init__.py,sha256=ztvtzgzf5pR1ixbbYfY0lGx85VhyhJuHH8er7-XNVpE,318 +torch/distributed/elastic/utils/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/utils/__pycache__/api.cpython-310.pyc,, +torch/distributed/elastic/utils/__pycache__/distributed.cpython-310.pyc,, +torch/distributed/elastic/utils/__pycache__/log_level.cpython-310.pyc,, +torch/distributed/elastic/utils/__pycache__/logging.cpython-310.pyc,, +torch/distributed/elastic/utils/__pycache__/store.cpython-310.pyc,, +torch/distributed/elastic/utils/api.py,sha256=gg93l9Ghdyo7oGV571eEZanmbH5nwC9SrnjaUPmQ1bk,1716 +torch/distributed/elastic/utils/data/__init__.py,sha256=tF96JUuxZmWRkvGOzP7tbZT6s8CKNz29F6O_OYTcL3o,372 +torch/distributed/elastic/utils/data/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/elastic/utils/data/__pycache__/cycling_iterator.cpython-310.pyc,, +torch/distributed/elastic/utils/data/__pycache__/elastic_distributed_sampler.cpython-310.pyc,, 
+torch/distributed/elastic/utils/data/cycling_iterator.py,sha256=XmmZOf2ZvWOdMtd9DMKyw9oqgDE786yL7eV3WPASnE8,1403 +torch/distributed/elastic/utils/data/elastic_distributed_sampler.py,sha256=RjGv37NESgrl2AmsIczmbovYAjtOOadzEdTqKqr4i90,2483 +torch/distributed/elastic/utils/distributed.py,sha256=CR4GYHNfUi6Do0EYSUvbDWvwJbsIqXgfLfN67yOz0wg,5921 +torch/distributed/elastic/utils/log_level.py,sha256=1pOw0YanV5aPrMGwqkATk8OTXm6iLF2iuUYKbUf1b6Q,339 +torch/distributed/elastic/utils/logging.py,sha256=mckRPREG5SeNhbx8l2x3ys-cNYbR3tN0MjO3jENxw1U,2262 +torch/distributed/elastic/utils/store.py,sha256=o6yLXglfaNN6RREQLtg0-KeZ4kj4E3idFE3S7Ck25m8,7253 +torch/distributed/fsdp/__init__.py,sha256=esXWeauFRI3wad6d2c_xR0q-gw4uNmuOTqlfrya-CHQ,939 +torch/distributed/fsdp/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_common_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_debug_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_dynamo_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_exec_order_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_flat_param.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_fsdp_extensions.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_init_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_limiter_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_optim_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_runtime_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_shard_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_state_dict_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_trace_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_traversal_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_unshard_param_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/_wrap_utils.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/api.cpython-310.pyc,, 
+torch/distributed/fsdp/__pycache__/fully_sharded_data_parallel.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/sharded_grad_scaler.cpython-310.pyc,, +torch/distributed/fsdp/__pycache__/wrap.cpython-310.pyc,, +torch/distributed/fsdp/_common_utils.py,sha256=2slQwQT_oiClKbPdcZlyMNJaA3PzjHbxwACPgQh0u-k,22447 +torch/distributed/fsdp/_debug_utils.py,sha256=C_facfUsbZNEY5qIubEzyWMBITdv2YOutoIi4Am8cLE,5706 +torch/distributed/fsdp/_dynamo_utils.py,sha256=KMPZ7fapnSDPkhe7EWV_MuPniiY9-5WNS4KXZ-qQCnA,2674 +torch/distributed/fsdp/_exec_order_utils.py,sha256=_DzM2rHs2kzwPR3JOuWbv2mLz-kApJcO2ItuOJBzYlU,16122 +torch/distributed/fsdp/_flat_param.py,sha256=JdwoNV1XXtiBGGRJWQksvXc6X3jouvW2Xc9c8syV174,120751 +torch/distributed/fsdp/_fsdp_extensions.py,sha256=rdVGHxyfcXnNeMHet_LD6L5PX31bsaol_MK6aChOICE,4969 +torch/distributed/fsdp/_init_utils.py,sha256=wAD5DeWKWwuPH3_h_DjTt25Lm0EgBZR0rs58iC_1fB0,46274 +torch/distributed/fsdp/_limiter_utils.py,sha256=nQD4KvZ7HAX-14AoUyTZ0XfBQaQmq02KgHiHfZM3t9g,1081 +torch/distributed/fsdp/_optim_utils.py,sha256=udVgjPxbTzhD2RUFXpe2TqDGQRj3XUYtOXIkuJlkZ_o,86954 +torch/distributed/fsdp/_runtime_utils.py,sha256=OocqYQFc_WeDhSqVYO1YWtYrseLbsG876-1-YpPCi4U,66201 +torch/distributed/fsdp/_shard_utils.py,sha256=UvmJhRSZ_oS00wVBtqElOpT5L71cfhAdptvhOCa6kIU,4623 +torch/distributed/fsdp/_state_dict_utils.py,sha256=ecREIK_qC87S-UB-tQb6bRp3qf30Kf4cobDm3vMUv40,34190 +torch/distributed/fsdp/_trace_utils.py,sha256=sv0lkKAw6yCHXz_RZNJtZOOLmD7Ma1sy7BeVkODFbdg,10776 +torch/distributed/fsdp/_traversal_utils.py,sha256=scEaKCmS4eUIOs0nUEDDAE5C23BPi8eSsNF5ngQ_yVE,4641 +torch/distributed/fsdp/_unshard_param_utils.py,sha256=j02wyHu7s7Od4WzICswwkBZd0PBDmkHnuuRpsiFJ4i4,11497 +torch/distributed/fsdp/_wrap_utils.py,sha256=mQerWiCbhHcTLEDL8MUOuYzqnzbBsWFsOKbS5MCjFB8,10925 +torch/distributed/fsdp/api.py,sha256=CymuvEwdwch2upiM2laMkvHaXAhSDq4bnBD5bfIUaBE,18880 +torch/distributed/fsdp/fully_sharded_data_parallel.py,sha256=A9Qk2msMsLUPnxA3qh7UBARVyFWb4YpbzGIiWv1iTP4,100299 
+torch/distributed/fsdp/sharded_grad_scaler.py,sha256=jvGAR5mOVxaU1S0RC7W_f2WWibwBB010Ek_SMnlrKqc,17568 +torch/distributed/fsdp/wrap.py,sha256=_jxDP4wsL6rwhajm1cJaZ3SmeFOVbcaLzSXmU8k-Ktg,22632 +torch/distributed/launch.py,sha256=Yrg9dknLJ63CthE0XyKNvRIq-N6txLsn4bJOzKzY5do,7610 +torch/distributed/launcher/__init__.py,sha256=V0J5Y0SVG1ybhGeHnjarMc4dcOgyhRsHpnP7lBMyB48,349 +torch/distributed/launcher/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/launcher/__pycache__/api.cpython-310.pyc,, +torch/distributed/launcher/api.py,sha256=qIJQwtYoE0D96Zvopt8u4P2ZQ44kUImmaxKnzChgZbc,10983 +torch/distributed/logging_handlers.py,sha256=atN4o8Q5hENaz5nAt_oApbO4JLkMr4XAz9zFvGQMKwU,389 +torch/distributed/nn/__init__.py,sha256=Ta4l0Jr2gOK7eQR-e32stWaDKPOlV_Kii0SExydXk_Q,145 +torch/distributed/nn/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/nn/__pycache__/functional.cpython-310.pyc,, +torch/distributed/nn/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/nn/api/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/nn/api/__pycache__/remote_module.cpython-310.pyc,, +torch/distributed/nn/api/remote_module.py,sha256=VcnES6JaK17fEF-9dAzv93H5NxR9fWqdYjLwVoHn7q8,31308 +torch/distributed/nn/functional.py,sha256=vuSx_oHokfmdVl4yOsprvaf9f1aVM80gZnzYLx2I9B4,15178 +torch/distributed/nn/jit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/nn/jit/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/nn/jit/__pycache__/instantiator.cpython-310.pyc,, +torch/distributed/nn/jit/instantiator.py,sha256=RAphnHZOQTNWTXlWugYKciPpRmSFS5SkogdNCVdW234,5459 +torch/distributed/nn/jit/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/distributed/nn/jit/templates/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/nn/jit/templates/__pycache__/remote_module_template.cpython-310.pyc,, 
+torch/distributed/nn/jit/templates/remote_module_template.py,sha256=D_GFa70DG1KY1GwGwFAqvCC9gE9CwZSe1BJ-jHc2YMw,3463 +torch/distributed/optim/__init__.py,sha256=mTALoKVvXKfZvqmymzuf-VBY4pJhqsEYZ5leJApr51w,1781 +torch/distributed/optim/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/optim/__pycache__/apply_optimizer_in_backward.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_adadelta.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_adagrad.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_adam.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_adamax.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_adamw.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_rmsprop.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_rprop.cpython-310.pyc,, +torch/distributed/optim/__pycache__/functional_sgd.cpython-310.pyc,, +torch/distributed/optim/__pycache__/named_optimizer.cpython-310.pyc,, +torch/distributed/optim/__pycache__/optimizer.cpython-310.pyc,, +torch/distributed/optim/__pycache__/post_localSGD_optimizer.cpython-310.pyc,, +torch/distributed/optim/__pycache__/utils.cpython-310.pyc,, +torch/distributed/optim/__pycache__/zero_redundancy_optimizer.cpython-310.pyc,, +torch/distributed/optim/apply_optimizer_in_backward.py,sha256=p2XiAnb3R--5JDM8D4f4yvsDM5s6l2CWnhD6DW-WvCE,5202 +torch/distributed/optim/functional_adadelta.py,sha256=3F_bEKNREg3xAoMkl-WoDfSD9FFzIbXvSHCHbwMrcbs,3776 +torch/distributed/optim/functional_adagrad.py,sha256=_dXBIEiVt4CJ5_3nUKnvM_6f2s1B4LFLBDWU2pcGv7w,4126 +torch/distributed/optim/functional_adam.py,sha256=Rb5u5TrZdaeNYJ6sdzYribqg0t0p6f1RP3eKiy62NWE,7254 +torch/distributed/optim/functional_adamax.py,sha256=p5MsXByY2Hf4WFXuW0eQnglX0FWk6vZVC9jMJXCcP7o,4480 +torch/distributed/optim/functional_adamw.py,sha256=6HKvMqhjws9Jh1Fh2HQi9dFC8KqsckAg0o-ZheP0e9U,7374 
+torch/distributed/optim/functional_rmsprop.py,sha256=M51PdBZqhcWBTOt5Q9V_heHVM7cyRVRAZz2ASmAdajU,4506 +torch/distributed/optim/functional_rprop.py,sha256=7OHFrV6PQa9V9idczIo9SDU1FDt9NpwNtKgrB1AB5fY,3667 +torch/distributed/optim/functional_sgd.py,sha256=mBE5IcR-zbDhE0WzXC_ELD64JXZuokmXWPpoMn3nPdo,5760 +torch/distributed/optim/named_optimizer.py,sha256=MsmZ8PtBad-g_qrsbTIQCKzTFQGobQrEHS1PVgyToiM,14033 +torch/distributed/optim/optimizer.py,sha256=FUnCIpnwaBp7XWQ-qgHq9q_tOpq7JG3DIsT1tcpxThM,9861 +torch/distributed/optim/post_localSGD_optimizer.py,sha256=sz6bLYaqHrVJAv8h8mhPUDMnsbxeoVAytRsTyVJ45QA,4414 +torch/distributed/optim/utils.py,sha256=UPteKg6VvlaQvdwYllKRNcOTuwWwAcLtK1JYM9srlN4,2266 +torch/distributed/optim/zero_redundancy_optimizer.py,sha256=yRBb6-1i10BvRNBndygJaUgl7eiwCsb-lankDWk3hHQ,71922 +torch/distributed/pipelining/_IR.py,sha256=ccZlqm-JMjnPthLMqKJqF8aC47Nlsn_YdIn7KijLsQ8,48668 +torch/distributed/pipelining/__init__.py,sha256=-HDrm9iLtbRaVNZPRz9MpmAqTN2eJhCUJxoI-bCqLq0,661 +torch/distributed/pipelining/__pycache__/_IR.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/_backward.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/_debug.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/_unflatten.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/_utils.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/microbatch.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/schedules.cpython-310.pyc,, +torch/distributed/pipelining/__pycache__/stage.cpython-310.pyc,, +torch/distributed/pipelining/_backward.py,sha256=6Jxjo4D1ANvVAjye_2gHG4tuvB0gyG27BBa0cYtwujs,13706 +torch/distributed/pipelining/_debug.py,sha256=ziWAcOgfK2I65E9NVb1oZO5u239YHt4q4tjPs5mU048,557 +torch/distributed/pipelining/_unflatten.py,sha256=DX6WCH2Jaf9BCE54D8t7GUf8u_t4-AVnAWfLi5zB-IQ,741 
+torch/distributed/pipelining/_utils.py,sha256=pK2dA2hPwPTMUGQ_DOEka30KpObHOdOpadwKXmjyNww,2611 +torch/distributed/pipelining/microbatch.py,sha256=xjDDOrxUFzSQ5fhmurfMN36tw7ODLVe7aJTG8PUsJto,16271 +torch/distributed/pipelining/schedules.py,sha256=_k7Sa9ccM31Rw9NQihJXuSZqANy2sl63xOsTfOO5Mss,89064 +torch/distributed/pipelining/stage.py,sha256=UFgNo6KAqIm8fcz-xpROxpQaW0gP1TJuOA1I0erp9po,58006 +torch/distributed/remote_device.py,sha256=1fVMJwZembXV3LVYIeA_cutnk-KEt3WVBkkD3LgUVGw,4600 +torch/distributed/rendezvous.py,sha256=tRgq_ZXUPonmyd-_N_G0LypBaM58VqDHHK5TxXl79lI,9866 +torch/distributed/rpc/__init__.py,sha256=53ys0mDatHsUH7RFVPBA5mp7ILIQRdnZd4XZmr8uSRc,9708 +torch/distributed/rpc/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/_utils.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/api.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/backend_registry.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/constants.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/functions.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/internal.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/options.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/rref_proxy.cpython-310.pyc,, +torch/distributed/rpc/__pycache__/server_process_global_profiler.cpython-310.pyc,, +torch/distributed/rpc/_testing/__init__.py,sha256=NkVbo5kj4-oj6GD5VltS0eUjHlTdOdURVlCZSz4f7Gw,499 +torch/distributed/rpc/_testing/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/rpc/_testing/__pycache__/faulty_agent_backend_registry.cpython-310.pyc,, +torch/distributed/rpc/_testing/faulty_agent_backend_registry.py,sha256=KmeZ2KHDwSWJB3mdfA06hdaabt25vKkkUU1h8SFVk1o,1639 +torch/distributed/rpc/_utils.py,sha256=BofSE2A4t3k8KvJDDPS05OO8rSMUYjmOZvuncpWUNtE,1645 +torch/distributed/rpc/api.py,sha256=7TpcEvZrNxb1KYZ9PNfZQpc2R00_lb3p3KDcmj0FWYs,36977 +torch/distributed/rpc/backend_registry.py,sha256=hCOxbh8vR2o2k6djtARMMRCN6iLZb1Uo2kw2hgMeP1c,16311 
+torch/distributed/rpc/constants.py,sha256=hTjm5RGHOA6cQQaKIiOmIEtOMlX1g-cbYwqKYjvwRwE,828 +torch/distributed/rpc/functions.py,sha256=9sE_wYiqlfUmb6AFBFPmOq6PApwYKxosNOnteoy_0Oc,7272 +torch/distributed/rpc/internal.py,sha256=y8K05r6QPwRy2gKRLVMNG5xGCy2KvhqEpBv4S2vOxXs,11112 +torch/distributed/rpc/options.py,sha256=VopMzyp6LnmgxNsu3psyXVLQ3joa9EZrPE2SJtxAoS0,7073 +torch/distributed/rpc/rref_proxy.py,sha256=olBD28H2iKQvS8w1FcqGsr5M4_xYCSln5BeokLrPLJQ,2673 +torch/distributed/rpc/server_process_global_profiler.py,sha256=X503iJYV8Rayfzm6RqY1aSWzNdKEu5SSBMudhXeHE5E,8397 +torch/distributed/run.py,sha256=rsG9tWwMjndevRinAMKA_c7nEBLEx87gdoV-O-h905g,34317 +torch/distributed/tensor/__init__.py,sha256=VDFmYdccbjAE7eRGdfBIwjHUrHNX7i2QY6MXm8_0WYc,1845 +torch/distributed/tensor/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_api.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_collective_utils.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_dispatch.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_dtensor_spec.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_op_schema.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_random.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_redistribute.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_sharding_prop.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_shards_wrapper.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_tp_conv.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/_utils.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/device_mesh.cpython-310.pyc,, +torch/distributed/tensor/__pycache__/placement_types.cpython-310.pyc,, +torch/distributed/tensor/_api.py,sha256=QdjiVfikJa_k5j2wkuLCCE-5DeWhdSAWHpU2v3cbKn8,52812 +torch/distributed/tensor/_collective_utils.py,sha256=jwWKdVq0J0NUNkTdlLNCREBD-7kETOMdfOMDcyvO_hk,13522 +torch/distributed/tensor/_dispatch.py,sha256=1VqlCpkq_Y1hcQsrESf1BsEuxq3k5tl7qg4cZaFfH5w,21528 
+torch/distributed/tensor/_dtensor_spec.py,sha256=EuI84FKMmJhCHwNK9SxIoPVUplM5YM54hzZP_HrXia0,10301 +torch/distributed/tensor/_op_schema.py,sha256=kWyWVtHM9Optw9TgHEiO5Mfax_VB5dqfWVCu-45WVOs,17157 +torch/distributed/tensor/_ops/__init__.py,sha256=rQDU5Pzj2vgBb7r4PjWGZlnviSO25O82OlfVSyZOhY4,427 +torch/distributed/tensor/_ops/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_common_rules.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_conv_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_einsum_strategy.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_embedding_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_experimental_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_math_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_matrix_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_pointwise_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_random_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_tensor_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/_view_ops.cpython-310.pyc,, +torch/distributed/tensor/_ops/__pycache__/utils.cpython-310.pyc,, +torch/distributed/tensor/_ops/_common_rules.py,sha256=Uw6QEOkFkUqBVieHutCQMoejxQl15dbKXN3RsbYv-vA,12024 +torch/distributed/tensor/_ops/_conv_ops.py,sha256=3i11dWloBwNsWOqgDJWDpil9BAgabfGpY3_JwUHWvzQ,3321 +torch/distributed/tensor/_ops/_einsum_strategy.py,sha256=peyO7zqYXr44fuoDQH3YvQADMCXFuRZBLvdXy1HADbg,6665 +torch/distributed/tensor/_ops/_embedding_ops.py,sha256=uVn4q2oownD2npC_6GiMiaSLvsTF-aEq6ySWVq21mrQ,10173 +torch/distributed/tensor/_ops/_experimental_ops.py,sha256=qWUqANmtMg9_tcekE8u6Jaa7DPUCglK6OjiZlTaaV8g,988 +torch/distributed/tensor/_ops/_math_ops.py,sha256=_vThCk-GVFH41Q69i7-YrR6cIXsi0vmc1A82jM83KOc,40413 +torch/distributed/tensor/_ops/_matrix_ops.py,sha256=K_N2ikx3lA-gy7MQAQiI1gH35OP7WVhA2Q4brpBnnDI,17437 
+torch/distributed/tensor/_ops/_pointwise_ops.py,sha256=OQlC3z3dcigMR9yDffF82Z6XnkygpIjL1QIYAajrMrY,20739 +torch/distributed/tensor/_ops/_random_ops.py,sha256=PZqpl830Ae7xWCEj8zQ4zG7UaOvtNkF5P0ElITXhkyw,1213 +torch/distributed/tensor/_ops/_tensor_ops.py,sha256=sR4_mvR1u1NcluvSWKIrMm1C8ByTXRdEPrJd0LXRfX8,30221 +torch/distributed/tensor/_ops/_view_ops.py,sha256=EO0KOS36AiDPtzECEpfChi5yq3G5qELBFDogTAyeDsM,22820 +torch/distributed/tensor/_ops/utils.py,sha256=BELQvPnIKuirBQzMcIcWXsKtimbz_s9kqBY7zqha7j4,10269 +torch/distributed/tensor/_random.py,sha256=G1qeGtBV1ANBUqOmqFV8fCGs7LHEqzXr3DQjnOPuFoc,15882 +torch/distributed/tensor/_redistribute.py,sha256=WqugnAc-K1HyEDSR7B9YV4K6nCUL7FoPsBJnl9bi5sc,14405 +torch/distributed/tensor/_sharding_prop.py,sha256=MU1LHN2ZkkUqMurRZ31hEcgHEL4FV3Xs0_IY5T2ebRU,21536 +torch/distributed/tensor/_shards_wrapper.py,sha256=0XyqK_wvNo0ypfVMOvIrjHKR_-vWEWdKTb-IlkpmYng,11602 +torch/distributed/tensor/_tp_conv.py,sha256=9GY0dNF7AbjJQlxZNyxAUjmNahNOXh7w6wHY7ROYvQo,10165 +torch/distributed/tensor/_utils.py,sha256=SeMxVDAPQcpJZWJvg3uHicycHk9_2ntKS7o-LdFUBsk,14044 +torch/distributed/tensor/debug/__init__.py,sha256=UhAixmUlON2pwFNDHXlRQLafPRriRKOUySTf9VgG7fk,850 +torch/distributed/tensor/debug/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/tensor/debug/__pycache__/_comm_mode.cpython-310.pyc,, +torch/distributed/tensor/debug/__pycache__/_op_coverage.cpython-310.pyc,, +torch/distributed/tensor/debug/__pycache__/_visualize_sharding.cpython-310.pyc,, +torch/distributed/tensor/debug/_comm_mode.py,sha256=rhgusFgkHC-5UjWUoWOoICjfQPZEKRZBCPt8g7YHqSw,28763 +torch/distributed/tensor/debug/_op_coverage.py,sha256=--90iXGzphh2bxxDq4TuNhh5oRg0vtlhphHpAnRMOKY,3166 +torch/distributed/tensor/debug/_visualize_sharding.py,sha256=63LCLBkiuYrIhiYSMaUJU0exanGplwJgFNXF2kjRIr0,6496 +torch/distributed/tensor/device_mesh.py,sha256=MONPC_i1XphcY2E0AAte1MW668BTlUGugMSQzfJqzik,190 
+torch/distributed/tensor/experimental/__init__.py,sha256=I7x--XOA9aBWpEMCLn_gTRXxmholA-s4CLWR6tD4EiA,1228 +torch/distributed/tensor/experimental/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/tensor/experimental/__pycache__/_attention.cpython-310.pyc,, +torch/distributed/tensor/experimental/__pycache__/_func_map.cpython-310.pyc,, +torch/distributed/tensor/experimental/__pycache__/_register_sharding.cpython-310.pyc,, +torch/distributed/tensor/experimental/__pycache__/_tp_transform.cpython-310.pyc,, +torch/distributed/tensor/experimental/_attention.py,sha256=OfMUN7AshP2dTZeF3YI9N_6H7epkdK4PvJJ8QsfHt10,29505 +torch/distributed/tensor/experimental/_func_map.py,sha256=yUHMwmO5w38koh5uM8xsznInorQq6Rxh8SWHTt6ooWA,10943 +torch/distributed/tensor/experimental/_register_sharding.py,sha256=KIa1Z9A6YEA6ZFYC8FQb5pwQNSKP_2s0iSHTG1RI4_k,5666 +torch/distributed/tensor/experimental/_tp_transform.py,sha256=qyHAOHiES67FzmDUA2W3Bots4260dgDzB-8BjYHbn2E,20403 +torch/distributed/tensor/parallel/__init__.py,sha256=h38VeP4BPyLTeuqGZalxRCEj3Avl7Kd0Gjybo0G9wFg,581 +torch/distributed/tensor/parallel/__pycache__/__init__.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/_data_parallel_utils.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/_utils.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/api.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/ddp.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/fsdp.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/input_reshard.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/loss.cpython-310.pyc,, +torch/distributed/tensor/parallel/__pycache__/style.cpython-310.pyc,, +torch/distributed/tensor/parallel/_data_parallel_utils.py,sha256=H-5w4s0X4KmRqY25_7IX4VDABpFKcjJDZ1lb3bqBg_Y,1517 +torch/distributed/tensor/parallel/_utils.py,sha256=0zrD1_GChRNNHSW6SerHE9us9TufDzCOzk_csJaHN4U,2320 
+torch/distributed/tensor/parallel/api.py,sha256=0Boh9S0nnkd5i52wtHCLuKkQ93hJcKNBsnXVQah3EeQ,5264 +torch/distributed/tensor/parallel/ddp.py,sha256=EYfZcIReB3YBPkaZ2rf7t7_l3qHeulM5sfAgZzUsqrQ,3752 +torch/distributed/tensor/parallel/fsdp.py,sha256=0x-OkkfJcjPhwvQxmKkOvM9YPrtVWJi2bld-ifD_mUM,13652 +torch/distributed/tensor/parallel/input_reshard.py,sha256=PL6reF-WRraDSqO98N7skJKXBtZfjxIdkjb9ucUmafc,3774 +torch/distributed/tensor/parallel/loss.py,sha256=eGkh6dnceTpyNP_43PmoKFUrWJPu3OB7I5bc4aaqubc,17728 +torch/distributed/tensor/parallel/style.py,sha256=QTnpkXAB3EbcWJy7-79IL1wEe4S-AXjyAat34dLTAJY,28517 +torch/distributed/tensor/placement_types.py,sha256=g6ldbelpJMXnbioTAfX0Lk4f9XUJjJK-HdNyQv-wWf8,25534 +torch/distributed/utils.py,sha256=mTcGUCkUYvUcYwIXdJ90xVwsaICH8BF0I4aexr3OiK4,13372 +torch/distributions/__init__.py,sha256=OVXwzwRZa45Hln7PmpDUKI5D1eRDuHhZ8uBH2_aICDk,6036 +torch/distributions/__pycache__/__init__.cpython-310.pyc,, +torch/distributions/__pycache__/bernoulli.cpython-310.pyc,, +torch/distributions/__pycache__/beta.cpython-310.pyc,, +torch/distributions/__pycache__/binomial.cpython-310.pyc,, +torch/distributions/__pycache__/categorical.cpython-310.pyc,, +torch/distributions/__pycache__/cauchy.cpython-310.pyc,, +torch/distributions/__pycache__/chi2.cpython-310.pyc,, +torch/distributions/__pycache__/constraint_registry.cpython-310.pyc,, +torch/distributions/__pycache__/constraints.cpython-310.pyc,, +torch/distributions/__pycache__/continuous_bernoulli.cpython-310.pyc,, +torch/distributions/__pycache__/dirichlet.cpython-310.pyc,, +torch/distributions/__pycache__/distribution.cpython-310.pyc,, +torch/distributions/__pycache__/exp_family.cpython-310.pyc,, +torch/distributions/__pycache__/exponential.cpython-310.pyc,, +torch/distributions/__pycache__/fishersnedecor.cpython-310.pyc,, +torch/distributions/__pycache__/gamma.cpython-310.pyc,, +torch/distributions/__pycache__/geometric.cpython-310.pyc,, +torch/distributions/__pycache__/gumbel.cpython-310.pyc,, 
+torch/distributions/__pycache__/half_cauchy.cpython-310.pyc,, +torch/distributions/__pycache__/half_normal.cpython-310.pyc,, +torch/distributions/__pycache__/independent.cpython-310.pyc,, +torch/distributions/__pycache__/inverse_gamma.cpython-310.pyc,, +torch/distributions/__pycache__/kl.cpython-310.pyc,, +torch/distributions/__pycache__/kumaraswamy.cpython-310.pyc,, +torch/distributions/__pycache__/laplace.cpython-310.pyc,, +torch/distributions/__pycache__/lkj_cholesky.cpython-310.pyc,, +torch/distributions/__pycache__/log_normal.cpython-310.pyc,, +torch/distributions/__pycache__/logistic_normal.cpython-310.pyc,, +torch/distributions/__pycache__/lowrank_multivariate_normal.cpython-310.pyc,, +torch/distributions/__pycache__/mixture_same_family.cpython-310.pyc,, +torch/distributions/__pycache__/multinomial.cpython-310.pyc,, +torch/distributions/__pycache__/multivariate_normal.cpython-310.pyc,, +torch/distributions/__pycache__/negative_binomial.cpython-310.pyc,, +torch/distributions/__pycache__/normal.cpython-310.pyc,, +torch/distributions/__pycache__/one_hot_categorical.cpython-310.pyc,, +torch/distributions/__pycache__/pareto.cpython-310.pyc,, +torch/distributions/__pycache__/poisson.cpython-310.pyc,, +torch/distributions/__pycache__/relaxed_bernoulli.cpython-310.pyc,, +torch/distributions/__pycache__/relaxed_categorical.cpython-310.pyc,, +torch/distributions/__pycache__/studentT.cpython-310.pyc,, +torch/distributions/__pycache__/transformed_distribution.cpython-310.pyc,, +torch/distributions/__pycache__/transforms.cpython-310.pyc,, +torch/distributions/__pycache__/uniform.cpython-310.pyc,, +torch/distributions/__pycache__/utils.cpython-310.pyc,, +torch/distributions/__pycache__/von_mises.cpython-310.pyc,, +torch/distributions/__pycache__/weibull.cpython-310.pyc,, +torch/distributions/__pycache__/wishart.cpython-310.pyc,, +torch/distributions/bernoulli.py,sha256=XOrIoblhDu33w5D-86YzFSzeXC6eVSYaoeziu911IBs,4199 
+torch/distributions/beta.py,sha256=AKQ-8HdIf-agnXwA8lTYlxLy98ajNd4tccEVdFnhEDQ,3753 +torch/distributions/binomial.py,sha256=jaFQttkRcJC5YckXT1qHe4GROlSNkA7Fr2pKmTPG6wA,6005 +torch/distributions/categorical.py,sha256=KKekaaVoADwixBb0EBKJszLHAljsYJzBqvbGceXvbC8,5805 +torch/distributions/cauchy.py,sha256=XpYjhY82J1_B0lKw8rxGt6jOCGxsZyxofZqPYQXwtB0,3011 +torch/distributions/chi2.py,sha256=Mi-hlL85pQU2G4t-3oIA5UQHXA0PvbXRTSya2jEGnVo,1002 +torch/distributions/constraint_registry.py,sha256=aFm5Nxaymc68nMAG_tCyy0exhoPvHOumZCq45F1swWc,10329 +torch/distributions/constraints.py,sha256=0181trze9WuLJ3qBeHhF9JpCCfi-7OBn92oRDhYQOKs,19376 +torch/distributions/continuous_bernoulli.py,sha256=TgZzCJIqAIMZykL2PPPPqxdFDy0Sf7A2hn2yOBKD3pE,8771 +torch/distributions/dirichlet.py,sha256=kHOfunGjO5BAgPBGbXdJcTbQY3cSyGTMer9QQC-v9yU,4254 +torch/distributions/distribution.py,sha256=EdjsDqD6e8UBDfL5vZNfuHjX6APdJOd_2rB_N44jm0M,12379 +torch/distributions/exp_family.py,sha256=feF46vt3pwninw2fkRyoLFMWlREoEGdI9W4GorLhUTk,2381 +torch/distributions/exponential.py,sha256=SwWxvH7iN6tTlaz3Mmdbx5glk4gogHRGzymy6ZN0ydg,2521 +torch/distributions/fishersnedecor.py,sha256=k0zHlRayFmwrIV689si_2-egtBG5ybS4J1FVAHi_fpc,3517 +torch/distributions/gamma.py,sha256=Ylsyx72CJWypdNAEAuPYD_9BSv8aImqXGrkLV8JCgzA,3689 +torch/distributions/geometric.py,sha256=xaiMpY2-YyVSaAxVrfrz_QFZ6BW035XXs1h4A5VeaXw,4680 +torch/distributions/gumbel.py,sha256=TSKDl5Tvv7rHd_9i3TCR0J9pai1sP9a8CQmdFc4X1Fo,2798 +torch/distributions/half_cauchy.py,sha256=XJ8h45fpTb44TqLZH-xsnC5DRWlUWKJTjgyGAg3sBAg,2423 +torch/distributions/half_normal.py,sha256=ezEwgpcrnpAbeEnScZ-r_iIqtN-AZc6ZwYlrmUA_kM0,2188 +torch/distributions/independent.py,sha256=XEcWzrPDn35-OMgdrP0rRcWg5-EYmJHC5gOoG3P9skg,4688 +torch/distributions/inverse_gamma.py,sha256=q7KVvP47lWx2IK0BoDFnD3pRKP8reB3pG1YKU2mpNhk,2505 +torch/distributions/kl.py,sha256=vhqaO-JT9JOEUaR5Yb4lBGJDRo83WHopZIA3SB7kkeI,31731 
+torch/distributions/kumaraswamy.py,sha256=NSPHFa3pzTpdJuZ06gXwTtqO6c7ukp_h4jUe4iMVXPE,3523 +torch/distributions/laplace.py,sha256=bMdzCvSSpxcr4jN2H2t2ZAboAHPOW5g7SQC-l64GtJY,3315 +torch/distributions/lkj_cholesky.py,sha256=DYiivVGh2w8R8nnloumTvJ8lP9k0fVNatVdVg2F9bsE,6401 +torch/distributions/log_normal.py,sha256=kQLmAa2PiiHh5HJhgJXYHKTD_pXlVGoCkJ-rNs1XsvM,1957 +torch/distributions/logistic_normal.py,sha256=JdGUjdFvXLh13YKSio42hQK6cNWW7BWrZnmB9te9h3U,1977 +torch/distributions/lowrank_multivariate_normal.py,sha256=i0xFzwUD59yiXVfSrLD3FuGMLSPXAUBAmoJjWrhH_Mo,9863 +torch/distributions/mixture_same_family.py,sha256=OOglbCHuqyqlGOIdW6CP_2tXcDYJyEvx7oqOZc7uaaM,8489 +torch/distributions/multinomial.py,sha256=QlzCNqJJVJ9f1UHyrf15i0G9KG42MNGbNEHXWCZr07A,5428 +torch/distributions/multivariate_normal.py,sha256=-5eCLThUPs7_TZ6nQWcvPVJhCMp0midVYc6fp_0vcdM,10829 +torch/distributions/negative_binomial.py,sha256=FDOV7sDtAktX4Ci0APN1Z2_jkn_6RPDS_YFXkXmwhhM,4699 +torch/distributions/normal.py,sha256=m-PyqfibRoUHIagsyauenLLII01rMZIK9zayQq13pd0,3632 +torch/distributions/one_hot_categorical.py,sha256=ni6zF-wfKdtmgDt17gJ-7ZfPoVQeJsUeROPW_RUAtWg,4763 +torch/distributions/pareto.py,sha256=hUZYFGggE8Fc85YRlbsPClFGOh2fxaMdP0mEa2GzWvg,2203 +torch/distributions/poisson.py,sha256=8_I9v8vQnNU1jzevfv8_NGDVyqo6QTv6vLmBQjS3Hss,2245 +torch/distributions/relaxed_bernoulli.py,sha256=ryvvGgBacGs9nLa1NY3c2HbazKeWJbRwxRk3t5bmsao,5453 +torch/distributions/relaxed_categorical.py,sha256=p3scFX_PhORu69DvSPhwN_gCp-WJcV0LZ_MquynywfM,5248 +torch/distributions/studentT.py,sha256=YqbPXVyBR8zMOBanhZA6-MNEhxDD4po787pA86Qv3P0,3939 +torch/distributions/transformed_distribution.py,sha256=wk91QGs0aan_LrzG2nAq6LGD1nNPe-xpAg1VNIR1Rb0,8695 +torch/distributions/transforms.py,sha256=j1-7A0Ke751CdXaVtZ7vQkF9PdO5efIBZcXi0wFlmK0,40962 +torch/distributions/uniform.py,sha256=QcbuFENqUpy6wFT5lp7nFZUF2nKru7-PrMDL71HfeYw,3323 +torch/distributions/utils.py,sha256=Lzh9wCUEGx5z14lbN5jdEoZUJkvtYN3iv6G_R4j2V_Q,7086 
+torch/distributions/von_mises.py,sha256=CcsdWfHN3KRZR2xnSeCnzvx6UZdRfD8X1jmiyvjLaSk,6089 +torch/distributions/weibull.py,sha256=NBumWSweqywWFbOPWoiCXjv7BStb-SOm5pkR7iQQOyI,3097 +torch/distributions/wishart.py,sha256=F_q4OKWhySEEB1nqhMD6ewXsQ1UcMHJwrrLYoJz-54s,13717 +torch/export/__init__.py,sha256=WoV72rapqqakjwMVolD_vQ1cGjVJYkt5cBF_tHOLhCQ,20208 +torch/export/__pycache__/__init__.cpython-310.pyc,, +torch/export/__pycache__/_remove_auto_functionalized_pass.cpython-310.pyc,, +torch/export/__pycache__/_remove_effect_tokens_pass.cpython-310.pyc,, +torch/export/__pycache__/_safeguard.cpython-310.pyc,, +torch/export/__pycache__/_trace.cpython-310.pyc,, +torch/export/__pycache__/_tree_utils.cpython-310.pyc,, +torch/export/__pycache__/_unlift.cpython-310.pyc,, +torch/export/__pycache__/custom_obj.cpython-310.pyc,, +torch/export/__pycache__/dynamic_shapes.cpython-310.pyc,, +torch/export/__pycache__/exported_program.cpython-310.pyc,, +torch/export/__pycache__/graph_signature.cpython-310.pyc,, +torch/export/__pycache__/unflatten.cpython-310.pyc,, +torch/export/_remove_auto_functionalized_pass.py,sha256=LbnKw0JfJJWxmUgpLj_7JtJkx8zMF5DYpHpHn5QAL7g,1952 +torch/export/_remove_effect_tokens_pass.py,sha256=2049cunx9ItdC8k_01zByHsyk5q-IWafJkROBSSgXfI,5943 +torch/export/_safeguard.py,sha256=7peuxoS51C1SC2h-4XAVDbZIu772gPUzRRTrDCzo14Q,1956 +torch/export/_trace.py,sha256=IWIPOfdTlDUy1ZU6mtzp5qg4kxTThUE9A1THe-7Y5Tc,72530 +torch/export/_tree_utils.py,sha256=E2SxZ08IqAl-CZXmZU21V38GdPeV5gGanWQlJnQ3K74,2230 +torch/export/_unlift.py,sha256=5tbD_3xoWvT7rwtLhnMrzIMAdkAtxq2Lflj174iyW68,12540 +torch/export/custom_obj.py,sha256=H6j0qawn-qiMlmafZDdjAuOOQY7YvQ7pbhiFW71hOMY,296 +torch/export/dynamic_shapes.py,sha256=s4KjJQWhy5XkyywijHizg4z7MeSufz0CNOZGL-pAn5A,49762 +torch/export/experimental/__init__.py,sha256=cMVQVB9yO1oQV9nGzoYNuSWaEL_lwn2xMxaUyse21bY,2521 +torch/export/experimental/__pycache__/__init__.cpython-310.pyc,, 
+torch/export/exported_program.py,sha256=yeVxpa2yYo1ZRXjDvEhKOm23huWvRvYZXZTIVF__ThM,45205 +torch/export/graph_signature.py,sha256=_-cfw52gZxZwV28guJHmr6PO9C191TnUVT1xWjdBjFM,21466 +torch/export/passes/__init__.py,sha256=Z0P6v3cGhy1dINAt0Il4PWwE5o15ByhJXb4eneor2DU,2234 +torch/export/passes/__pycache__/__init__.cpython-310.pyc,, +torch/export/unflatten.py,sha256=yoA_VFccibpENJoGGAbaqwt4-dGmzPRA7Mu85xh7jeQ,50008 +torch/fft/__init__.py,sha256=X8JNOgYQ0V9PEDWhiqOr7WKJvcb4SXKLkMOb6XWB2i8,55060 +torch/fft/__pycache__/__init__.cpython-310.pyc,, +torch/func/__init__.py,sha256=Ww3pIuyf3N97iQ9lRRhFOK0jdfzpZ6PKnTjuEJPjieI,397 +torch/func/__pycache__/__init__.cpython-310.pyc,, +torch/functional.py,sha256=7VJ5K6W7-sBXQ7X1Nx8Tr7dzd28W_5RHQ0sA4AV5QnU,87110 +torch/futures/__init__.py,sha256=UN75-zDBROrczLgMPz7v4ljXDLH7z_5_YPBNCA01DKU,14419 +torch/futures/__pycache__/__init__.cpython-310.pyc,, +torch/fx/__init__.py,sha256=cIz_qpOKLOP1NRoLXUtAT3XG-Q-z1nF252x3_bP0Dzk,3818 +torch/fx/__init__.pyi,sha256=Byxep3rRGpbdjTM6JRhQr7vXPACh4jkDkIhpN1FF1jE,538 +torch/fx/__pycache__/__init__.cpython-310.pyc,, +torch/fx/__pycache__/_compatibility.cpython-310.pyc,, +torch/fx/__pycache__/_lazy_graph_module.cpython-310.pyc,, +torch/fx/__pycache__/_pytree.cpython-310.pyc,, +torch/fx/__pycache__/_symbolic_trace.cpython-310.pyc,, +torch/fx/__pycache__/_utils.cpython-310.pyc,, +torch/fx/__pycache__/annotate.cpython-310.pyc,, +torch/fx/__pycache__/config.cpython-310.pyc,, +torch/fx/__pycache__/graph.cpython-310.pyc,, +torch/fx/__pycache__/graph_module.cpython-310.pyc,, +torch/fx/__pycache__/immutable_collections.cpython-310.pyc,, +torch/fx/__pycache__/interpreter.cpython-310.pyc,, +torch/fx/__pycache__/node.cpython-310.pyc,, +torch/fx/__pycache__/operator_schemas.cpython-310.pyc,, +torch/fx/__pycache__/proxy.cpython-310.pyc,, +torch/fx/__pycache__/subgraph_rewriter.cpython-310.pyc,, +torch/fx/__pycache__/tensor_type.cpython-310.pyc,, +torch/fx/__pycache__/traceback.cpython-310.pyc,, 
+torch/fx/_compatibility.py,sha256=Sbkq3Pb2brnRNaMZlgd0jJgvqiZaP0-ImZO6RWAzRSc,1086 +torch/fx/_lazy_graph_module.py,sha256=HsZKAAQg-3n0DeNkIKZyvXRKl_jmPjfGr3RRlso4_lA,7174 +torch/fx/_pytree.py,sha256=3nwDFft6JBehHAarXb_XNCYLClERH7IUenCNm5hh-hg,3502 +torch/fx/_symbolic_trace.py,sha256=AUFa25tJR5PKutDlIoImgggpMnJ1Ma98YQyniMDWiFA,49509 +torch/fx/_utils.py,sha256=deBRCu96DvfubxtJ-vEzqxd9r_nmZwXZrBFCU5ge9aQ,1640 +torch/fx/annotate.py,sha256=LavwHxVrr5WDds9Trc-z67COCQ_hczyjf2CZfP8acgw,1314 +torch/fx/config.py,sha256=Vq4ADVOR87qBcbL9BMOX7DObs6TJQGEIiT3LivH4mkg,328 +torch/fx/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/experimental/__pycache__/__init__.cpython-310.pyc,, +torch/fx/experimental/__pycache__/_backward_state.cpython-310.pyc,, +torch/fx/experimental/__pycache__/_config.cpython-310.pyc,, +torch/fx/experimental/__pycache__/accelerator_partitioner.cpython-310.pyc,, +torch/fx/experimental/__pycache__/const_fold.cpython-310.pyc,, +torch/fx/experimental/__pycache__/debug.cpython-310.pyc,, +torch/fx/experimental/__pycache__/graph_gradual_typechecker.cpython-310.pyc,, +torch/fx/experimental/__pycache__/merge_matmul.cpython-310.pyc,, +torch/fx/experimental/__pycache__/meta_tracer.cpython-310.pyc,, +torch/fx/experimental/__pycache__/normalize.cpython-310.pyc,, +torch/fx/experimental/__pycache__/optimization.cpython-310.pyc,, +torch/fx/experimental/__pycache__/partitioner_utils.cpython-310.pyc,, +torch/fx/experimental/__pycache__/proxy_tensor.cpython-310.pyc,, +torch/fx/experimental/__pycache__/recording.cpython-310.pyc,, +torch/fx/experimental/__pycache__/refinement_types.cpython-310.pyc,, +torch/fx/experimental/__pycache__/rewriter.cpython-310.pyc,, +torch/fx/experimental/__pycache__/schema_type_annotation.cpython-310.pyc,, +torch/fx/experimental/__pycache__/sym_node.cpython-310.pyc,, +torch/fx/experimental/__pycache__/symbolic_shapes.cpython-310.pyc,, +torch/fx/experimental/__pycache__/unify_refinements.cpython-310.pyc,, 
+torch/fx/experimental/__pycache__/validator.cpython-310.pyc,, +torch/fx/experimental/_backward_state.py,sha256=TzC9Uin0ccyk7oG5z7HQPhRP6I0_6-HC28DOXpzUZzE,967 +torch/fx/experimental/_config.py,sha256=HGXZzo2y0bofvbjafpLNJ6wkzTHxhDZPqbGh0IaF7d4,3759 +torch/fx/experimental/accelerator_partitioner.py,sha256=qo3mSQyIllnjU1xiRhtEmYNj2i4CN17LymOiwiMyxxw,47965 +torch/fx/experimental/const_fold.py,sha256=2F9Xfr0yeXgJBvT61Avpkq-imA8xxB_3y2ylKLbMEVs,12145 +torch/fx/experimental/debug.py,sha256=D4leGqNwCBRKaxJwekeClZucw7e7SOgk7s69Setn-AY,832 +torch/fx/experimental/graph_gradual_typechecker.py,sha256=Hi7nCNxKkAM-HKkjdoxpTzXruavaSR0P0oybj9Vo-Ho,32370 +torch/fx/experimental/merge_matmul.py,sha256=Z1P0sGI_auCaqMXUfZgR9OR9woVAwXz9XGJv6__jmVg,6008 +torch/fx/experimental/meta_tracer.py,sha256=CjoSStOqagrt-D91VNAO-gTpiYsMCYOEVT3QXuWegsg,10159 +torch/fx/experimental/migrate_gradual_types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/experimental/migrate_gradual_types/__pycache__/__init__.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/constraint.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/constraint_generator.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/constraint_transformation.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/operation.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/transform_to_z3.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/util.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/__pycache__/z3_types.cpython-310.pyc,, +torch/fx/experimental/migrate_gradual_types/constraint.py,sha256=4DVttSrSd99_szvDVCQlH6fI0ZMFa90dY87E3cDuIaw,16416 +torch/fx/experimental/migrate_gradual_types/constraint_generator.py,sha256=ULtARmFv1WjbxBCLS7RLphVwfYjVcQEN0U40HgCSk88,47594 
+torch/fx/experimental/migrate_gradual_types/constraint_transformation.py,sha256=5Vuh8jmrg1w5b6vPkV1MVQqhfTnGFmV_PYjAbC5ws5w,39376 +torch/fx/experimental/migrate_gradual_types/operation.py,sha256=-UL0xa0Ah1zRLpFjZ933xEBTGWYF36UVYYVyqJojGqU,287 +torch/fx/experimental/migrate_gradual_types/transform_to_z3.py,sha256=au4qT2IFd9BC7Gyn4KeArCto2T0T-vvkPB6A-CyQaBw,14731 +torch/fx/experimental/migrate_gradual_types/util.py,sha256=52Wvf-TgcrHDv_kohz8qwkLcJz2SAdBtb_qNortyR6Q,1348 +torch/fx/experimental/migrate_gradual_types/z3_types.py,sha256=w27YOUiMJ0idGox7f76kMPtOp7mKYqb_TQuOFlb5kX4,806 +torch/fx/experimental/normalize.py,sha256=FvgpiTI2wueIXOj_EyUPFMC0pn-ChaNz_kBoXM9OFDI,5505 +torch/fx/experimental/optimization.py,sha256=_Ci-ZJqaKdaYR8NVz0FG6Ij3rM2ObxKpH0kiBd1xEq4,16693 +torch/fx/experimental/partitioner_utils.py,sha256=DfqcJ_2WwyzuDotFtqmIOP2YJkgBQ7ZTEhqhUeVpWjM,12397 +torch/fx/experimental/proxy_tensor.py,sha256=GH46i9vSpfyQHhoP-7AwFd10qXDqmcp3HsC1cxDZUoo,81626 +torch/fx/experimental/recording.py,sha256=zt1Uy7MdrD6I4y7iq8EIr0-FqduXoQckN7YVC8f8WHQ,19281 +torch/fx/experimental/refinement_types.py,sha256=gWtNx5TwHQWEpTj6G6jRrJf31nfd3UpWDG7zaHKyTVA,432 +torch/fx/experimental/rewriter.py,sha256=gartBSZUXrQKxqrioV-fBzbRPcHN7CgVCm5K8WVH8Ig,5337 +torch/fx/experimental/schema_type_annotation.py,sha256=JjT2GdMbNEdkMVGGp255fuuw1V7vDMm409HKc73oUpE,5027 +torch/fx/experimental/sym_node.py,sha256=iKnJTh6GjRyRwveLdjmiAWeBE_jG2DhDnu_0bogAMns,50014 +torch/fx/experimental/symbolic_shapes.py,sha256=nHsC-T6Hz8_AemrjWCJ1R-6OyR_paKHrvc5I36s31ZU,250868 +torch/fx/experimental/unification/__init__.py,sha256=JSX_I7TpG_gVkuwRSoZquk9ND-shjCgrTEQytg9krq0,196 +torch/fx/experimental/unification/__pycache__/__init__.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/core.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/dispatch.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/match.cpython-310.pyc,, 
+torch/fx/experimental/unification/__pycache__/more.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/unification_tools.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/utils.cpython-310.pyc,, +torch/fx/experimental/unification/__pycache__/variable.cpython-310.pyc,, +torch/fx/experimental/unification/core.py,sha256=9bNyxJn_2KZp7qBSIkCaII193npNkJJfPGFI7YLA748,2734 +torch/fx/experimental/unification/dispatch.py,sha256=Lkd0aJd7o8XM6GcnBmhK7O1HUlbQ_xKKdiZfj0Wo24U,191 +torch/fx/experimental/unification/match.py,sha256=N9n0wpLujF6dOFkQDOSdJk4jZsRq1BsIZ1cKMxlLD1k,3392 +torch/fx/experimental/unification/more.py,sha256=DMIUCCkGKr2au34vuhc9mymQTr_H5ze59ZRSvLSdmfY,2944 +torch/fx/experimental/unification/multipledispatch/__init__.py,sha256=zm7LfTwHQ3IoYiEBKKDFz8BH_m_8AfQ3zwjTRoH15ho,145 +torch/fx/experimental/unification/multipledispatch/__pycache__/__init__.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/__pycache__/conflict.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/__pycache__/core.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/__pycache__/dispatcher.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/__pycache__/utils.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/__pycache__/variadic.cpython-310.pyc,, +torch/fx/experimental/unification/multipledispatch/conflict.py,sha256=SxopAG9TPDEWRO7sf_MgsPfUsiG8K2LSt8QAQy3DAlM,4164 +torch/fx/experimental/unification/multipledispatch/core.py,sha256=uJeQtblgvXf0n-FBMLCgnX_J6jMMo4l2LMnNNiTB4YY,2697 +torch/fx/experimental/unification/multipledispatch/dispatcher.py,sha256=JVDp2vpFzSUuFh0gANxPlccwbcm73bJcKtAoX_TWT7A,13830 +torch/fx/experimental/unification/multipledispatch/utils.py,sha256=Kfyg4M5V04dU7DtebKWFhuVZCGLlIwBZDC6Xb8w4MOw,3793 +torch/fx/experimental/unification/multipledispatch/variadic.py,sha256=cl8IyYlTHi4Z3Cy1e1omtsoCUuCdDa-MjHgok5k6XRs,2969 
+torch/fx/experimental/unification/unification_tools.py,sha256=1jMmdFRkYqIvrlIQ33BtAxncRDeyMAAbZLbqMNFK78I,10571 +torch/fx/experimental/unification/utils.py,sha256=w8NLzVhL1vhIlJ6P7qbFHDfbIlTTkLGHQOOzWf44u7E,2939 +torch/fx/experimental/unification/variable.py,sha256=zX5FkKHnFscXyBKD_UPEDDdHDTG7EGsgKCt5bEsbojU,2063 +torch/fx/experimental/unify_refinements.py,sha256=cAJNsYRzFe8n72E7Atfo-2fVUunBCKreUeMrqvt_rZQ,3148 +torch/fx/experimental/validator.py,sha256=JrEGrqtsQroH7BMNHJL15G61GYNuC0gREQ_24gfK9TM,31587 +torch/fx/graph.py,sha256=jOzUpKgvaAQzmsE5gpagzSgKH26cvCyQ9_kvWpMRLcU,73747 +torch/fx/graph_module.py,sha256=v14YWnxLnpbICPF-3MBfg7_qmedjI3Udx8hfB_F6c_s,37784 +torch/fx/immutable_collections.py,sha256=CwhL084SR_jreY-Rl1C5qYa2FjAD7oAVRye6W-yJ_Vo,2987 +torch/fx/interpreter.py,sha256=FcFgMX9VzY7oGrULkziLz6Ld_roryLHDzlf6oTPx8zM,22041 +torch/fx/node.py,sha256=TnxHnhjuZRVl8Y2BBsxndLyEpKsdOGKBy-GHot-x7iE,33934 +torch/fx/operator_schemas.py,sha256=y2lZOIQbzn7iO8brfEe446jNuPr8plJERGc-WhPjn7Y,20835 +torch/fx/passes/__init__.py,sha256=q1f9dqRWGQW86rvLjYtm89AqNw5ElllAyEA9u5ZOs3s,330 +torch/fx/passes/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/__pycache__/annotate_getitem_nodes.cpython-310.pyc,, +torch/fx/passes/__pycache__/fake_tensor_prop.cpython-310.pyc,, +torch/fx/passes/__pycache__/graph_drawer.cpython-310.pyc,, +torch/fx/passes/__pycache__/graph_manipulation.cpython-310.pyc,, +torch/fx/passes/__pycache__/graph_transform_observer.cpython-310.pyc,, +torch/fx/passes/__pycache__/net_min_base.cpython-310.pyc,, +torch/fx/passes/__pycache__/operator_support.cpython-310.pyc,, +torch/fx/passes/__pycache__/param_fetch.cpython-310.pyc,, +torch/fx/passes/__pycache__/pass_manager.cpython-310.pyc,, +torch/fx/passes/__pycache__/reinplace.cpython-310.pyc,, +torch/fx/passes/__pycache__/runtime_assert.cpython-310.pyc,, +torch/fx/passes/__pycache__/shape_prop.cpython-310.pyc,, +torch/fx/passes/__pycache__/split_module.cpython-310.pyc,, 
+torch/fx/passes/__pycache__/split_utils.cpython-310.pyc,, +torch/fx/passes/__pycache__/splitter_base.cpython-310.pyc,, +torch/fx/passes/__pycache__/tools_common.cpython-310.pyc,, +torch/fx/passes/annotate_getitem_nodes.py,sha256=E0brWyfqEc0ZcKl5-Y2Ab-1SBhIOrc_d_QJbuVf4KFs,1953 +torch/fx/passes/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/passes/backends/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/backends/__pycache__/cudagraphs.cpython-310.pyc,, +torch/fx/passes/backends/cudagraphs.py,sha256=795UvzZRSpOxkw8kqxeZnWW05pEcr-Wn3Vs7UbsNXLc,2067 +torch/fx/passes/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/passes/dialect/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/dialect/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/passes/dialect/common/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/dialect/common/__pycache__/cse_pass.cpython-310.pyc,, +torch/fx/passes/dialect/common/cse_pass.py,sha256=-skVnIcZtsYVVVLA7bfhtaFxyOX7yGtOjtjxWoC8Hzg,4938 +torch/fx/passes/fake_tensor_prop.py,sha256=r5qAJKmXCmQknZyUaESORiHcJn7SpFTl93SVCvaXz4E,2734 +torch/fx/passes/graph_drawer.py,sha256=d20CrfN1x-MK4MlfY--xzOZIJVdxSqn9uifM5TQZ5WI,17577 +torch/fx/passes/graph_manipulation.py,sha256=mwabj4CeQTS6-oAoUu0JqA6gayTU8p4s6Mo3kfKTtqw,4008 +torch/fx/passes/graph_transform_observer.py,sha256=PepSezrQ3ojNJgOTXDIT82P2sMtBDouwWbg5-M54gMw,2979 +torch/fx/passes/infra/__init__.py,sha256=edegnIAKakNdSXu64OLBr7POgNQUF2NDlOhK6y5Aftw,28 +torch/fx/passes/infra/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/infra/__pycache__/partitioner.cpython-310.pyc,, +torch/fx/passes/infra/__pycache__/pass_base.cpython-310.pyc,, +torch/fx/passes/infra/__pycache__/pass_manager.cpython-310.pyc,, +torch/fx/passes/infra/partitioner.py,sha256=a_5wWyrbBcKZP2o5rkPtuJVK0E7-485hEC96CL2sWbE,15890 
+torch/fx/passes/infra/pass_base.py,sha256=d4KJ644krxet9guB5EoLWSWe89qJIONx75reUnJyQVQ,2478 +torch/fx/passes/infra/pass_manager.py,sha256=zf3jdnrryifpNBkLmGCnCh5flLEMRUQRwFVFpMlaGQw,10313 +torch/fx/passes/net_min_base.py,sha256=eNJ0EcZjTonsyewiRnZIDKWwXLX4qCn01U51ZIBn4Kk,34430 +torch/fx/passes/operator_support.py,sha256=HFj0BDJd3d7eV9SKluipWDxXlA07x63hHjXSitQLvag,7661 +torch/fx/passes/param_fetch.py,sha256=1W-DYjVRHQ99zk0djbwv66epYBJl7gBoTWbO1ejkrzg,3527 +torch/fx/passes/pass_manager.py,sha256=TrbkWjPtAZJmkmG2m--0c_Olr9JQVQVYeUgD4WSQcoA,7158 +torch/fx/passes/reinplace.py,sha256=CVPxm0QKUcJIWIo1nqOzPs55OdG4TOtzrOAavynAAzM,33395 +torch/fx/passes/runtime_assert.py,sha256=SotaJ-zhQHAX7o642aYManm6lRAHpGT_RaCFqMdLRYs,28061 +torch/fx/passes/shape_prop.py,sha256=tLGV8UH6seeZFkE9FkNIxMAgQ8FlsmfARD8xIrGt_6g,7337 +torch/fx/passes/split_module.py,sha256=GegGoJzzWa4c-tL1yRxZkdtDkDwuyheI6jK71zg59So,24454 +torch/fx/passes/split_utils.py,sha256=fBPh7ikayv6-TlQcv1GRfW25j9yejizSIjj3prxFp7k,11251 +torch/fx/passes/splitter_base.py,sha256=zMvcnjEcunB1hK4R9gZIDR_pKXFOO1dDWFaQhdPOjUQ,33363 +torch/fx/passes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/fx/passes/tests/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/tests/__pycache__/test_pass_manager.cpython-310.pyc,, +torch/fx/passes/tests/test_pass_manager.py,sha256=eUjb15IskaC7b-_Lm_DAA4zs_KL9WAcjPaPSTZosUpA,1899 +torch/fx/passes/tools_common.py,sha256=32KI-z7mcdsJDXqEXwMGDW9cqjzj4IbW5hRYIRdjiLU,11123 +torch/fx/passes/utils/__init__.py,sha256=e6gxht-nNUZK2J_E0AkiEpa6UdKlXJA5cwwiu5XbXVc,74 +torch/fx/passes/utils/__pycache__/__init__.cpython-310.pyc,, +torch/fx/passes/utils/__pycache__/common.cpython-310.pyc,, +torch/fx/passes/utils/__pycache__/fuser_utils.cpython-310.pyc,, +torch/fx/passes/utils/__pycache__/matcher_utils.cpython-310.pyc,, +torch/fx/passes/utils/__pycache__/matcher_with_name_node_map_utils.cpython-310.pyc,, +torch/fx/passes/utils/__pycache__/source_matcher_utils.cpython-310.pyc,, 
+torch/fx/passes/utils/common.py,sha256=kIPBtKCmGKsdtA2FCDReU-0XQkzyeczygJMjoDVofDQ,3139 +torch/fx/passes/utils/fuser_utils.py,sha256=JxDbgbrKeGFGL8BvSDokfMwcQ0O_fl1tnWEI1HqbNHc,8793 +torch/fx/passes/utils/matcher_utils.py,sha256=IiwTB3vHhJZoiP3dbARWuGfq8B6dzAuReRUt4POnwUs,17011 +torch/fx/passes/utils/matcher_with_name_node_map_utils.py,sha256=fZrpjZv7s8hqEe15DkDyrNQUxjsg2ieyeLS1GfQ_XwI,4225 +torch/fx/passes/utils/source_matcher_utils.py,sha256=ZGcgXtnotq8t6TCCryG46Z0K5NopT4xsHb7JEVkwY0Q,5609 +torch/fx/proxy.py,sha256=WVGCeiRtSz_efNNafxz_ILcUjxP8gsx0VxjI2YYutOo,23883 +torch/fx/subgraph_rewriter.py,sha256=Nv8KbbnCxz_TqEZqfFXnVfsTIO9rHw3FhfD9-kLLuFA,13824 +torch/fx/tensor_type.py,sha256=9vj89E2_eJ8V-ep6aMcC5MDCma8okjtLvvcMIB0Lsp8,2967 +torch/fx/traceback.py,sha256=tawUZRFPgswZz4OJ2mU7fJoDhM_0X1KKYbY3zea_gkk,3425 +torch/hub.py,sha256=Kye69VTKSJaRe8sBnWELut-hxphtCmMn1WsyFQ7hn_I,33357 +torch/include/ATen/ATen.h,sha256=mkjW3p2EH-mgdyzM_Nn8t3Mu9IneuDNjDyNHTlRYSrg,1107 +torch/include/ATen/AccumulateType.h,sha256=VypnaDmCLFj9qecSQgRTLbj6YEi4y7tTNeXN7lUTGFk,6184 +torch/include/ATen/ArrayRef.h,sha256=Oj53usfNJkz8L3fSOjmE20rpC0NUjhOVz4bmu68R28w,44 +torch/include/ATen/Backend.h,sha256=uKLWU9cBX4ldaPyhSACkAAe1BDHaJFKAS8yVHexzEuo,43 +torch/include/ATen/Backtrace.h,sha256=iQwNYKFPK-sNKn5Ngwy2vzkESKV84wJSLIQD1O7GGxg,46 +torch/include/ATen/BlasBackend.h,sha256=nsHKIgeHZxX-rj83dwSAXXY5Vd-Tu9piyXjN_orjKPk,608 +torch/include/ATen/CPUApplyUtils.h,sha256=mXDQF4MV8JMJ8ALxcWi_uoZdLn_6obxh6ONJ2gpT98g,10203 +torch/include/ATen/CPUFixedAllocator.h,sha256=ePZtym2ZPfh0EldwpzgFn23DohoHWM9Yx4Q5UJkUUnc,845 +torch/include/ATen/CPUFunctions.h,sha256=1sgkSeDNe-aEjWax1mFeJ5d5HkeiG-GEDPfYr_SaDDk,1954 +torch/include/ATen/CPUFunctions_inl.h,sha256=j41vToHcmdyAWSIyTHTvbeduuA9BdaA-3am4AsY94SM,27108 +torch/include/ATen/CPUGeneratorImpl.h,sha256=7odrNEIgXf-N7AQJmE8dsYgrNKvdiaTaW3kXQFIMiQo,1527 +torch/include/ATen/CUDAFunctions.h,sha256=1hx0BqsL13QwJEjYlV_YWSBw5Aweo2rjKuXAUAPALFY,1955 
+torch/include/ATen/CUDAFunctions_inl.h,sha256=dDZGM5_gpIqdt3UkOxm-_vW8Km4PUjo6D3QBQa_vW7I,32354 +torch/include/ATen/CachedTensorUtils.h,sha256=A7gaS6-sexogUv0aYx8H9EYbRZxvaNXyNtgy4ovaZfM,1007 +torch/include/ATen/CollapseDims.h,sha256=YcFbH1j-lkxTcwHr5trswl3WJEZTp8VcW6sfsm11_oA,2560 +torch/include/ATen/CompositeExplicitAutogradFunctions.h,sha256=_61JCORNGlTwh3LSUebe-QyUeOUW703Bk0QeZEF9ZCs,1976 +torch/include/ATen/CompositeExplicitAutogradFunctions_inl.h,sha256=hGDgZ-68UUSIWPMGa3d42ct2s7DcQCecylqkC3HP-5E,40150 +torch/include/ATen/CompositeExplicitAutogradNonFunctionalFunctions.h,sha256=Yte08jyGIy0epK4J9_PsmIjjrhrTWwOR82C_SNuuqL8,1989 +torch/include/ATen/CompositeExplicitAutogradNonFunctionalFunctions_inl.h,sha256=jnt23hMGJcAgfx6SBp52_mLThvBI-AzDPfKFsWEIsPU,26244 +torch/include/ATen/CompositeImplicitAutogradFunctions.h,sha256=ashjfHtnYtMvgzkDnJ1QutpAIOTMbJ1RaZlW_3DGA1E,1976 +torch/include/ATen/CompositeImplicitAutogradFunctions_inl.h,sha256=JoHyg4mbS1fAWq50HUSOXtJBuXo0qo6QoBZsZFh9uUA,35024 +torch/include/ATen/CompositeImplicitAutogradNestedTensorFunctions.h,sha256=9M_qlGSuUPFomzb5lwBaQl10CYYGE046wscf8M6jQz0,1988 +torch/include/ATen/CompositeImplicitAutogradNestedTensorFunctions_inl.h,sha256=zPpmzVcthe0efVJmcYnz1EnWbn04TkoWlfsQ2-qlH-s,1126 +torch/include/ATen/Config.h,sha256=MXlRzKP69Hf0ljLkhCdDa5uUYcGhKUPR4fi6QC5Qgr0,737 +torch/include/ATen/Context.h,sha256=3gUgI9fwHwZZkh9yetyPFX3QQKIIJZilh8NGmoOZbbM,20915 +torch/include/ATen/DLConvertor.h,sha256=DIYUBdOoY8W9hlxJYiq1YManGWfiWcdZW0a-508Y_OI,834 +torch/include/ATen/Device.h,sha256=woU2HH6ul-aOLsILb-xFTxGbtA7mGnaMaJzzB-CrlBI,42 +torch/include/ATen/DeviceAccelerator.h,sha256=QT9vqAPntIyCRTrXVtRmGBQL4MfRwz-LamYk_Vf-M6g,911 +torch/include/ATen/DeviceGuard.h,sha256=KMbORha0ItibPkpq7GUK6iYrIiTdXIkvTUXgspoy7TI,1185 +torch/include/ATen/DimVector.h,sha256=hDtEu7TLm-rmIlopY3DudvGcP7BopSyWYAoce6SRrKM,46 +torch/include/ATen/Dimname.h,sha256=ODvWq78JatYnrvxjZb8taxy-nOBOC6VE0sZnSpxiwFs,31 
+torch/include/ATen/Dispatch.h,sha256=faT6Cor2Yia95z1T6-C7qiJocZyQxGyE2WP4huBtwTQ,39757 +torch/include/ATen/Dispatch_v2.h,sha256=Kgf3eSoKmq9YRhFlgvrfJYcPt2OMGi5o3U4Mmr7tvGM,36313 +torch/include/ATen/DynamicLibrary.h,sha256=4MJsQ_OZ5MndH29Hi_FTPdxEEuCvMJM8LJdcHwqFMDE,572 +torch/include/ATen/EmptyTensor.h,sha256=iZqDKHcinOMLYmj1zOwdh9vFirz_WUWkP1csVsELkBg,4710 +torch/include/ATen/ExpandBase.h,sha256=nUXXodL4QrMGHb0xV9QpDjEJsG67gD6GHosIfNnrfxA,914 +torch/include/ATen/ExpandUtils.h,sha256=aJc-6ya0COOmngh-CLlD_Ky6FMSqc-ejklgqo73lGag,16306 +torch/include/ATen/Formatting.h,sha256=u9t8xVe-WAEXzWvHqFJ93K566ukel9wsbLhSEUeEqyc,34 +torch/include/ATen/FuncTorchTLS.h,sha256=1Rh_Sl0r0UOMpwzcl6_P1GEJXBgAtIGra6xE5t_B69o,1816 +torch/include/ATen/FunctionalStorageImpl.h,sha256=SrJ3IxrPQ69jrLrnqY-Ld-bdwEBhz2m6eLXtnQQhGtQ,7699 +torch/include/ATen/FunctionalTensorWrapper.h,sha256=mxra9DEOPxrjWYqNifEKV2eoDhSfiqR2XTinAtYt8VY,17769 +torch/include/ATen/Functions.h,sha256=HCd_si6r9za_0jI7fqKoU1v5h8FdF54PhJKmmlpCA9I,54583 +torch/include/ATen/Generator.h,sha256=KBeu-DIZBnbAWTvlDIPNIA7PMY3wFODHbAR0l-kToLc,46 +torch/include/ATen/InferSize.h,sha256=BygtgKi-U00QEVcGpjMJaaGhe82yZhi-YNuys1qaOJU,2633 +torch/include/ATen/InitialTensorOptions.h,sha256=Hwbswfmyx2diKA1DCTPukc246Fnm7kn_MOVXQmVrVDw,439 +torch/include/ATen/Layout.h,sha256=l9TpZf-gLtmQbbV7_dn0UFiOuV88PVS7V047YMCtM-A,42 +torch/include/ATen/LegacyBatchedFallback.h,sha256=QNBsZOnkz1buoh0I2HhKGyf3uxGh8NGcHq-AtADej7U,974 +torch/include/ATen/LegacyBatchedTensorImpl.h,sha256=LER0yf8y39WWD3WR27q_nJMXlAP6tf-026WbS3iRIHs,5559 +torch/include/ATen/LegacyVmapMode.h,sha256=w8ZTj-VP5xE5NMHEAGXqZE64gMm-i141FiOdWdNQU7Q,927 +torch/include/ATen/LegacyVmapTransforms.h,sha256=g0lBIYJ793Z5bMsvchyDxfExUt14w8ghWbjB-6t3nfU,7815 +torch/include/ATen/LinalgBackend.h,sha256=8FGeL3F7EUk5i_VVs5SHukfJhAQuXi7WtN5BNXXhDlw,719 +torch/include/ATen/MapAllocator.h,sha256=4CddWuFM6VHtKq3TQ4eQ9vXTxeGFU4iMu1oZk8oUy9c,3361 
+torch/include/ATen/MatrixRef.h,sha256=56yuJvNZaJpzsDbk_FEL4I4Wz4meK-Wuv_PYf2QDpRc,2884 +torch/include/ATen/MemoryOverlap.h,sha256=VMIgtWjB4dUQORQL0AwDnVO7KBVmGYqQTaGhESKS5FM,1287 +torch/include/ATen/MetaFunctions.h,sha256=OaAEO2SIhM252jC0YqyLZReeVbr5SX_Kl68kDrpDY9s,1955 +torch/include/ATen/MetaFunctions_inl.h,sha256=pj5beI6eLLXZo6MeMZbQECljJ_5-4nafgy8NEasix68,15848 +torch/include/ATen/MethodOperators.h,sha256=zUlTJXKFi8LTThmkJ1ChMr33vmdTqoZoLjtMMeQUr7Q,15445 +torch/include/ATen/NamedTensor.h,sha256=DMs7TFFl5X2V_3sRIg9-qaiiC94x9s0n0SEdYf2SaHE,35 +torch/include/ATen/NamedTensorUtils.h,sha256=w-7v55EGa3EOn_jhvDyvCNz_nbTt4UnqmjRbx89nr70,6794 +torch/include/ATen/NativeFunctions.h,sha256=Du8OszRwG0_wDPvFhA-kYk-RcgoBlBwt1SLDqRaq6Ek,60233 +torch/include/ATen/NativeMetaFunctions.h,sha256=adrJqYPQlir3OSfdBpANtMVDtNe3nBxE8efA0_7qraA,56762 +torch/include/ATen/NestedTensorImpl.h,sha256=ny6fOf9Ah0DYzrMr5sTPyeGENXQePzGBeZdY21jhQzE,10222 +torch/include/ATen/NumericUtils.h,sha256=KsKuwyXANr3KcNudnqxhA6bE-uRNj2Mu3HXILz3ECvc,5137 +torch/include/ATen/OpMathType.h,sha256=mbrJU6cZdNFX-dJu_Je1FedoQCcHJsWRZjTIsZV8Lf0,1504 +torch/include/ATen/OpaqueTensorImpl.h,sha256=yelm7W9LY4haAf1jOqA_au0wkZkEgY3QMZHv0E6GbVU,6163 +torch/include/ATen/Operators.h,sha256=3O1PldWWVGUIMBrb-Nu129No9FkYtqsAt8JDBKPX71o,58354 +torch/include/ATen/PTThreadPool.h,sha256=X2FolpcFacvui54S5s9CQPBQ5gvbG8VoF5sH1jkGtkI,391 +torch/include/ATen/PadNd.h,sha256=3JsvcDMvpdMZYbxJchOajFJ9dSZkJPMRqoUteVexHGo,593 +torch/include/ATen/Parallel-inl.h,sha256=P18LiFR88qOlKWs3Eq6bp_K3si6VmNvy02kYsIT3tHM,2293 +torch/include/ATen/Parallel.h,sha256=1F6lCULV5R9bkPI24TxX-m6YDBtBNO66EfgS6TS4BRM,4743 +torch/include/ATen/ParallelFuture.h,sha256=-VXbafsV86rduj8z2iD-VgPvFTiguQksGZWbjEUIflE,292 +torch/include/ATen/ParallelNative.h,sha256=qFCBu3Ojiwqr5lR_vQ0FLSUdX9bCS5LduLvmuPJeVvA,292 +torch/include/ATen/ParallelOpenMP.h,sha256=VHE3xZuh-I2ijBlTBpsVUIAUfMpO3XQeajMJ7t0I-ao,1283 
+torch/include/ATen/PythonTorchFunctionTLS.h,sha256=5mAho8FnYTQU8IyzgcwjBh5xH867BbAjXiI52-ZX0fw,1193 +torch/include/ATen/RedispatchFunctions.h,sha256=ZoUBSNC_-DzZheQi61TipwfWD7glIi8QzuGy7h-6PUg,2196047 +torch/include/ATen/RegistrationDeclarations.h,sha256=vW3LZ8uRWPmDOMNUulxwU-SwqmXc4FqqEatT_WnEPjQ,852480 +torch/include/ATen/SavedTensorHooks.h,sha256=jTwmpvX2ILDij2JcoskaiZvWbDLHLwpId8c44Y-e4OA,2406 +torch/include/ATen/Scalar.h,sha256=4uRhX5Y1wZouuAi4M8BriFoqJziRckfzIh1o7E3YS9k,44 +torch/include/ATen/ScalarOps.h,sha256=LAi47PbpJGUvoS3TObNAzA3mzkKbRCs8mA0XWuTPJBM,1595 +torch/include/ATen/ScalarType.h,sha256=YpUjgU8zTuDmlNt8pnUpNYjKVs-sJ-YUs4enjKJr72k,129 +torch/include/ATen/SequenceNumber.h,sha256=kkx0_BUyfdTbwboiGso0GwMNJG_KMxCjjibslHit6M0,333 +torch/include/ATen/SmallVector.h,sha256=b0zRaURtL0paOFhxGgB4flh3HczjPSX6gQ_GQzOOtHs,47 +torch/include/ATen/SparseCsrTensorImpl.h,sha256=mO4litPuIqLgmHKOQLdwZcrRB8ZdEyCfK52JPs35u2I,7109 +torch/include/ATen/SparseCsrTensorUtils.h,sha256=jYZzWdYsEY86SxokFwMIgOc7UvmlvRNdNGMGb2Z9NCA,16928 +torch/include/ATen/SparseTensorImpl.h,sha256=sF4xVfLp5t3h7KrF4MTWddnqcQx2Rh3kCNLF9BfLwlY,15308 +torch/include/ATen/Storage.h,sha256=EgzD3SO1khvg7pZDcTsqoI0FFSlsZobb6FadLbAhJ5c,43 +torch/include/ATen/StorageUtils.h,sha256=2YW2GztTWHfE493VWxPhtJDvHpExCtwFl69ZTwk_OzM,1308 +torch/include/ATen/Tensor.h,sha256=ercOzXbYX_U4x-QpJ9H5rNwZ8kR6vku_MYQlzRInW28,44 +torch/include/ATen/TensorAccessor.h,sha256=0oPU5QjZ5q_5qJ54cCEa18Zas4Z_IMTVAT09j39idII,51 +torch/include/ATen/TensorGeometry.h,sha256=EXJGUyagrTcaj22uEDZQlNmsfBP8C1zZd33cR3zsTXA,4273 +torch/include/ATen/TensorIndexing.h,sha256=RNXGzrsZpL1iA5HGT6LoMAmxW-sQoEbUVfp4eFQlgFY,23965 +torch/include/ATen/TensorIterator.h,sha256=dLSO1Bi8-qD0z8MR4NwO66bcl-UDrs6rp5lupxTOUHg,38542 +torch/include/ATen/TensorIteratorInternal.h,sha256=AJwnjP_kN7rvQREEyoH2iOcRTow0xL6Vp5_NCWXntQI,1931 +torch/include/ATen/TensorMeta.h,sha256=s1jIro_iR-HwLXP6sEIRNRz-sOKQeWzEQCN0lNtfW8E,5034 
+torch/include/ATen/TensorNames.h,sha256=O492vZXH2YzIkKs08vpwRf0cvsguCXNZ8EZIvDsTUuk,2571 +torch/include/ATen/TensorOperators.h,sha256=jj-erbmyn_-Vt12rreAuRpXG1iQ-U6qwLSsy2c9Q9Mc,2491 +torch/include/ATen/TensorOptions.h,sha256=0YGOolxkjb6QzEmeZ6xuo0bamoTct5GvrHAtPoMH-fU,49 +torch/include/ATen/TensorSubclassLikeUtils.h,sha256=ANoBU9K1Tmnej3Or16DASLt4CjeN0EO-wQnVWDTI07E,3229 +torch/include/ATen/TensorUtils.h,sha256=ytjTPAxmSIj4FdOpMG47kMtcQ8WwakFLtAxadkJtzOo,5958 +torch/include/ATen/ThreadLocalPythonObjects.h,sha256=EqG46_18Hph7JIFewnv3wiJlY4j1uowbGNa3JR23xUM,607 +torch/include/ATen/ThreadLocalState.h,sha256=GBaopyEYpU9Y9kel_f8O4lK7waRu8ft7KqvNnN5DkEk,4033 +torch/include/ATen/TracerMode.h,sha256=kVg2ISLuWXLG6VkfKTDhQu6INAkEznaYN3C5vIlV0sw,5508 +torch/include/ATen/TypeDefault.h,sha256=1KUqlbYBvhQj6kuRqvVwK3GpVoCgYWNIYGvEv35vUPQ,666 +torch/include/ATen/Utils.h,sha256=JIgGFP50zB6RN43bDYCtzwjsgg2pbcad0E-c0_DGRBw,3486 +torch/include/ATen/Version.h,sha256=sTW04hFh7RGM5RkbXoQKLDcwTP4Ks7GNDyTLr3tQJdY,384 +torch/include/ATen/VmapGeneratedPlumbing.h,sha256=LKLIpU26-owML4B8gGxcodQB23lYaohvM-TSSF5IPjg,1721910 +torch/include/ATen/WrapDimUtils.h,sha256=QkNV0g_RpjyzIYPz_v6NwbuP9bFzhgWjMNiDbFmGXOU,4845 +torch/include/ATen/WrapDimUtilsMulti.h,sha256=1i_cHZn16UOSbcVH9A1P3rzHPlGt4T610zycW_Pbfe8,1075 +torch/include/ATen/autocast_mode.h,sha256=TkYyMfUmb0jryRYezAPlErP6MdOx9kUkZVqG-DVR2Tw,39706 +torch/include/ATen/ceil_div.h,sha256=TtkKnqzlAlpfznlNBIDaNUHxD8q1QRERVklj-VEm5Q4,497 +torch/include/ATen/code_template.h,sha256=ibHTnEjVLfA9ANIiS_4TenWL6wurRohu3TT3S4q5VoQ,6821 +torch/include/ATen/core/ATenGeneral.h,sha256=dcnASl2yEZMXGYbsBQkH1W-KLBoQTSSoEfujumNJHjM,45 +torch/include/ATen/core/ATenOpList.h,sha256=aTsrO4WWb4s2HIBiOyha28F9RXajxvzINAGv3eQ5AEs,246 +torch/include/ATen/core/ATen_fwd.h,sha256=dj37GXi3sI-7Hoz4M4JLEHO2wh-cIENzENiDcYIZqn8,1024 +torch/include/ATen/core/ATen_pch.h,sha256=RTFebKocSHUxO6GjXDqGendNDulyNfjHVR0i6_D96rs,5250 
+torch/include/ATen/core/Array.h,sha256=FuocCsn_DODTuCNUEKiEJSvpowX0wS6gI0KAQOPJqs0,890 +torch/include/ATen/core/Backtrace.h,sha256=Vi1LiTrJX5BY0ADLxoJkwC7H27H30tTPM89xZ04A5GU,59 +torch/include/ATen/core/CachingHostAllocator.h,sha256=NEMzUbdo_qlZ72jk8ldH4VT-X41Cks74vihCQiU6Q8c,13434 +torch/include/ATen/core/CheckMemoryFormat.h,sha256=C6t19QPYF8PrL4HP8MROs0iLab6dfAM9BeDKtdZ2HYc,852 +torch/include/ATen/core/DeprecatedTypeProperties.h,sha256=CCHYGMiKiIY2LR7xVlHYrIztBDFrKy0T0kYXV1Kd0XQ,3879 +torch/include/ATen/core/DeprecatedTypePropertiesRegistry.h,sha256=BGXyKbHymMH77T9GtghhRz-FVnQThWT5WWLvuEqXfxE,844 +torch/include/ATen/core/Dict.h,sha256=8c-sA-w3r0A7Cj7sE-k3uqgZt3vr3j0JbWmoVpvaGjI,13234 +torch/include/ATen/core/Dict_inl.h,sha256=TYaiTIQJTvbfFym-bs8xc7W-7h_NsPdMAJ-OjqBx0NY,7518 +torch/include/ATen/core/DimVector.h,sha256=yOKoGLTzpQNSO-5Dc1iemVnT6Aa9mnjwtT6JpBhOfXs,279 +torch/include/ATen/core/Dimname.h,sha256=zzEGqv_P0b4bKxzX7y7BkCpT0xdi1nnyZVZp78hm0Fk,1167 +torch/include/ATen/core/DistributionsHelper.h,sha256=TkMmcndOLrOxAoqjedlQ6LXOKhtU0HuOSuKnEDo3Ywk,12567 +torch/include/ATen/core/Formatting.h,sha256=2-RIUD4XKg4X64G3OPnessK85uyo5NioFstRDHTFtxA,693 +torch/include/ATen/core/Generator.h,sha256=lZme09d3Xrc7WzCMhidsnLlFBEAGHHWoI2XCpslFxDU,6406 +torch/include/ATen/core/GeneratorForPrivateuseone.h,sha256=NJalyQExfXlcGAY2UNYSMFSVxrT8rUKk-jVISMBwYwM,1071 +torch/include/ATen/core/IListRef.h,sha256=ML06cxR46qoCiHdTEQh4p1ao9uarv8APP23CUF37SU4,20938 +torch/include/ATen/core/IListRef_inl.h,sha256=jSNVvhcnsSQi9UhTBwMzinG2sdTny3gq3p6SwMIKFdo,6109 +torch/include/ATen/core/LegacyTypeDispatch.h,sha256=OtpiQhk03P4M9Cxv7lihyc7sTNkEvbgyJSCyHzwh1dA,4857 +torch/include/ATen/core/List.h,sha256=z-ybIMFVfbB5yI5PDCrQJ7d-v64kolcgBcjE77GsrH0,15958 +torch/include/ATen/core/List_inl.h,sha256=3xRBJrSAu5b_b7YVQayfZlKhjqxoHkE0hN3GrRcbSE4,10777 +torch/include/ATen/core/MT19937RNGEngine.h,sha256=UE78lVAVtnNeB_XkGLD08Qaj-mW678NMmy7tf8qNKH8,6510 
+torch/include/ATen/core/NamedTensor.h,sha256=CA5Pc3TGk1UfoQ7o3WpaiwAi8_lMItIdVaHTnvo_VR0,5036 +torch/include/ATen/core/NestedIntSymNodeImpl.h,sha256=kHhFL7dl5qVlYJpfSH_l1zrPl9ZuOYZCdxmgzCxpVz4,5961 +torch/include/ATen/core/PhiloxRNGEngine.h,sha256=nT1duXK3nsHoOuMt-HEOAGl_1vau4X5HYn9xNj8tY5g,7814 +torch/include/ATen/core/PythonFallbackKernel.h,sha256=qBpI58DEY8_NW4FhjePCgxHeEpVC6ubdtubhAN39j8Q,506 +torch/include/ATen/core/PythonOpRegistrationTrampoline.h,sha256=xedhMKWYJL6qkxzBlAICSiJGHLjX_QUlU9hwTcjHSJI,595 +torch/include/ATen/core/QuantizerBase.h,sha256=qwPle1oeJZQYaDzFfsKpMI60msE9wzMPI14PA7DZlCE,2677 +torch/include/ATen/core/Range.h,sha256=v_kVnAoeiHq0eRTodrFqDV8nudjBOX1mwhdeiDrJRt4,418 +torch/include/ATen/core/Reduction.h,sha256=mrCDtCU9J2CCzD7EWxvijtEmbGxwn1Szr_pC8zkQmbI,399 +torch/include/ATen/core/Scalar.h,sha256=6_8TdN11df5vabWQCR30zjSFTLQbwnsUMTWfzKaM_sM,29 +torch/include/ATen/core/ScalarType.h,sha256=o6WgbV6_nD4gO6tVJo7Hx_NQfeXC-iB3TFXbfRusbBI,33 +torch/include/ATen/core/Tensor.h,sha256=7bT_jBE26NiDvifZrkla6bpwosowJviXHofTeYpULCE,2222 +torch/include/ATen/core/TensorAccessor.h,sha256=ARJVCXj2PB2ELRDmed74TSiKeP8VmBIl0Q-GL-fAKkY,10525 +torch/include/ATen/core/TensorBase.h,sha256=EwBINmMLwhkZSik35_a34FTcPujNb1eSaZOXdIYkjtc,38071 +torch/include/ATen/core/TensorBody.h,sha256=S6QOMdB4Vaea1_MbSDq5EKJKu85Ezp2rDF-VwN1WKeM,290301 +torch/include/ATen/core/TorchDispatchUtils.h,sha256=1mDjM9W3xmII3IT2vznD_R7hnEQkp9lDKs_KOy9LwsU,484 +torch/include/ATen/core/TransformationHelper.h,sha256=d7p_LKAoxSWPjPDPsmNU9xoqkzaeC4LI98Ccococe0U,6854 +torch/include/ATen/core/UndefinedTensorImpl.h,sha256=gC73nEbVD9sYFdMxGPovAHH997995eRiDYXO3S1y5Uo,42 +torch/include/ATen/core/UnsafeFromTH.h,sha256=bQyv5mA6MEL7f6hp0LIQYaHBaQVilgt_frmUzNCD28c,708 +torch/include/ATen/core/VariableHooksInterface.h,sha256=-p0SCmkGH27Sb1MHzDd2sUUGJidjzrE7UynyPhKt9jI,3538 +torch/include/ATen/core/Variadic.h,sha256=ybiXfI9v-uojD-RMQVKXDLxm3w3c7g1tqy_zJrOHxCY,2380 
+torch/include/ATen/core/Vitals.h,sha256=iw-VWol1buZdO9MAO1YhDHxuw0BEA-uCAHRVCNWJ9XA,2309 +torch/include/ATen/core/alias_info.h,sha256=nk_ndcIKgjcGxJ8sko4IzWfY0JGzIk8XYaZwAdfjHxM,4160 +torch/include/ATen/core/aten_interned_strings.h,sha256=Rajt83lVT6Zqu4nDoH8tKo-zilX-uITU9EW9yYpbyJI,55678 +torch/include/ATen/core/blob.h,sha256=y91p9AVk6o9Kx-pDHrB9l3Y19LG0JKeeImZ7DDreL8I,5251 +torch/include/ATen/core/boxing/BoxedKernel.h,sha256=SY8En44wDTk8vtWO1CzPUdokN5x3YWehacJoJLx7N4c,7924 +torch/include/ATen/core/boxing/BoxedKernel_impl.h,sha256=pHz9otw_XAcNlLX2jXtkIUjqT1Wg8q_W9a4oRvfTkRE,3261 +torch/include/ATen/core/boxing/KernelFunction.h,sha256=zlMsCxeqK-s61TSYYKWP9_zzqCMPzQwM4q9rs6Q8o94,8535 +torch/include/ATen/core/boxing/KernelFunction_impl.h,sha256=dEOjk_ouhrbYoApdQZ2iCR-b3jVTWkaMK-GP56_2hN8,10690 +torch/include/ATen/core/boxing/OperatorKernel.h,sha256=fkeWwwDc-UjCqu10E0K69Jdf5h5Uay_BXsEv1Do8wHs,692 +torch/include/ATen/core/boxing/impl/WrapFunctionIntoFunctor.h,sha256=AvWOZxgDAqErgFwkc1s2wVMod8j4lBv5H4aW6XIQZBk,1313 +torch/include/ATen/core/boxing/impl/WrapFunctionIntoRuntimeFunctor.h,sha256=nIpD8XFNKEIJdR6RgF9d3-N0uOSBgqKYIzBw6P0iBVs,1454 +torch/include/ATen/core/boxing/impl/boxing.h,sha256=CTxLRPNp2udGNIhfXbHiyBoBZSp0q57Aq5n-46EQxSU,13418 +torch/include/ATen/core/boxing/impl/make_boxed_from_unboxed_functor.h,sha256=rqmkQZfh3nkH3dUvQCxWXA0vBtbTnWxWTfXz_HVuCwM,31258 +torch/include/ATen/core/boxing/impl/test_helpers.h,sha256=4Mm5y1fbiG2WAz-hExEm-JmEUd38iktQenClw64q2Qc,4296 +torch/include/ATen/core/builtin_function.h,sha256=MPy6hWQrRiS7YH41KwT0BVgD_hMbab5bsr18gbbF0sQ,2044 +torch/include/ATen/core/class_type.h,sha256=uREzYUAzue96F4S_k8PvDDfLee-GifV9rK-gCFaxhEE,14052 +torch/include/ATen/core/custom_class.h,sha256=K5zORoYA965P8p4mtrEBnTXN7HzMA1x7KECVMrvTPG8,744 +torch/include/ATen/core/dispatch/CppSignature.h,sha256=_TpTmgWGkSTiqcKevLVZOJMdDZsekVjcXUdiYvpTDEc,2455 +torch/include/ATen/core/dispatch/DispatchKeyExtractor.h,sha256=hy2bcDk-7XkxoYRYS-bf45-fBH5TyocPIXUmE_ZYtUU,9670 
+torch/include/ATen/core/dispatch/Dispatcher.h,sha256=g_rIm6wHpUVKqNnR4TwsEBVkP7ofZET4Q-EXjmLrDlk,32986 +torch/include/ATen/core/dispatch/ObservedOperators.h,sha256=GKoSUqGxIkIkNOO415E5tOFAUr33uxrk_tL1vWpo6G4,329 +torch/include/ATen/core/dispatch/OperatorEntry.h,sha256=SlCXLmiOwph3RTG_BBze6vOXBV7O_sDi_Wz-fraWBJo,12887 +torch/include/ATen/core/dispatch/OperatorOptions.h,sha256=Kzc_qB8w3bMoyojv5RuhPcPaSRd64V_etw_n4blnHIQ,923 +torch/include/ATen/core/dispatch/RegistrationHandleRAII.h,sha256=e-Mj1D2ucxcOFm_VvYWsNuRxasim7WyrfkXvlfSTIhY,858 +torch/include/ATen/core/dynamic_type.h,sha256=VNz1_WqIpeO9vBJ2kxa6BlleTrJQMDp7XCnkmpraCl4,10401 +torch/include/ATen/core/enum_tag.h,sha256=BCOg-8HA0W4yFtbfkmcn7HTpwwdMzkLfLqOWdSYm23w,480 +torch/include/ATen/core/enum_type.h,sha256=T_m_Ij8oJQAlhvAsXkgMymZ-9a7i_W0WPt-QeclLlXM,2765 +torch/include/ATen/core/function.h,sha256=_3XLK9tzWbQAvFxMwSPdmIMFSAIWYcWDF4JvOZNKHv8,3455 +torch/include/ATen/core/function_schema.h,sha256=iNObUwaipAOAJDgUs2QyoyIq49CTPP_HtuXOYXYfgW0,23961 +torch/include/ATen/core/function_schema_inl.h,sha256=nZunMUy5vA-LOnkmVIkQlpVq5yq8-xob9iusMW4jv8o,1988 +torch/include/ATen/core/functional.h,sha256=nQru70dgD4D1U6H27oL3mFRbFigON0XwekO7-6bgoVA,1464 +torch/include/ATen/core/grad_mode.h,sha256=uhkLSjH7WfIVojVuftgyWZ92HPS41dQiwclS9Sk4QOQ,210 +torch/include/ATen/core/interned_strings.h,sha256=ascLYehXhTHaNvqHXaG2AhZxdH-iCEojHXuCP71h-NM,13408 +torch/include/ATen/core/interned_strings_class.h,sha256=9oF2TEI8EKu5wcse3CzAyR3eD3vAYGOlrTyZ1FhV7-w,722 +torch/include/ATen/core/ivalue.h,sha256=CUSlE7DvbnB2QkSHlTARSksHLEZfIRxvk5pGuSdnUxY,51156 +torch/include/ATen/core/ivalue_inl.h,sha256=7sxO0r1FmaD4Vo0vwd69cxG1RBsnMhIAJL8G5VS16Tk,86732 +torch/include/ATen/core/ivalue_to.h,sha256=8EZ5vbME27AVJ8AiBbT7jcvxzWCvGlTNS_12MWCdXK4,756 +torch/include/ATen/core/jit_type.h,sha256=3tBFFm5OMfPuHPaK57Kv_Df9ufr1Ouj4xbSf3Olyp04,71881 +torch/include/ATen/core/jit_type_base.h,sha256=XqKErxazieFhKcFlMEj1ogDBurxPzqW7Nq32OXTwhn0,23095 
+torch/include/ATen/core/op_registration/adaption.h,sha256=sgnTpxOIT6okZuJKcqqqZQpvB5hTMYWi4VAHFrG0XMQ,3241 +torch/include/ATen/core/op_registration/infer_schema.h,sha256=ZJhAv7rZUsG4_SG8MhnSUgPhwy_54m0A6Rh8DQEuHrA,6781 +torch/include/ATen/core/op_registration/op_allowlist.h,sha256=GbSN2aaSGw81FNF6lVBOtuyREKjBN6V1kDU_9hdspjU,6908 +torch/include/ATen/core/op_registration/op_registration.h,sha256=bLWv6AYM86euBKAzqQQwviTS3471OQPFB_lTlMRLxlM,28654 +torch/include/ATen/core/operator_name.h,sha256=0U3wnMH6NS921m2eDH1ha_UtFqaSm2E3-6xNecM2ed0,3045 +torch/include/ATen/core/qualified_name.h,sha256=0dN_w0uzEKRTrRVkIifW685vE2MZRte5XrQJeMnluZo,4373 +torch/include/ATen/core/rref_interface.h,sha256=jZQZJUsGdWaWYl527FztgPaG3bvh2pGu7_5oovdsZ6M,1143 +torch/include/ATen/core/stack.h,sha256=t0wImnAXwyLuqXdTLCMnRn7mtnVUkWsaXmOS6qNHf_A,6228 +torch/include/ATen/core/symbol.h,sha256=2biKjnmqHB4tRsXCmK_ioN6RcBq_DCUN9ntusaosqj4,5874 +torch/include/ATen/core/type_factory.h,sha256=SUgeuP5s3d4XP0GKMviJP5VZdQrssZ1I38CfLH6duSs,3252 +torch/include/ATen/core/type_ptr.h,sha256=bLQzZcfOt8jjfhoed9oBvy18T-afIhYAl2JDPce5EJM,1218 +torch/include/ATen/core/typeid.h,sha256=kuExuy1u5A34ATIuUUgf_aPWYvKKXQwfE0OLtHs8hhM,29 +torch/include/ATen/cpp_custom_type_hack.h,sha256=j_wxuMgOWKLv9uotd8SW3Fm3QMyvVU9XQQePdrao1fM,5430 +torch/include/ATen/cpu/FlushDenormal.h,sha256=QqfyFZmXGzLL1WuhGhc7SWsNq6-TARAJufstsTuMjQw,537 +torch/include/ATen/cpu/Utils.h,sha256=rQaNg4hRXSaKFpoxoRjsANaaCT6fHZNib1xXn7WfC7Q,701 +torch/include/ATen/cpu/vec/functional.h,sha256=PznpXDxQCMjC8FX6vXq8opByq5PY1JElx5blu6kIttA,102 +torch/include/ATen/cpu/vec/functional_base.h,sha256=ySKq6zaYVSipNHv5mkyvfdXGVoxbjROtX8uuHCTEROM,13047 +torch/include/ATen/cpu/vec/functional_bfloat16.h,sha256=V-9mMXiSH0er9dYY7_vqdomTkIveLx02EPQbBsMHfX4,24678 +torch/include/ATen/cpu/vec/intrinsics.h,sha256=9qCBxPS6Bk0QQ94mj8OVlZ5f-cHuq31J0waxvnsQfcg,1880 +torch/include/ATen/cpu/vec/vec.h,sha256=t3d-Hia4E37SrivrBsgrYoN1OJ4dn7ni7X3gYs8WTmc,1280 
+torch/include/ATen/cpu/vec/vec256/missing_vld1_neon.h,sha256=iUZEGw4kcopg806wRIVP-R5Z8YWxRpUTqXauRcy8yKY,13559 +torch/include/ATen/cpu/vec/vec256/missing_vst1_neon.h,sha256=1bgQ5STR5_8v-jf2MPYWv9b-q8Px_aUKcI_BKN18BTs,282 +torch/include/ATen/cpu/vec/vec256/vec256.h,sha256=L8axE2IugxuDoWDTgalnYTEw2HkKwg7_aBpMMIdYaWo,11916 +torch/include/ATen/cpu/vec/vec256/vec256_bfloat16.h,sha256=-lCw52xqPHK2hgTeUyqH6JrOntCg1WaDRKPd3lFZYts,42208 +torch/include/ATen/cpu/vec/vec256/vec256_complex_double.h,sha256=JEWUrPxLryRNQuwkqbQeMe0y7Pw03PkbmZhArRpBk5U,18141 +torch/include/ATen/cpu/vec/vec256/vec256_complex_float.h,sha256=ha4rm-pgFEzviGrV-cptYrfCAoKVUMMyR83tmeu4V7c,19946 +torch/include/ATen/cpu/vec/vec256/vec256_convert.h,sha256=jW63tjLYqRIJtxZCyZXhL6BdAsxaGUrToBp_PGYAUtA,8966 +torch/include/ATen/cpu/vec/vec256/vec256_double.h,sha256=_p3j9ATpuaCrqMxHCdNxApVdgLqdWiNoLLuGqxfeTtM,14693 +torch/include/ATen/cpu/vec/vec256/vec256_float.h,sha256=GkFqQtS9NcmuZKA5zO47ISQJMyqPl84skHZehQpKl8w,22792 +torch/include/ATen/cpu/vec/vec256/vec256_float_neon.h,sha256=RUGviZ6zJ9drFI7vKNIVjR6v-wxWAz8Ktcyhif6DqT0,30065 +torch/include/ATen/cpu/vec/vec256/vec256_half_neon.h,sha256=o0zPOwF_YrxzWwSV2CLnUQLVqKDkhdp1ph56ND0CH_8,28996 +torch/include/ATen/cpu/vec/vec256/vec256_int.h,sha256=iWYrk6DByQxZYpz2rDxM1aXMVlyoJE7CV5EOcc3fHJQ,60607 +torch/include/ATen/cpu/vec/vec256/vec256_mask.h,sha256=GsAHh4tXtK37cZzRqWIz-pjTa__4T-JCPqQ0yf0xFdE,8772 +torch/include/ATen/cpu/vec/vec256/vec256_qint.h,sha256=W-DlzdUok7Qq1RrdlnU155Vz03OeftyUyL7o2KwNwCc,48294 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_bfloat16_vsx.h,sha256=BBlP5r-I2q2DRRJtU2TR810coROiLtZqRm8Es5F0-1g,2113 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_common_vsx.h,sha256=JXWFwf8ksihkmePgOVKaNqOIY-O3FOCzoZlLYGOSF7U,8052 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_complex_double_vsx.h,sha256=0PTtwIGWFV0XsaaiaYGfjkcixP4ArnTcR-L3xTJx84Y,19024 
+torch/include/ATen/cpu/vec/vec256/vsx/vec256_complex_float_vsx.h,sha256=rGcSSc3ikxZgE2lDvm67b93jjItRI-rJPDJLgEooPrM,21570 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_double_vsx.h,sha256=GfDH43UwGhHaKIf2u8lQVZxIKCe4LxTrBaF0B9E8kqA,15871 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_float_vsx.h,sha256=--f_W84PfnNJY1GXelrt1NaojTkTen4RHJuCsDZJY-0,16269 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_int16_vsx.h,sha256=gATHTuaCdhiXBL9jcYT8AjBw0tRRRX8EM0NroqA2vsc,14087 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_int32_vsx.h,sha256=l8fsXA1LXuf3lqYk-T76yomlJ1Rqz2rillVwkTKOaB0,11916 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_int64_vsx.h,sha256=4SHB7QXJKHp4iKUv8P9BxFmMoIQh0NT3nXcmZIs-kTo,10175 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_qint32_vsx.h,sha256=FUbSCUbg35o7cplLJNEKqw5yPT8h9PkDXUiJMvGFVaU,9779 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_qint8_vsx.h,sha256=CYG9BVSc_z2TI2RRrvkgoZSo96F7qVkywJhSdjdeRO0,16696 +torch/include/ATen/cpu/vec/vec256/vsx/vec256_quint8_vsx.h,sha256=54-fw8KFM_pQ91vroQkfCeQXkU0IGx8osU2HDbxtAy0,17379 +torch/include/ATen/cpu/vec/vec256/vsx/vsx_helpers.h,sha256=YTXDZsKMSfQN8eN37sxnTDzEtfVUlkNEXutDTvwb6RE,20059 +torch/include/ATen/cpu/vec/vec256/zarch/vec256_zarch.h,sha256=nprf0FujUxcnjmCxlW7Ym6B6Bg52zcyMABH25HUbvgA,106960 +torch/include/ATen/cpu/vec/vec512/vec512.h,sha256=hVFMmgWW24Qd2E6rCPY5tp973h3Dtv12Qtzitzcz29Y,10478 +torch/include/ATen/cpu/vec/vec512/vec512_bfloat16.h,sha256=AEU7daF-M78lCslRhW2cgv1cZ3LoIyuhPOvfLxaiLQ0,64409 +torch/include/ATen/cpu/vec/vec512/vec512_complex_double.h,sha256=V7zsFvm9cClxugJ_eZUlhHbLbOHEM5EPjozu2hTYylo,23570 +torch/include/ATen/cpu/vec/vec512/vec512_complex_float.h,sha256=LpI5V-E10mOz08B5LfopDO3JuPwIc6_08UDDcYUc_WE,43764 +torch/include/ATen/cpu/vec/vec512/vec512_convert.h,sha256=pNQHRYtRjY47o0LQWJS0SOa0HUIp_JvsoisCB4j5WJA,7291 +torch/include/ATen/cpu/vec/vec512/vec512_double.h,sha256=V4JncGjScPIgb-qyQWcXEFy6UwhNYi8jZ3d-hEtW-rM,16624 
+torch/include/ATen/cpu/vec/vec512/vec512_float.h,sha256=GXBdlKANESZ-ikhUkP-EPUBCe3xJ7tconCktZT7OkFo,25674 +torch/include/ATen/cpu/vec/vec512/vec512_int.h,sha256=0NVe0x-6pdlxtM4RDA6Cy2AhsJTogrG_H6v9IadMXIo,54499 +torch/include/ATen/cpu/vec/vec512/vec512_mask.h,sha256=HTIVJtDC5iVXjhRh3GkeeVtAWwX8Yfziii4TSvW1ZXY,12361 +torch/include/ATen/cpu/vec/vec512/vec512_qint.h,sha256=NR1ssdnN_3JCX--DmYjZ8jwoSXipDthezC0P2eqVdmE,50371 +torch/include/ATen/cpu/vec/vec_base.h,sha256=IvxAXlizjp5E8PQS7iW0MYl6PggyDkbe_zmwtMCJExA,42287 +torch/include/ATen/cpu/vec/vec_convert.h,sha256=MJd13zr_iEp3oXW1GwZwiuGgOttvvCQMz02RgoC0hqY,1838 +torch/include/ATen/cpu/vec/vec_half.h,sha256=UuK6ECbFwsYk2WkxuY9lDq4O3LQEQdpTsldHzZne3Qg,1388 +torch/include/ATen/cpu/vec/vec_mask.h,sha256=N6acgRewaXSHyTGSxVVctLM2wtzbnwgI-BExbtFDUsU,9901 +torch/include/ATen/cpu/vec/vec_n.h,sha256=3jUFBCiXdHhQQ3dSsT82oap1FL5zlzh2SgT3wpWKVRQ,12000 +torch/include/ATen/cpu/vml.h,sha256=W_knWa06tFhxEERWRZvyNW1bAR3twlfTtaucWF5U8nc,6075 +torch/include/ATen/cuda/ATenCUDAGeneral.h,sha256=hzsZuf9MUxBbRSSpsefJgyxO9RnmYcnfdKYOKFEfIwE,190 +torch/include/ATen/cuda/ApplyGridUtils.cuh,sha256=OznX8USe4TdDRxZMO2O0VH7wlbxOY6wkGOdw1Ycdpfo,1309 +torch/include/ATen/cuda/AsmUtils.cuh,sha256=LQzRGYOe3jlVaNogJAUH-v9fldTuGQQshcNNeU4JzD4,3394 +torch/include/ATen/cuda/Atomic.cuh,sha256=RY3z9gTg_YRHnQfAR-tnElwJKWKgw_x7_6OARISZoUw,26672 +torch/include/ATen/cuda/CUDAApplyUtils.cuh,sha256=QRPaDCmeCNrozKO-m9OcJEoIYgJHvxAdB1dMOYIoyN8,20446 +torch/include/ATen/cuda/CUDABlas.h,sha256=tnxe70e_Q65Xov1lsnZQ-lQpgbYNkKI29Z1VbpNI6i0,12981 +torch/include/ATen/cuda/CUDAConfig.h,sha256=cwpliAylvjcwIGEyIpqB86v-He9quxJ3mWEKmJk-N84,886 +torch/include/ATen/cuda/CUDAContext.h,sha256=Ig-8EeKPP61vhcGchq20bULnvd-b4RkqJE-PGXs0qcY,238 +torch/include/ATen/cuda/CUDAContextLight.h,sha256=a9XuWBDl0BX2wUcYxV1yXVz8WVTiXZ3lZqsaRR_rwAE,2737 +torch/include/ATen/cuda/CUDADataType.h,sha256=TrpXjoBwEwrNSL0hRXzvA7WeGnOOTOPYFdYCR9K_aJU,2878 
+torch/include/ATen/cuda/CUDADevice.h,sha256=1n4a7Uxt4i3pixkfEVCh0fiz8ExGeaeQ-kdUzFp4Dlo,532 +torch/include/ATen/cuda/CUDAEvent.h,sha256=GHUxgDzrjp1Opaf7mOOyU5hiSli84KvqDf808T59tXs,7096 +torch/include/ATen/cuda/CUDAGeneratorImpl.h,sha256=g6YxOlxxz-xTpBQTKJ3JKxuXuSM1dwuAvTxB4U5IndU,6098 +torch/include/ATen/cuda/CUDAGraph.h,sha256=DLgW6kmcWVTjnXi4W2_5BkTjtbAy_hkCjMmnIRRGnL8,3149 +torch/include/ATen/cuda/CUDAGraphsUtils.cuh,sha256=H7Aua86TJFzkOo53LTu4SZsRpAPD9Yf3qOGWeYEpdG0,1901 +torch/include/ATen/cuda/CUDASparse.h,sha256=Ix0CX2HkVzmNEs8iWRZIR14qrFJMkyjs_L4Kcl5Jpoc,2596 +torch/include/ATen/cuda/CUDASparseBlas.h,sha256=2fSHeA4LmKmiEfrU4l0mVKS_3WxkG2k4uTrp4Lr9cdc,12703 +torch/include/ATen/cuda/CUDASparseDescriptors.h,sha256=i1I1uMnbWe1teZL8oZvIhLW_v3rRVZJgl4uBsNhB-lk,9381 +torch/include/ATen/cuda/CUDATensorMethods.cuh,sha256=cyQq_CW-cqTVYG5n2yQKI552IDhMnlGZqFPurRzdUk4,270 +torch/include/ATen/cuda/CUDAUtils.h,sha256=agWW0ofbSjZFEJyLb5rueLR4p0-4S6Z-AN8trrvch40,416 +torch/include/ATen/cuda/CachingHostAllocator.h,sha256=-ROtzya7ddyCPTHfIr44zUyZVTcBWMcYZeInuGF8-Mc,1326 +torch/include/ATen/cuda/DeviceUtils.cuh,sha256=M6xUbeOJBa9xF8gPLhSG7AIMDbK4e9B1avhM2eXLXXU,3280 +torch/include/ATen/cuda/EmptyTensor.h,sha256=wRp8R6rCIp9BYFFHNrZWyux9sQf0boSKI1SJIP-fW_k,1206 +torch/include/ATen/cuda/Exceptions.h,sha256=8HavfCrcn0Yp_NBs51ijxnlruxzHjwmdkNZOal5RaJQ,11417 +torch/include/ATen/cuda/NumericLimits.cuh,sha256=Pucf4S3VEL-sT52Es3G9iXpX9WUXGjwR1-RvN1-6Hww,5214 +torch/include/ATen/cuda/PeerToPeerAccess.h,sha256=EMjud2nmOSYQsodrAYW_N6ph8zsRXYRPjrAn-s1NrZo,239 +torch/include/ATen/cuda/PhiloxCudaState.h,sha256=9ChfldlFF4J6Qcr7VNBtrQ_-jKVoQquIOuJKLYPXz0k,85 +torch/include/ATen/cuda/PhiloxUtils.cuh,sha256=pVnvthsuIHgFnebV4_gj01KXpuo3Fgddc3cWi1uswzE,95 +torch/include/ATen/cuda/PinnedMemoryAllocator.h,sha256=bfqZXydAQC59Kv3VBwTvVyUVoK2aazh9Qoh9Qvo9m94,245 +torch/include/ATen/cuda/ScanUtils.cuh,sha256=OolU3qBsTJupQmcl407sFTkepxZ7gdUO5xi1i1iBQxI,2027 
+torch/include/ATen/cuda/Sleep.h,sha256=yDtfwQXKe_TyuXKwZhqb_vQI1l9Wc_J2VPvl2MrHL_c,319 +torch/include/ATen/cuda/ThrustAllocator.h,sha256=JgK1MhDwoM1x_OSXcRaZqE6WmZ3DonmAWc0Z8WKiEyg,505 +torch/include/ATen/cuda/cub.cuh,sha256=g7EPkEjpeEIMFrSbklXr3QT905K8fL5qrLXBIZSlfSI,14776 +torch/include/ATen/cuda/cub.h,sha256=whK5xvYlfQeK_qRNFGaECPI23eYfJZ4rdZl3IoyCfX0,3368 +torch/include/ATen/cuda/cub_definitions.cuh,sha256=oZ_AOGaE8MaiagHPLDyoI39QhX23HNMwnOyKAMRck1U,1452 +torch/include/ATen/cuda/detail/CUDAHooks.h,sha256=5x4WdWMM1S2VpluSuFkx-vwrR225VI5UrK23nbtPkqQ,2361 +torch/include/ATen/cuda/detail/DeviceThreadHandles.h,sha256=64OvgUt-NFTsK-8aHcpWMz6WtDymDfgXd2vPJckrNJw,7017 +torch/include/ATen/cuda/detail/IndexUtils.cuh,sha256=QsNiULyx5AnY9QiQwxLDxOHWAyKGCynWM22UOXa65e8,872 +torch/include/ATen/cuda/detail/IntegerDivider.cuh,sha256=qPJfE_dg2VjrM8lgNt7YVA7ltRg-X1qt92XJCSW-w_o,4019 +torch/include/ATen/cuda/detail/KernelUtils.h,sha256=6B57xlTkXWn5KX9uMJzDpU6w11OVyLNsufwj1pcRDl4,1523 +torch/include/ATen/cuda/detail/LazyNVRTC.h,sha256=mxEGFOoO7658DXR0jEe_TtUN0UShNHVM7xokcFufs3c,220 +torch/include/ATen/cuda/detail/OffsetCalculator.cuh,sha256=95SweK3n8bOBlLqLEaJNKQpuZbOrbW6F_PUUD1VzHm8,4431 +torch/include/ATen/cuda/detail/PhiloxCudaStateRaw.cuh,sha256=C5cTz2SWfNtFPyRjRaOtI85GHe6XuoOIXEDr5GStRjA,1358 +torch/include/ATen/cuda/detail/TensorInfo.cuh,sha256=Oq02D7A8aFXr0A1f5IG8vUa5arVyGiLJwI1B2sFhpIk,3239 +torch/include/ATen/cuda/detail/UnpackRaw.cuh,sha256=pMSjIYoTq_pLkG-Tvwlzd2nagrp3gSJlCpI_yXPYnhM,1462 +torch/include/ATen/cuda/jiterator.h,sha256=yfP0SBXGIcp0aZq4N5TqNF8XDSmIqgENZeGac4xd9ik,960 +torch/include/ATen/cuda/jiterator_impl.h,sha256=YFtIS1JFU5M1a3OiFYuWWjZPG2BzZPAAJwr-TWNnhFo,7112 +torch/include/ATen/cuda/llvm_jit_strings.h,sha256=xBnyc1FWrmVgnFrIS-tvwHdINw5reXg7j1W6wAHsa7c,428 +torch/include/ATen/cuda/tunable/GemmCommon.h,sha256=hnlPSCfIxdoc0Bc4aVYHX0Ir8_1sxKfdezb0ypCSbdM,11198 
+torch/include/ATen/cuda/tunable/GemmHipblaslt.h,sha256=UG5u_gu5uWvJX78X3Eox88zGUlaWkHZxgNyn2EHHxus,18865 +torch/include/ATen/cuda/tunable/GemmRocblas.h,sha256=XIH_6tbY6dZoWk8Dk-f__wVt1ANPdgPaTsVrWIOmm9E,10198 +torch/include/ATen/cuda/tunable/StreamTimer.h,sha256=936M4d8zIkA8681mdW_HKSM-bLgFv645F4U9R3r6pig,785 +torch/include/ATen/cuda/tunable/Tunable.h,sha256=KqoC7tpY0qVY8kw2nZQyVvgUtQ81iY5x1T7i7qp7tno,7477 +torch/include/ATen/cuda/tunable/TunableGemm.h,sha256=cN5F7tVRTLgxcaWVOZnunKc4P4_qqcvwaqMFnhP6KN8,8986 +torch/include/ATen/cuda/tunable/TunableOp.h,sha256=denqPuswzRm6uALr9iIclGHimSaDgGG6gfCYvlCHGEo,10456 +torch/include/ATen/cudnn/Descriptors.h,sha256=I1lqi3RRSBtl3PhopmZzLJ7ZJevF7yJFEnoJcnRcHNE,14964 +torch/include/ATen/cudnn/Exceptions.h,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/include/ATen/cudnn/Handle.h,sha256=Q3u6V5WAOzyq9Hff0vEQf9sQG2dtn-xVJf19Z5YDA0Q,193 +torch/include/ATen/cudnn/Handles.h,sha256=y6xfj3ZCKYNVzohfiFqsqeNSeXCpHSVqtB7a5WLhF3g,44 +torch/include/ATen/cudnn/Types.h,sha256=SkDDXkxZvsugjktDgXDQLQhgd9ztjJmPV95PB0Yr09o,310 +torch/include/ATen/cudnn/Utils.h,sha256=nHth5X7BpAgUT_OjgpR0aj3yHUtWelEWqRW5GTMjuyQ,595 +torch/include/ATen/cudnn/cudnn-wrapper.h,sha256=6TsB0_lQJ63a8X3WCun71pSUBTBoz9lyYmx0ElhUEXc,540 +torch/include/ATen/detail/AcceleratorHooksInterface.h,sha256=7wSQjMEo_g4uGqzCYdvp_Yk3TMqQPP3192UpgwzmKHc,1700 +torch/include/ATen/detail/CUDAHooksInterface.h,sha256=LtH17KjPByiOYVTArP5fr5lGupLhCJhpCNq9aT4B21Y,7247 +torch/include/ATen/detail/FunctionTraits.h,sha256=hXxkxMEEYleExSPSUbTNXcan2bjV888M3A88DCo0PAY,3075 +torch/include/ATen/detail/HIPHooksInterface.h,sha256=k9p5qUo_ugsQkVIwPoNRO-Fh22Grih7PugKIIjAU6G0,2187 +torch/include/ATen/detail/IPUHooksInterface.h,sha256=kdWU202IKxacieC-_y8t9LuYq7wKER3RRUQt4a7fZAU,976 +torch/include/ATen/detail/MAIAHooksInterface.h,sha256=lCNtkT2AKuXuq_GKk3Pkl_4KabMVtmnpJiiqlZwO6Bk,909 +torch/include/ATen/detail/MPSHooksInterface.h,sha256=R3A4STIMBj4hV_PxDLacLtMlwWnbput3OiCkhc9IJmc,3441 
+torch/include/ATen/detail/MTIAHooksInterface.h,sha256=0FzU6lolo_vG75R1S1CkZ3Ho4d-3fJa2NGDOGp6nRCk,3176 +torch/include/ATen/detail/PrivateUse1HooksInterface.h,sha256=pVvv2B4tj_yWFJZBY2LS7ZUZvNqBX4w455iTSZxsXIE,2101 +torch/include/ATen/detail/XPUHooksInterface.h,sha256=p9b41vAuQzsZrDq85o9YTVWmNrnZB45pmdpq1xorK-4,2477 +torch/include/ATen/div_rtn.h,sha256=sPoWObIG7LtKrAxw_5aTCP-bqVotCNujmu_P0kzgN-g,211 +torch/include/ATen/dlpack.h,sha256=t6KwpstfIp276qy7g2hrk6jgmyXOdrDTGV6KIG3zy70,6911 +torch/include/ATen/functorch/ADInterpreters.h,sha256=-29jJLOGOyQb3oEA9TC5VEJig-INBgJiYAoh1GpUWX0,1560 +torch/include/ATen/functorch/BatchRulesHelper.h,sha256=a5Mri494_Xx7aICEDp9bv4NjrmWHO_isD7sNUfwc_mE,18496 +torch/include/ATen/functorch/BatchedFallback.h,sha256=bVftski7cdzVm1XocpesCC01mKpuw10BK7f_MDUlV3U,3439 +torch/include/ATen/functorch/BatchedTensorImpl.h,sha256=vNojQh677EbClhZ7HrSk-uLtNxFalT4F3IoR2UAyN2w,6380 +torch/include/ATen/functorch/BatchingMetaprogramming.h,sha256=5fR6gjn0yIWbP_RvzWYNzr_wnWy475wcwk09fG6tDig,4990 +torch/include/ATen/functorch/DynamicLayer.h,sha256=x08NiFNV2idAtbSoF4ICsWbipyPFQ0iIHBn3DAK-O7g,5561 +torch/include/ATen/functorch/FunctionalizeInterpreter.h,sha256=1wgCr1aYCHCafvUISKOkRq4UKq32Xb2Bu5vAVhejnOs,907 +torch/include/ATen/functorch/Interpreter.h,sha256=J-igvAdVWpnew29GCDhWMeWJH2IxLQFP3lHZFkp5ITM,7887 +torch/include/ATen/functorch/LegacyVmapTransforms.h,sha256=jq0i0UfU-lzmvclKqLfsTlxi3lkeF84dQ5l1VepHUEk,8252 +torch/include/ATen/functorch/Macros.h,sha256=KVN3mhngmrlRkvlHdP-X28YRu4zrvfxVBW_j9PFoR-w,50 +torch/include/ATen/functorch/PlumbingHelper.h,sha256=bMSo839RGNDSqU2AEn1Y6GjnrFGhvg39PGfh3uwcTc4,2868 +torch/include/ATen/functorch/TensorWrapper.h,sha256=39MxwjK65pgDP_NMH2JhceWG3NuUjG9-FCYzTtqvu0c,4026 +torch/include/ATen/functorch/VmapInterpreter.h,sha256=n_Qh7KijuFE9YImBryejjToM735303IN1K9LbwchexA,957 +torch/include/ATen/hip/impl/HIPAllocatorMasqueradingAsCUDA.h,sha256=47pJM09o3ljXjP-_iBQsXkMe1FMcdDU7dT18E1L9Pw8,997 
+torch/include/ATen/hip/impl/HIPCachingAllocatorMasqueradingAsCUDA.h,sha256=lwoGk5s7GA1FSagtjE0T1C0xEnhNbWWia9TOu3sbkg0,517 +torch/include/ATen/hip/impl/HIPGuardImplMasqueradingAsCUDA.h,sha256=XpcgAJ0KjEUP4uZ5NgDJp1VnGgqtllm-hIGtEUSaEFg,14870 +torch/include/ATen/hip/impl/HIPStreamMasqueradingAsCUDA.h,sha256=Xev7D_h7zcX4gIS8tdAUSguenUBHVaak5MFMbE6Bj_I,4514 +torch/include/ATen/jit_macros.h,sha256=P9m1lVGf1syAvF20g-tKHFIc8eGCp8gO2rV5Fem9gfc,230 +torch/include/ATen/jiterator_macros.h,sha256=X8fFoFw_LAws7_Hw5k1APlSvvu03ahBZhGYQ_Qpfvi8,1506 +torch/include/ATen/miopen/Descriptors.h,sha256=fzctcbp90QDQfAYScC4ExDxavJJq5YQdPAH25i-_BOE,4761 +torch/include/ATen/miopen/Exceptions.h,sha256=iphmGt4foYp-oHbK3ZLW3vblofLb83gNnHY506HS5o8,1076 +torch/include/ATen/miopen/Handle.h,sha256=X4mpxUvHXTiqmtmKlD-cPHYMHb5u-XF0U2tPJr5k_0c,141 +torch/include/ATen/miopen/Types.h,sha256=P1iunZ2QG0vFpb1QON-4Dn199S_I5rppfdoJmnjNAiQ,233 +torch/include/ATen/miopen/Utils.h,sha256=-NuO-vc2fpcO3dLawvuoRnXRibX8W0qzYxW-31BCQGo,401 +torch/include/ATen/miopen/miopen-wrapper.h,sha256=F3RQZuVLB6BxC5084QGXs1o3vrehQdZpnzQ4TE5fLLc,41 +torch/include/ATen/mps/EmptyTensor.h,sha256=TPxQrwpwwGBHACMgiVKrtE6Q_oxMOFR-CclSREAelWQ,760 +torch/include/ATen/mps/IndexKernels.h,sha256=34PoNuVT-6RMYHgOhHFfPrVpTfUjLxIf7d4Vy2w4_GI,25216 +torch/include/ATen/mps/MPSAllocator.h,sha256=7RUvZQ2j2y1HE4MLXyrUb1md4_1qsWRo_hMHCyV-lsc,18593 +torch/include/ATen/mps/MPSAllocatorInterface.h,sha256=SkA2-Y1NGRaQoH8dN5U7zamxi4MQyWtBk9LDpmThj1M,2700 +torch/include/ATen/mps/MPSDevice.h,sha256=UH1DA-oLBP1SEz0yf1sXdIhW7RLcvR61vx3ZZY-tBDc,2088 +torch/include/ATen/mps/MPSEvent.h,sha256=T2uUFjesoudny-Hnns8A7WJBqMQZwURJdldbyku-aws,3547 +torch/include/ATen/mps/MPSGeneratorImpl.h,sha256=bijrI64eFCwKFg_Wms4Kfdl173C2PLJo6k5SGDG78Xs,1540 +torch/include/ATen/mps/MPSGuardImpl.h,sha256=3i1m3A3JIVq0qWY99zw7P4GT7p44slIGrGP1KaE0J20,5122 +torch/include/ATen/mps/MPSHooks.h,sha256=piQOmj-OnMwoB8a6HAkrNUvfhCqq1sgwiraJs5ysR2Y,2052 
+torch/include/ATen/mps/MPSProfiler.h,sha256=wy7wAY9SDr7DR__qpbmt0AM2TAk-GkVh63D3O9Ixqwg,17003 +torch/include/ATen/mps/MPSStream.h,sha256=4mrco9iXBmZEfrsD0qfYb5UsbMtyMXkGRN9pu72Vjsc,4273 +torch/include/ATen/native/Activation.h,sha256=jrY1U9O_XM8qRedGQmon5nXXZK1-7761XvtvqqN69qU,4270 +torch/include/ATen/native/AdaptivePooling.h,sha256=XOUmQ1rgKS26V64Tjz4GQsLgEzumWbXeIT6GBacszhg,2436 +torch/include/ATen/native/AmpKernels.h,sha256=5-R_s1Rh1YgWB87s_wCyU2XRH6vpaMHCr1SeH6RaSqk,619 +torch/include/ATen/native/BatchLinearAlgebra.h,sha256=KnVkg71cA5UyMi9QKITJtdKA-VZ8LoqWPX_30nQUvIs,9922 +torch/include/ATen/native/BinaryOps.h,sha256=SM3XVUlHxwsSoyjF_NdSGX9jDVSm-kIEpjBgv8ICdUQ,5993 +torch/include/ATen/native/BucketizationUtils.h,sha256=4tqNZKROjLx22dpvRl1Rrv4XQSV1lc5Gz0OTuR-Vjxc,7789 +torch/include/ATen/native/CPUBlas.h,sha256=ZdQYUUkuNVwIryxzvksq19CfHQrwyjcGqKkdznhROJk,6729 +torch/include/ATen/native/CPUFallback.h,sha256=jOWihTOutM5FdtYGRU9BiKJgH5ovt8qijTgMFwIFAcI,2413 +torch/include/ATen/native/CanUse32BitIndexMath.h,sha256=gHAOodx1KsektVgmllOKAX4n1QvGJfKyzx8qSGE8GDk,242 +torch/include/ATen/native/ComplexHelper.h,sha256=btmQjpW5GwDNmwc4vJA5d8HEeDf9sIItsucn5CkHUxY,4048 +torch/include/ATen/native/CompositeRandomAccessor.h,sha256=BMz1QA7muxCi1EoKhTSfzjI_Zsr-EM6HvVfIRrKamBI,876 +torch/include/ATen/native/CompositeRandomAccessorCommon.h,sha256=R46csHYWaGheNhcj3irxcvZv2RfTYMwVaeTT-hQp_08,6733 +torch/include/ATen/native/ConvUtils.h,sha256=P5SVxP_WF981FdzmBIDAWXYBi6qEprLXX1eCQD3dyGw,19584 +torch/include/ATen/native/ConvolutionMM3d.h,sha256=LPOz-FenTJ6sch4u2UYsmEfXHDC-qDvpLFNCV_wXDsg,340 +torch/include/ATen/native/Copy.h,sha256=GYvJ0m1DoSgVv4I8AIwIZJNsby9x1CZrwA9qiww-98w,373 +torch/include/ATen/native/Cross.h,sha256=YuhuqDz9bvGFqUJp2K3fh4LEd63plfrK1HkKCpnEHi8,260 +torch/include/ATen/native/DilatedConvolutionUtils.h,sha256=s0SPWkwNQEaPiBwa1OkN1bNtvhfdRkvAJ18Z-nqlN5E,6402 +torch/include/ATen/native/DispatchStub.h,sha256=YzHVNAe6pUXj9xaDCQNsnYGFpxjUPxET-l7Wz2lTuiQ,14191 
+torch/include/ATen/native/Distance.h,sha256=u0wUeUc7yOr3eog8qwQcTkaOlXqt5mYy4eYsk2Ql6dU,724 +torch/include/ATen/native/DistributionTemplates.h,sha256=GB19RAk89WYIvcUm2kTLyh7U9Nj4SpA0BSD9cdmeui4,18524 +torch/include/ATen/native/Distributions.h,sha256=JXxBVsNd3O4MqSxmkGKidVhhyUddpK0V9pTgqicmukQ,21566 +torch/include/ATen/native/EmbeddingBag.h,sha256=O_g3_DHVVp93oCfUmdt72AX0zZXRK-6XlVaJIktcBRA,5215 +torch/include/ATen/native/Fill.h,sha256=gv7QIvTW4Q337qi4Lcm_qZX-V2P_Z3DoZieAtn-88CU,397 +torch/include/ATen/native/ForeachUtils.h,sha256=evmoWF3AT3L2Y2NKJ8BDDIl7rgqXeZaxmKAReJCu5ho,14691 +torch/include/ATen/native/FractionalMaxPooling.h,sha256=gsIZbVNlbraEQLJHz018Yfwo26QrrkY6gYoJ9bln9pQ,2161 +torch/include/ATen/native/FunctionOfAMatrixUtils.h,sha256=VkGp6Qn2bQuZCm1Psgk7xEkDUggfSXS5hE4G2_37WCI,389 +torch/include/ATen/native/FusedAdagrad.h,sha256=R4BK1yUoaGpKQ9QmKttXpR2v5RDt6rx5p_0lNSiJoGc,496 +torch/include/ATen/native/FusedAdam.h,sha256=qqWW67hS2Nf-e3qyARw0IoAQDvIA2BovghW4H5YF_14,684 +torch/include/ATen/native/FusedSGD.h,sha256=Oj6bKqLHeRT1JZCR1QX_Jb4moI1y3zyJyJGlujfWKK4,517 +torch/include/ATen/native/GridSampler.h,sha256=s9y-a59QhVLK5UNuJ6Xuy1qeUuinKR-p3Z-BSO0WYmU,10407 +torch/include/ATen/native/GridSamplerUtils.h,sha256=H3HEU7mlnSzk_H3ON_LEZOflWM23HKm9PU6ZkO2N9go,3499 +torch/include/ATen/native/Histogram.h,sha256=Z26uGA0I7ZHm1Ztb0b3lQWq-xmazRkTEx0subO2hovY,748 +torch/include/ATen/native/IndexKernel.h,sha256=EAAZkWVJuppJTFYpLGm8ciN7RtfYgRyz4gYv_hJJXFg,1714 +torch/include/ATen/native/IndexingUtils.h,sha256=LoXJ6ibkFt2jhAWyXHivFy1gy7Sj2Ire0iT47u62j-Q,5570 +torch/include/ATen/native/Lerp.h,sha256=IeOOeCmpL4gJVHEErIhEK6vvXHvcbUgpOi5tGWOJ7xs,1463 +torch/include/ATen/native/LinearAlgebra.h,sha256=jBgDCYhT3E6QPPMgs3rHrMGOmlqpM_7OpptftiXPPS8,300 +torch/include/ATen/native/LinearAlgebraUtils.h,sha256=xUnsW-wiYZjz9K1igCc-31VubGVUGuelUi91Ts47D8s,26331 +torch/include/ATen/native/LossMulti.h,sha256=b5N9Puce5fIQLBYQw41_01aepSuLfbe6ym0tLddh2ZM,2114 
+torch/include/ATen/native/Math.h,sha256=P9BcdcU854GUPEB93Ti0gITtUOVD6Ullo6eikgkeH9Q,141344 +torch/include/ATen/native/MathBitFallThroughLists.h,sha256=Zqlt930nF4Gf0VDFlBv19LgxauGaOIP7u7Y16wWvKSk,4136 +torch/include/ATen/native/MathBitsFallback.h,sha256=SFytbA2JZtOFjHTIr5JcaHsZpTJlTClW911LVRtO68M,7308 +torch/include/ATen/native/MaxPooling.h,sha256=ruAurq_n29oZ-ogLKFrtPKVfjt6qvA1PSrNzb9O14a0,3269 +torch/include/ATen/native/NonEmptyUtils.h,sha256=GssuIa3_Nbo70prkoJCxp8O7_6XqsNkAossTxmjuXoc,599 +torch/include/ATen/native/NonSymbolicBC.h,sha256=HJycKTcID9FKdMVig0iY1UgTFBwn3jJLSAgRSjJ4zC8,2876 +torch/include/ATen/native/Normalization.h,sha256=ZyTT6pn60B47EoJqDT4YwxBomd1F9m2PhfxMKyPm4XQ,554 +torch/include/ATen/native/Padding.h,sha256=hddAXRR5lvpL-TvQ6VYCSoNCyCdDUChnS17CMz-XQVI,2019 +torch/include/ATen/native/PixelShuffle.h,sha256=eR377A8l82A9nyzqVdsKwkrWOq7DQVI_CPl2ZgS-JR0,1756 +torch/include/ATen/native/PointwiseOps.h,sha256=-ZCD27h22Imw-Fffq9l0Zyc4GhH5Oz5-0F_7mvsvLqc,786 +torch/include/ATen/native/Pool.h,sha256=dD3Ew-dNv6gNMiVoZNBrWUw_pR22gIlJg5Nq0d2nFYk,13090 +torch/include/ATen/native/Pow.h,sha256=ce5mmmvhq910S6kveL1O6XpU6iFTIYWZ-mWlYDOPudM,1715 +torch/include/ATen/native/RNN.h,sha256=JX0WRObE8Z04hOZLtTikqZUYE5LagQH3rWkZvcLp_j8,2521 +torch/include/ATen/native/RangeFactories.h,sha256=Hipw9FPv9BiYw8yt2OXIRAQKbaGkiuGwQ2U2oB0bGOo,356 +torch/include/ATen/native/ReduceAllOps.h,sha256=d-elOBlAUrCrveYEYin3JqBGgcs6rZRWFuYp88nyqXg,399 +torch/include/ATen/native/ReduceOps.h,sha256=UWI7TDAaX4eIou7GAM8ilq9mzG_YBTWir0PPYYPgcfw,1784 +torch/include/ATen/native/ReduceOpsUtils.h,sha256=QMcZx2hWhdZC8z9ME7XoT9YW9oHLucBSreiTvBLnclk,16395 +torch/include/ATen/native/ReductionType.h,sha256=4in61M1dx5diKR_S10jR1FLenWIv_V0YF8-4qNy6CBk,1139 +torch/include/ATen/native/Repeat.h,sha256=nV38kH1j2_627DlJdgm_hiPnK_4QhOAbUtumjm5U8pY,1477 +torch/include/ATen/native/Resize.h,sha256=t1-pG0U8Rr49pKxBl_3l1oV_3PxMxYH7AN_VScaKSmk,6580 
+torch/include/ATen/native/ResizeCommon.h,sha256=vyIXmldyz9WCGP8cwwdZ7To0xA3AdlgsYsKb2DZ9jgs,2491 +torch/include/ATen/native/ScatterGatherChecks.h,sha256=toRByV-u6rlSImVOsq5mHqQ8rH3lBSdm4zHSL2tUpus,3676 +torch/include/ATen/native/SegmentReduce.h,sha256=KfPo08cz9SIWMZyqXdoM36kO9gAVw60978-nBJO3awE,1269 +torch/include/ATen/native/SharedReduceOps.h,sha256=J8jXEUdosM1vPTSPLZgQ6Ygo6ujsuXtC6Gb8minTLP0,15989 +torch/include/ATen/native/SobolEngineOpsUtils.h,sha256=cxzZBRujtqn8MqU8oHtxqVu9Z94tr9jLD3f8YQlIUNs,1835 +torch/include/ATen/native/Sorting.h,sha256=pOjncf_4DapbmsWeIlamKQJ7D0Z2xw-Ylea9Ke7zZag,618 +torch/include/ATen/native/SortingUtils.h,sha256=i3rlh9XmYm2DnlDmvbqvd3jym6slpXuYZszscT7GzhU,2672 +torch/include/ATen/native/SparseTensorUtils.h,sha256=srkJh_TAtVVnUPuj7vJt6wQKdSk3yv6E-gDlDy8gR4U,6500 +torch/include/ATen/native/SpectralOpsUtils.h,sha256=JyEHAbwPE6OyPE5-vKArHbqklOI71jzU7Fml74Cy7jQ,3274 +torch/include/ATen/native/StridedRandomAccessor.h,sha256=cPMgSAbNq5QR8zwKYPa9mKxRl_1xPQlAsUgi-T4xe6Q,6835 +torch/include/ATen/native/TensorAdvancedIndexing.h,sha256=bCUsglJIMBhiQsgVgZzCs_w4zHT8KCWTH1qEJ9NgInU,2782 +torch/include/ATen/native/TensorAdvancedIndexingUtils.h,sha256=S-WkqCZZf0zQPGghIyMHQTpWxJdvegpbPg3wXFyoPFo,3071 +torch/include/ATen/native/TensorCompare.h,sha256=iwfjJJ_vb9qah9iIKrvLlRd5kFfFTW2679XxSnMCEEA,1490 +torch/include/ATen/native/TensorConversions.h,sha256=u06KnAApQIGUpWclDW1deUXPoXrSJ5vkIz62Orfu4l0,819 +torch/include/ATen/native/TensorDimApply.h,sha256=HZglRNLPFRCUcEjywt9Nq8iIlt3fwjO_fI24RCZUKaY,1739 +torch/include/ATen/native/TensorFactories.h,sha256=hg8ohMoVFct2LoS97A_UKePXYQwh4uC_RwxYIjD5PuQ,5104 +torch/include/ATen/native/TensorIterator.h,sha256=Q7J_N0bs9xO5NSiLO74hLIE5NUIcmr2FmkSS_eLf1GI,46 +torch/include/ATen/native/TensorIteratorDynamicCasting.h,sha256=bHTH87on8BiLGVklbqKLiKjzJMJT-LhZi6rYs_hrdFk,1801 +torch/include/ATen/native/TensorProperties.h,sha256=S2KRCG52XtPNLE30U1z9NEr-lg7GJNCme19ynl5iGAQ,198 
+torch/include/ATen/native/TensorShape.h,sha256=DvpIMYg0jWAWRG6lug38M8s9-uzNLK1c2MfUHcMxFLQ,4348 +torch/include/ATen/native/TensorTransformations.h,sha256=wbnqc9UnfnF-Chexr4e3Eayl94NDPDue3Ibt9P578sQ,896 +torch/include/ATen/native/TopKImpl.h,sha256=3d9fyYLn0l-bo4_h69ZkX7iFFIXyX9XpdYIq6QgoY1o,3459 +torch/include/ATen/native/TransposeType.h,sha256=Ge4VLRl4sgZVIlqyOXjY7WarTsulnfG26a1QTljGWlQ,578 +torch/include/ATen/native/TriangularOpsUtils.h,sha256=U_pmKG-BzKvr5lCzXu_V70vpAdfrklsNCxzIqS_c0ko,2002 +torch/include/ATen/native/TypeProperties.h,sha256=37P3hHIx2SpByQQfOse5RTyJlH31uJNmyz_rQHBXeNA,658 +torch/include/ATen/native/UnaryOps.h,sha256=QSFh5InDaOO2_jRePn5LZrQoJEicLrNhUhqOfn5IqUY,5612 +torch/include/ATen/native/Unfold2d.h,sha256=z9t0BIYg7j5Tq91LkBUfPOGzz7wBk9kfZEqHA5VZseM,981 +torch/include/ATen/native/Unfold3d.h,sha256=u-4T4k0XRQ09rOEOOHdKFGPnTMWb3zSvrx5sa9T6lQA,873 +torch/include/ATen/native/UnfoldBackward.h,sha256=xXtspg2l9zwpgbPXjt2supIFjHCwLtpLbkqwivNQ7_s,3087 +torch/include/ATen/native/UpSample.h,sha256=mChPYWee2az3we2Qro3U8wAhYkW_7pEvbp0KKJ-zXAw,19203 +torch/include/ATen/native/batch_norm.h,sha256=17Kv8b62jdY4TkU2Glk9jouQ0C42R97HcrKnWVar1Yg,1428 +torch/include/ATen/native/cpu/AtomicAddFloat.h,sha256=90ttAHoUfZSaD8x2wACxPWiyrBPiJAuMAzuAb-aQa3s,857 +torch/include/ATen/native/cpu/CatKernel.h,sha256=wDX66Ceym7cm0PbTDT1ucsNrdThVoK6Z8ldJKmzdU_8,307 +torch/include/ATen/native/cpu/ChannelShuffleKernel.h,sha256=7iOJ83spY53U4PAAEID81KBJrr8POhPwYWRLJk9SJAM,287 +torch/include/ATen/native/cpu/CopyKernel.h,sha256=6DtXHSeNNtjgiOSl5LxJ3uYLrRWHgjIXVvcnHPTWcho,312 +torch/include/ATen/native/cpu/DepthwiseConvKernel.h,sha256=9izS3h41SEB_vJQkJArIfuL6WpGshwdJh-emc5_rBRg,471 +torch/include/ATen/native/cpu/DistributionTemplates.h,sha256=VqfAY2uzPFVFO-C5voXxUl5jcikk7VsczBYH6y8VsJ4,16464 +torch/include/ATen/native/cpu/GridSamplerKernel.h,sha256=7LzNZ2XlSyd2bCf0QtdqCaUjvNo4ZOT28kwQyQVB05g,825 
+torch/include/ATen/native/cpu/IndexKernelUtils.h,sha256=t5Rd38zBqCjMVf_DXuz4UzgKtCFlSyVbdUYP7dx0w08,2964 +torch/include/ATen/native/cpu/Intrinsics.h,sha256=Wb3_syiw1pcSafKzSTvrsYFxA9H5BnNhkfTd9Tnc9Js,1212 +torch/include/ATen/native/cpu/IsContiguous.h,sha256=25kf9zhZNhb7mEFvdJK9uNUEOuIs7aWmD36KY2l-8pI,2428 +torch/include/ATen/native/cpu/LogAddExp.h,sha256=Ickd2SGbL7bSxFJ-DklopBbsdr5n-Tt4GQQj-Id1fOI,2446 +torch/include/ATen/native/cpu/Loops.h,sha256=CNK_oIn_bEz-TnMqmUIFrd203ORS4gO0xEJTBCiMGuc,14865 +torch/include/ATen/native/cpu/MaxUnpoolKernel.h,sha256=PnZPzRGT_YoGU8rMBWFq1Ik8L0fBJG8HpcD2NYYOdug,308 +torch/include/ATen/native/cpu/PixelShuffleKernel.h,sha256=W7vXj1S0Xn4ZujOzRMWwmRSBxM_czXkhPnNU6H0gSME,322 +torch/include/ATen/native/cpu/Reduce.h,sha256=Szb6J3Uxuk5IjALtpXfuSzm4P1hnnbU9T7fc_EbqCEA,12143 +torch/include/ATen/native/cpu/ReduceUtils.h,sha256=9q5SSxDj00x3kSn5hbDj5BKmS6X5zx3Xe8hkxVvPs80,8810 +torch/include/ATen/native/cpu/SampledAddmmKernel.h,sha256=0RURrJHUEQOfotfF05Yt8IL0s262iPmGYPAezRI_MdI,323 +torch/include/ATen/native/cpu/SerialStackImpl.h,sha256=oaTFhqqi8En5y_0MVsFY-M00p56MsXbF6W7L5xWNmFM,5458 +torch/include/ATen/native/cpu/SoftmaxKernel.h,sha256=UBNcP3lmiaC7l3D28FIAHwZbPs9DuCMzRL0iN4O2qRc,943 +torch/include/ATen/native/cpu/SpmmReduceKernel.h,sha256=N5ZV1t3r70FsGPsGtmTSwhG4oqMhgEGfvph8YRBqhL0,1358 +torch/include/ATen/native/cpu/StackKernel.h,sha256=QndLKxyfTo4jBDmd-lgdSf9WSKCzTbfsgpat7mJzre4,309 +torch/include/ATen/native/cpu/UpSampleKernelAVXAntialias.h,sha256=-JUd1pJNiIKEPcGr4tMD_7ydlzq6mc1ASNB2K99K3-M,58172 +torch/include/ATen/native/cpu/WeightNormKernel.h,sha256=8J2ci81CY317W9WfyYKM_635gfWAMGRbJDl3Gh_rJ4w,552 +torch/include/ATen/native/cpu/avx_mathfun.h,sha256=myM3TvQ8ENSdsLchEId1advZBrVtLGdH75UcPDFS6Gg,17449 +torch/include/ATen/native/cpu/int_mm_kernel.h,sha256=2e3g286Qu4hvv-OvfjIpq02edCdvSyBVj1ZHBFHJgVo,583 +torch/include/ATen/native/cpu/mixed_data_type.h,sha256=7DTJuuLt5Dz6FMBnbjAuaqVA0kDb8pnU75J-emh6YKY,1408 
+torch/include/ATen/native/cpu/moments_utils.h,sha256=H6TTlR7nXAFJaKa7bV0cFpehLqmOfP6S2h6tAQI7eEg,6577 +torch/include/ATen/native/cpu/utils.h,sha256=k31XuDIitVKxvNGXZkmQrhukpwfv7_NQe5oUJf90h6c,7137 +torch/include/ATen/native/cpu/zmath.h,sha256=aeBgFxiT1-w9Z4i4xUZhdURm4wjaG75KUMIWUsAPAFM,6622 +torch/include/ATen/native/cuda/Activation.h,sha256=P5cfZgC8E8x-E4IHrTGWJsp-bvSfd93iEV0zUbA3L5M,548 +torch/include/ATen/native/cuda/BinaryInternal.h,sha256=BpaOkeNO7HljHl1xUbYVikQxnX7jKRZhY7OqCadLTkw,1237 +torch/include/ATen/native/cuda/CUDAJitLoops.cuh,sha256=J8oFKg3vSWE25kIabNHiV6at35kBVG1WcWL9JK245kI,10804 +torch/include/ATen/native/cuda/CUDALoops.cuh,sha256=oWqQsH1be0HZI0I39c4DR6QS7D0k6WZC0RdWVfaWJEo,11341 +torch/include/ATen/native/cuda/CompositeRandomAccessor.h,sha256=XtnBCyL15XJYIjUN0B2DQLkeXDqFsn5BPd6U-jj1-TM,929 +torch/include/ATen/native/cuda/Copy.h,sha256=MkXCb-zta0imFs_rA-CzVWXa3rqh4cZ5q8jZriuIxgM,162 +torch/include/ATen/native/cuda/CuFFTPlanCache.h,sha256=w4di6bqSH31BZfCcn_JTTlUZFJwpYiaEnhi7w0KXRVQ,17961 +torch/include/ATen/native/cuda/CuFFTUtils.h,sha256=qP2OwOGhPnEbQIm09Rcx_J0Svhlvk-CjuEY2EOtskJA,1863 +torch/include/ATen/native/cuda/DeviceSqrt.cuh,sha256=AdZWqFvanSwRhnzlFK9vyhLDXoMpy9gwfW9dhRn0xN4,573 +torch/include/ATen/native/cuda/DistributionTemplates.h,sha256=NwQknNGW-1PHRaE3w0MVZG2vhNNTq5HfUuALadDdHzM,28144 +torch/include/ATen/native/cuda/Distributions.h,sha256=12qzUlaPXjPsS9aqFt8o2r2bfp4IPLnZb0sBFH9AedI,641 +torch/include/ATen/native/cuda/EmbeddingBackwardKernel.cuh,sha256=bk76DSvQvBTgsXU0RHb9D0cxKlIlfQwcpDV9x8-SuTs,543 +torch/include/ATen/native/cuda/ForeachFunctors.cuh,sha256=ffNOYb1OdrVOzlbYzM9cgOXC_qQxmS26m0-fmt1iYRA,22505 +torch/include/ATen/native/cuda/ForeachMinMaxFunctors.cuh,sha256=UxN958DDHsTZXfypAx-jsBPcj-M4DM8sXJYcC18oTns,426 +torch/include/ATen/native/cuda/GridSampler.cuh,sha256=jH4SZXiEoTfOz_CJTSFCrkv1B40rCNwxlOuxEiuFVW4,10987 +torch/include/ATen/native/cuda/GridSampler.h,sha256=P_B6yQiM4A279_cKWZz9DEFL397xWjNcz--8pzIT1T4,1157 
+torch/include/ATen/native/cuda/IndexKernel.h,sha256=Nb58RQW5mPLguGZTgOGRtBDqXKDKCbLEcFMET6hM36k,350 +torch/include/ATen/native/cuda/JitLoops.cuh,sha256=YESWWJLTTBpyKLuVV_PZPXmZE9d5wbREcDNXHDDCIFA,6909 +torch/include/ATen/native/cuda/KernelUtils.cuh,sha256=DSfDCs_xdIxQTjqqHHHq2-RU-64aR8F_PDah2mVDuH8,4836 +torch/include/ATen/native/cuda/LaunchUtils.h,sha256=r0PwPRvBDaSUHEQm3CmFMKYM9LxatU3Zr3SYGoNf8bs,306 +torch/include/ATen/native/cuda/Loops.cuh,sha256=VsEHK_Mh4ovcokiikNIGKU8hxciZ5iqyIHJoWSemgAc,11605 +torch/include/ATen/native/cuda/Math.cuh,sha256=tMPqsrs6fu2f6OBAgOqCafr7tMCmJNa2QDEWobyeJso,122806 +torch/include/ATen/native/cuda/MemoryAccess.cuh,sha256=c4d-LKj4Fmg5fSNzD8DihGq8ypVZILFQj5AUCHzsI3I,13714 +torch/include/ATen/native/cuda/MiscUtils.h,sha256=Az-goaDi_bSPbHvbKtODEZcfKCdviw3XLalPqtLi24o,958 +torch/include/ATen/native/cuda/MultiTensorApply.cuh,sha256=dPtaHlZosTPQGCxTIKRq5OjzCqJPV1o6GCIQsi5aoFw,13880 +torch/include/ATen/native/cuda/Normalization.cuh,sha256=RGFysyf1S4jipmmi15G6w0aTE_cVo_tU84D3gnBJKk4,74250 +torch/include/ATen/native/cuda/PersistentSoftmax.cuh,sha256=EkNpM8roweQJavNCMTBwzoc-7609ahY3Z173gNH96h4,17932 +torch/include/ATen/native/cuda/Pow.cuh,sha256=nmuniiZfa1G0eEFSg8FLDSTgANz92vZJXMyOdlF_2go,2170 +torch/include/ATen/native/cuda/Randperm.cuh,sha256=trCMbiV-zX4jH0YcRGxifLW7kme8jO6bLxSQbI9QCpM,2119 +torch/include/ATen/native/cuda/Reduce.cuh,sha256=JB9tfXG59Ah1YB_M4ECmaXwkAN1cr8HhpRCwRax1uBc,48222 +torch/include/ATen/native/cuda/ReduceOps.h,sha256=qmtqmP3p0nTVFynzZ8EsBqvM1xlLOhLhzmJnmrjBuZQ,500 +torch/include/ATen/native/cuda/Resize.h,sha256=Ew-uV-oAOw033de8U7BjxduS08MD3k1DmXxnpkICQKs,1556 +torch/include/ATen/native/cuda/RowwiseScaledMM.h,sha256=2B0oCc2MnfjtCyqCpwFGRrETu3V0hl1m6WTN1sO4p7o,361 +torch/include/ATen/native/cuda/ScanKernels.h,sha256=YwFzB36CbHF6ELtQg5_QNq541L-YNT_BkabZwFcrnaI,779 +torch/include/ATen/native/cuda/ScanUtils.cuh,sha256=B_f6JMvrEUfXX5T7acaqzU2gXR0-VsU2_3_TB8rEnDI,20219 
+torch/include/ATen/native/cuda/Sort.h,sha256=mpeobYqLP6-tToMrfwyr2y6ei_pIAHy0neX-CZ3ldMk,409 +torch/include/ATen/native/cuda/SortStable.h,sha256=yNg75vcvcJt78I8250zSXn_H7UW3uZBxPlKQSbB0HqU,464 +torch/include/ATen/native/cuda/SortUtils.cuh,sha256=feIzD-t6x2UKiJY3J-34TrSHm-x6drBq6YMXoORhIZo,12377 +torch/include/ATen/native/cuda/Sorting.h,sha256=tyOJg167vdu5y2_oahkDmQMkxE2wWIAA_Sd1-mxeMmQ,408 +torch/include/ATen/native/cuda/SortingCommon.cuh,sha256=KkwDYkYTT6MkQocoNo0F3uMZZeNKhAMpM7yic_2z6dg,5373 +torch/include/ATen/native/cuda/SortingRadixSelect.cuh,sha256=MMkusWH1l3qmMQeHSqcWXtXQ0-DKGBPCbYJtMyNdk_E,12339 +torch/include/ATen/native/cuda/TensorModeKernel.cuh,sha256=5lZXByGjt6-xQrS5Euu3ZxJh_hyAzPSLTlRIVwUu2Wo,14512 +torch/include/ATen/native/cuda/TensorModeKernel.h,sha256=Ts6qovjkJSHzmmJZMdC7i048cYRdGsXZXJMJSGIk1hk,431 +torch/include/ATen/native/cuda/TensorTopK.h,sha256=ES-i6xXC1ktxDsoILkqO7fdXFOhAO7TGCZz93pIAVNA,266 +torch/include/ATen/native/cuda/UniqueCub.cuh,sha256=9IrI9RFom8G3OWESkqn_eDOhd2nF97ZBICXxlU-0lU0,352 +torch/include/ATen/native/cuda/UpSample.cuh,sha256=yxYvvc7zUrPlL8SSvnmN0hXWADbRAdjarcZaeIuxxdo,11649 +torch/include/ATen/native/cuda/block_reduce.cuh,sha256=xfPUZa9rUMlKReSIRPUMfSMxu_mu2837Wn5ArFGnPmo,4278 +torch/include/ATen/native/cuda/fused_adam_amsgrad_impl.cuh,sha256=-tasCPQgLI1CRSOaLNMcbaQjQgChexfe5wnUqtTHJRA,1054 +torch/include/ATen/native/cuda/fused_adam_impl.cuh,sha256=JhAzrDq6z-eBkJ8cfGKjsRiCCwWwPU9NByVbSIgzlWg,966 +torch/include/ATen/native/cuda/fused_adam_utils.cuh,sha256=s14oS0XrJi_hp7PLP5pHWv4n5OYV1IqL9J3sHkEj-R4,7022 +torch/include/ATen/native/cuda/fused_adamw_amsgrad_impl.cuh,sha256=wgV-mxmZwpJZUYIrzSyQig-Jder43IvYfBi_7CyI8UA,1056 +torch/include/ATen/native/cuda/fused_adamw_impl.cuh,sha256=5nuLEUlcs9lYtZefTMhU54AaWUCbeaeGoZbWBXxD1V0,968 +torch/include/ATen/native/cuda/im2col.cuh,sha256=Gl9vUMVH5hsLZY4OIY5bfv_eQOiR5p0Z0hq3esns564,9885 +torch/include/ATen/native/cuda/jit_utils.h,sha256=1O1--QUX0FAtGgRe4YUCx-9mpZF9n0V89zVtUF6TZv0,6095 
+torch/include/ATen/native/cuda/reduction_template.cuh,sha256=C3PG4NrpuYT-ksixtachGHHj_ZBu-XLE_KtINCypuQs,21668 +torch/include/ATen/native/cuda/thread_constants.h,sha256=zIqWk31kGD_-TgB0Zokq_PKYmtbNILZzhPMueOBckz0,611 +torch/include/ATen/native/cuda/vol2col.cuh,sha256=j7bynLoRTWztCz9CRKmMfcMa7fCgwbA2SFQayvu0nng,8118 +torch/include/ATen/native/group_norm.h,sha256=rkPRett8cs5FdUXbVwVoc1qvN7l0KSrKn6MZlOayXHI,907 +torch/include/ATen/native/im2col.h,sha256=0Ho5Km0ZJvPbhKmsOPcE1PuYHaUaaNGrDlsNWaYhivE,5223 +torch/include/ATen/native/im2col_shape_check.h,sha256=swdSPYX4XPJVeJS2Q6JLNezzYtosJHlnSr2QzoNQcU4,6899 +torch/include/ATen/native/layer_norm.h,sha256=ldeqXN1Wbqx4vcskCngZ-1dpiZ8IcsaRy72EjqCo38E,2965 +torch/include/ATen/native/mps/Copy.h,sha256=_YKq-Fg6ODQ-TAPEjYrSqeeY7D7zL20cJFZICv0rbTA,295 +torch/include/ATen/native/mps/MPSGraphSequoiaOps.h,sha256=Ow1llORYlQs1tD2IQALn-9qAzLoYe8AVAVk921eLb2g,1355 +torch/include/ATen/native/mps/MPSGraphSonomaOps.h,sha256=U_9Nk4r48N8J9fixvnRQhj1kdxSkICgtdAZDDSu-gi8,2504 +torch/include/ATen/native/mps/MPSGraphVenturaOps.h,sha256=zP3-OPUSy3MPUU_7Wf7BCtKUjc5U_9EeKvcEYPglKxw,12735 +torch/include/ATen/native/mps/OperationUtils.h,sha256=PeGNcGQSZm_NnHj3_qkOnYph6VsZoilPiqTruOnVcdY,17882 +torch/include/ATen/native/mps/TensorFactory.h,sha256=-sjtEk0Ehlgo7IYeWOONV0AesCQOvLgwz48Qj55lufc,759 +torch/include/ATen/native/mps/UnaryConstants.h,sha256=Ic1ZA1ayvhwakIHPmnG9Eyp3vLRdeAvkfGaRz0kBo4c,2799 +torch/include/ATen/native/nested/NestedTensorBinaryOps.h,sha256=IQsCcVTz8D7tzno-P3AQ6wLEiXS71FdE1Hfj17BEbI0,414 +torch/include/ATen/native/nested/NestedTensorMath.h,sha256=Khaojzeju07H0CbWutxxlqzKs2Q5kjKLptd4x3ATO9c,2723 +torch/include/ATen/native/nested/NestedTensorTransformerFunctions.h,sha256=eR6Ube4RRUC_Soha4LHCSGkRbwNsbLg_5mxGVIYva34,2806 +torch/include/ATen/native/nested/NestedTensorTransformerUtils.h,sha256=mlsvJTiQKYFNwHSyXoaxzQJojcpJzlOsuiPgEhgr3Ys,1378 
+torch/include/ATen/native/nested/NestedTensorUtils.h,sha256=5HxC6SuY_VNyfy5HlwGpBk6Sj4edeTHRAjmT14SVi5c,15204 +torch/include/ATen/native/quantized/AffineQuantizer.h,sha256=c2_S8goAaIWs2-l1tWchdBZM5a_kc3zXb7F0LgQH67s,3696 +torch/include/ATen/native/quantized/AffineQuantizerBase.h,sha256=_QFFrK5F78aZY9XEpv8kCqd2DRNy7rbd7U1RDgGU0aE,1460 +torch/include/ATen/native/quantized/ConvUtils.h,sha256=TDqUwX4q3fcVmYD4XLcYEwzVA7QMgAYVJnb6_4QnV-s,2240 +torch/include/ATen/native/quantized/Copy.h,sha256=IPBQcU7gPA6UsoZ0LssBH6zNrIK-R08byy7KLnpQD3c,169 +torch/include/ATen/native/quantized/FakeQuantAffine.h,sha256=J3Zt2zhwoUWLY_FUjq1uxIEUpFtd_E-osh8GPLAtwQ0,1792 +torch/include/ATen/native/quantized/IndexKernel.h,sha256=xbgIIP4_ERQIdeMjFsiRVv2fEsJfaA9dUwbIgYjTOG4,567 +torch/include/ATen/native/quantized/PackedParams.h,sha256=tlgLUDIx-LXJEmA7CbejFLx1bWP3UZouZ9v9MZ_4ZCs,4739 +torch/include/ATen/native/quantized/cpu/BinaryOps.h,sha256=Sk9Ynu9H2sZSOPDMfe1skLDtsvtlEbofdbYP4Fd74Rw,173 +torch/include/ATen/native/quantized/cpu/EmbeddingPackedParams.h,sha256=1Eud4Yr6DEQ4ouMcW-PB3wF3ZLITAcnKut6kjC0oTdY,921 +torch/include/ATen/native/quantized/cpu/OnednnUtils.h,sha256=v-n0r3UixAgn_aWyKLCbfsKyx9nuYgY8L7x9DRL3jrc,13768 +torch/include/ATen/native/quantized/cpu/QnnpackUtils.h,sha256=O-g1wavt-nT3kA9trtEFGG-nobP4dzKkKYz7EJHjXD4,17723 +torch/include/ATen/native/quantized/cpu/QuantUtils.h,sha256=CilOigJWOnTBhAAORiKeC0jecm8Fv3OTY1uIZkeqn2w,8361 +torch/include/ATen/native/quantized/cpu/QuantizedOps.h,sha256=XhnrF5gdFiL5_y2Zy-RZ8k-RNnDMbRqzIVktQkbsze4,7861 +torch/include/ATen/native/quantized/cpu/RuyUtils.h,sha256=83PvlzQs-kUbWBT__Wk_sEYH-7eFPeviJIcGwjaUC1Y,385 +torch/include/ATen/native/quantized/cpu/XnnpackUtils.h,sha256=AVtaDPbS4PHSlcj_UgCNtvy8DKwlIagB5Gqa4dnu2hI,14155 +torch/include/ATen/native/quantized/cpu/conv_serialization.h,sha256=MeFMnFfWHp_YBIgEl6lLV_hGGzUmpAYbffMSsKmelAg,12650 +torch/include/ATen/native/quantized/cpu/fbgemm_utils.h,sha256=lJYK0yA09-5CV8VqQN1WSZeIGhIkPR74O9uNeJ_uE3A,12022 
+torch/include/ATen/native/quantized/cpu/init_qnnpack.h,sha256=ZiH_VsIdp7AG6DNL49ia3_grOjjiQHM3iTKk7TKXuTw,146 +torch/include/ATen/native/quantized/cpu/qembeddingbag.h,sha256=Iaw2p7NiRxT6advkLnpjfMFw5fUn2Jgbqr_CqFt3Kpg,1020 +torch/include/ATen/native/quantized/cpu/qembeddingbag_prepack.h,sha256=zT7mp_4ONdKsddkhtN_qiln_fKrmtnLYbiSsqeHM6mQ,319 +torch/include/ATen/native/transformers/attention.h,sha256=TMod7UCaCUAcN4KX7F_nicOf_2ViezRUsml2izhBpG8,2303 +torch/include/ATen/native/transformers/sdp_utils_cpp.h,sha256=YUqBy2XJXxw6yO2w2ioF-1pY_zIGIW0g5LZPRRxO2c4,17677 +torch/include/ATen/native/utils/Factory.h,sha256=FIzaChh3Vjlwq0sx7qIgCF_FS88V6otLsHwQCbg8yvA,553 +torch/include/ATen/native/utils/ParamUtils.h,sha256=BQrIXLsOh_dyduINcB9w6KWCsVxl9BalttfHl7PGIUo,1188 +torch/include/ATen/native/utils/ParamsHash.h,sha256=4d7c40z9WplS5EyNfBGU-QTnki9ta4M9QxufxTInsO0,3124 +torch/include/ATen/native/verbose_wrapper.h,sha256=3Xfi-C_0uzhaiTnuCN0KgTg3OAAUlIbK_-q9AglaUuY,193 +torch/include/ATen/native/vol2col.h,sha256=yjxz_iCNWzZHX41_Ri_AaQlmqCD8sWqA3ZqA9PZ5l4U,3555 +torch/include/ATen/ops/_adaptive_avg_pool2d.h,sha256=jbqCaP4FQvYNphhl5vxQZD4aYwdsjrNsYyzco1cjWSw,4148 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward.h,sha256=xEqYQCE1iEUtQVZi6MjfbONhLrN-SgiQSjjnaVldJ58,1429 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward_compositeexplicitautograd_dispatch.h,sha256=JeLI_X8mSdsmv7f3ukT7TkBg9Ot4oJLStchOWDwFRb8,977 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward_cpu_dispatch.h,sha256=Vvz5ASAMR98dEswKinTmRtFnS6jb9mEirjmGlx_2528,775 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward_cuda_dispatch.h,sha256=FjbZJMJ-hJc-fYJy7mrq-HOHArmXUpDAr1i1_PhL20g,777 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward_native.h,sha256=vdrp-Dk1uF7ewNq2O8rgHvkNL_RxOK0IxwLCa0bpbco,780 +torch/include/ATen/ops/_adaptive_avg_pool2d_backward_ops.h,sha256=ad5quQ18gYRF58Fwur4_jddUamA3I_pbKRyasKyjq2U,1936 
+torch/include/ATen/ops/_adaptive_avg_pool2d_compositeexplicitautograd_dispatch.h,sha256=eUhdfYXL5lFZYX_w3-c-4VKL1gHZ-yc-JHAZM9Xthzs,1218 +torch/include/ATen/ops/_adaptive_avg_pool2d_cpu_dispatch.h,sha256=9KAZBHTLzD3xgNFL1r8ZfVhMMjBZCv5ctC-IjkFTfeQ,871 +torch/include/ATen/ops/_adaptive_avg_pool2d_cuda_dispatch.h,sha256=_XzGCsZJJxQSDFt8obcbdzm0gyuk6VGQaecK_ot9YnI,873 +torch/include/ATen/ops/_adaptive_avg_pool2d_native.h,sha256=AYeoxxMKZtz2RZuGxv-cXSvQSsYigNbZ3Hjpkcx0d-Q,976 +torch/include/ATen/ops/_adaptive_avg_pool2d_ops.h,sha256=RUKN6V-QyRlAW5oRgIgyUfisXtPDmQJArB6hkJ9QEyQ,1894 +torch/include/ATen/ops/_adaptive_avg_pool3d.h,sha256=F7LpBmDJNaKS5w2YU2hNGPC8YCfYilLn3OrKLQKRR8o,4148 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward.h,sha256=1wd1Ick_f4kZCGkczGqO6Ls-oBqN9h_TDqHVMY2thto,1429 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward_compositeexplicitautograd_dispatch.h,sha256=IPSxlBRDTEOj8EpFcfVMmFsk8RrxyFVkrBVm__03nN0,977 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward_cpu_dispatch.h,sha256=Cd1XxUGabRmbgumz9a0W5PkkLpTxAf22SNE9OU_k5qQ,775 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward_cuda_dispatch.h,sha256=_oTYBZFPBPelRpzzJsqL-Z0TiztPCJ6YhdYyb1aT5BM,777 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward_native.h,sha256=ktQX15Oy9SXna-BouVfJWnZv3QoVXHM9VHHyTCnuAh0,780 +torch/include/ATen/ops/_adaptive_avg_pool3d_backward_ops.h,sha256=fyHMa7Yckm4MeRbICScK_wXXGkTOAFfID26A4JHRr1I,1936 +torch/include/ATen/ops/_adaptive_avg_pool3d_compositeexplicitautograd_dispatch.h,sha256=JAWXab9CmSmvmBr1txqxwK1Fvvm5kR2Vc86geT8EX-4,1218 +torch/include/ATen/ops/_adaptive_avg_pool3d_cpu_dispatch.h,sha256=8ib8k2gXCFUBcJHjwLLarNCGlU9PcT_1B3_QO2VRTP4,871 +torch/include/ATen/ops/_adaptive_avg_pool3d_cuda_dispatch.h,sha256=rMC9fg0uRRKjIdsom01TIQdxDBw5r9wJR6rBKbFxZc8,873 +torch/include/ATen/ops/_adaptive_avg_pool3d_native.h,sha256=96QF-7r61Noy3uNeJqvLAF4LGa7YBXAAOrtqNMeVmFg,865 
+torch/include/ATen/ops/_adaptive_avg_pool3d_ops.h,sha256=7ZABsc4XOgH6fkjP6i847WqWWICwxlxE-Y3NA9-6xzE,1894 +torch/include/ATen/ops/_add_batch_dim.h,sha256=n2bbaXnSI57fSk20Nag5bQU6ABzfF21q72sx0I4lAYo,724 +torch/include/ATen/ops/_add_batch_dim_compositeimplicitautograd_dispatch.h,sha256=XMCD0HQdcEVa1BVZ1-AQu9xbFTK2PEvtCl6qIHalbYk,806 +torch/include/ATen/ops/_add_batch_dim_native.h,sha256=ZlHHn7EHoRswcbjiFARpy9qVX_1_WirDTYpx882OshY,518 +torch/include/ATen/ops/_add_batch_dim_ops.h,sha256=p_sdmLdmQaXYFTN0C3mlmdycAWbR_NDZGkt1d0YRAs4,1103 +torch/include/ATen/ops/_add_relu.h,sha256=DuPI4vtguA5H9lytzp-QSED2W6chAynVR1dVHRiF7YA,2758 +torch/include/ATen/ops/_add_relu_compositeexplicitautograd_dispatch.h,sha256=vgIkXljW9LyU1ufXH1fSQOV8fTw6VeXeNA9Tfs_r7ow,979 +torch/include/ATen/ops/_add_relu_cpu_dispatch.h,sha256=W7fQahaeCmK8IjeYW7e3JcQSgfxAIor1d5wiaOO_wJM,1373 +torch/include/ATen/ops/_add_relu_meta_dispatch.h,sha256=nX5KL5CMnNFXOV7b2Cl5UHXWVX6cqIX5DatN3RjYhDg,884 +torch/include/ATen/ops/_add_relu_native.h,sha256=AeuZIxiE9HDbygD_kDHHDkj7SAkqPX2pMWQe6E5AItM,1128 +torch/include/ATen/ops/_add_relu_ops.h,sha256=EuxzjbMFeWSRGNgTHjzaLVdWcW_1kyqGbTYcwrqiqNA,5015 +torch/include/ATen/ops/_addmm_activation.h,sha256=onTtA0yG12pqZVR5xd9GR09Nw3OThZw7Me0IWKNvpOA,1824 +torch/include/ATen/ops/_addmm_activation_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sIopst68KEwCsexgyCnhaKQcVewRkBhvuAambw3VR0w,927 +torch/include/ATen/ops/_addmm_activation_cpu_dispatch.h,sha256=QzxYnkcvNJS_ASTa8XGoe8NNNp8LX8WuaVqsQmbJ5OA,1278 +torch/include/ATen/ops/_addmm_activation_cuda_dispatch.h,sha256=0fer6SRslwjTIbqsNymKPeu-C6vbSZeq3gAoU04eOhs,1280 +torch/include/ATen/ops/_addmm_activation_meta.h,sha256=2MUb9hYv5dxSPCZlGz7CUN6Z-27s-YxBSS5l1cmTzuE,702 +torch/include/ATen/ops/_addmm_activation_meta_dispatch.h,sha256=D8lzWemIpHzxHTZptByJ2uFoKVhL3m_pRRPA8XJCKRY,1280 +torch/include/ATen/ops/_addmm_activation_native.h,sha256=LjhvDOYCdC6GLugyAW2WAO7B6ApxV4AlrmBb-IcLzZU,1031 
+torch/include/ATen/ops/_addmm_activation_ops.h,sha256=TcM_q885IjeVNdRsmnBaAJ20MFxp1dNYCJh6w5DErgg,2451 +torch/include/ATen/ops/_aminmax.h,sha256=q8ubRbZQXqTkDPM26toAfsyrzkbs7_An1KEmcoy5jjY,2227 +torch/include/ATen/ops/_aminmax_compositeexplicitautograd_dispatch.h,sha256=MteznG0qyXebTbb2FKJ_USrhV4HcPaOQEbmk2gA_JP0,1280 +torch/include/ATen/ops/_aminmax_cpu_dispatch.h,sha256=1vCSuD3wnn9AjWbSFZAn8s6TiC3_TfzSd3KWlMZZi8s,861 +torch/include/ATen/ops/_aminmax_cuda_dispatch.h,sha256=-_PjNMZ7ZQw1P1Oiohpll7EcvCoDOmv_lgHrgApoYS0,863 +torch/include/ATen/ops/_aminmax_native.h,sha256=eP_oAQQHK31iNWsdiA-PfpyEeHBX688WgkrIuiqz7qI,906 +torch/include/ATen/ops/_aminmax_ops.h,sha256=9tUzmvEMe7gTc1eCbO633P0XLYS2v1BUcqMZ1DWDtLY,3493 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale.h,sha256=5qFVBdipg5mhObyNtwBrHNJhN81_9LcOHhHPK0CH6sE,2100 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale_compositeexplicitautograd_dispatch.h,sha256=fBpAv_Tw20SWKnINqW-Uyiql5FWE9WN6StvSpfoHq7w,1210 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale_cpu_dispatch.h,sha256=jbIIFTrmYxFN86O4j3ZkF_u4QWo4D67RIuQ1icewDhc,800 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale_cuda_dispatch.h,sha256=dQAHphuahsSeTuUWzDFme3yWmGF0je5Iotgc0yuNoH4,802 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale_native.h,sha256=btOClAcDirguOJQtHounvBDoRsQpdbNJ9WgcSv0BbkY,1041 +torch/include/ATen/ops/_amp_foreach_non_finite_check_and_unscale_ops.h,sha256=SieQ8H5mvFZWJzTYf-RuXaNNkiHJmVsZCwSeaXTkmIg,3084 +torch/include/ATen/ops/_amp_update_scale.h,sha256=gXBARk2m3vLbJyIbUeYPU4PpMcpIWIffue2FiXboTZM,2741 +torch/include/ATen/ops/_amp_update_scale_compositeexplicitautograd_dispatch.h,sha256=lPi7X-wyRB-Xx3Wq0Xy9PkMM-IvaBzZ0YhZ0ToS1Gg4,1408 +torch/include/ATen/ops/_amp_update_scale_cpu_dispatch.h,sha256=-FfPHTbLp7tcTkegrNEFc_BPFLu_NUNb2JxuYR0yr8c,869 +torch/include/ATen/ops/_amp_update_scale_cuda_dispatch.h,sha256=ImG8gSNPCZJcoD117AkuE92U5pHrCqwPRQ-tusYsIak,871 
+torch/include/ATen/ops/_amp_update_scale_meta_dispatch.h,sha256=pN2BeNTQkOi3g5pIF9LviKd7yyso4fAP9gSjwDh7otU,871 +torch/include/ATen/ops/_amp_update_scale_native.h,sha256=nq11zsv2ithC5D8q68u8y9mVPI7tGt95xEk0uja82cA,1304 +torch/include/ATen/ops/_amp_update_scale_ops.h,sha256=aTeBjk7s__sUbqAW7wY1ZkrSn0nUnofDZLTMbnFbgqo,3725 +torch/include/ATen/ops/_assert_async.h,sha256=_l2qjRJINXLoUwmkvB2Ii2uszCr7F0LkeYPZAulQwl4,843 +torch/include/ATen/ops/_assert_async_cpu_dispatch.h,sha256=XuHdadMdTJdaSXspDUHs6UG4U0ZkgcZCfIPP3i9THE8,805 +torch/include/ATen/ops/_assert_async_cuda_dispatch.h,sha256=_ajTb4byimj7KyPeWTxtpt0B9AuY6AyTAPOQjXO7DX4,807 +torch/include/ATen/ops/_assert_async_native.h,sha256=cWHUU80UQ4yKfgQqqeZX6w5MgJLV1yk7vfWd-y8VDYs,726 +torch/include/ATen/ops/_assert_async_ops.h,sha256=t4x4GiDz9B2m_sFuCGOH-CCsQPR6GBL2si8_a-VzLFg,1601 +torch/include/ATen/ops/_assert_scalar.h,sha256=L7AYpUFJ5APDaHKq7buaPTO0UKZXEbEz6YCSWTFkyTA,693 +torch/include/ATen/ops/_assert_scalar_compositeexplicitautograd_dispatch.h,sha256=IZAR5M7TpD3ygs4ronC-qJ_I6WjhmIdMZfuULM7TjHg,795 +torch/include/ATen/ops/_assert_scalar_native.h,sha256=qKFxayi6VGQFXpMmzEFmMRb2KB5SIer1-ncYxuWGL4I,507 +torch/include/ATen/ops/_assert_scalar_ops.h,sha256=kmtmSmLOWiQfm-vy16L8y8JRociMDJoXTlRTylsbAsk,1061 +torch/include/ATen/ops/_assert_tensor_metadata.h,sha256=K-Sz4uW0LcsuylcBaE4e7sKizWtb2GNBuctFZ8UotP4,2443 +torch/include/ATen/ops/_assert_tensor_metadata_compositeimplicitautograd_dispatch.h,sha256=YDNk5C6La0l7bXQ-iF6pTRNEutYS-M-As6kds7XxeS8,1139 +torch/include/ATen/ops/_assert_tensor_metadata_native.h,sha256=J3hF1rx57GFYolurIWYZE4Uq7_nHASYB6y5ySwFAmwI,630 +torch/include/ATen/ops/_assert_tensor_metadata_ops.h,sha256=sX1-cq4i4J_Ywbp3qc-_KOg0qVHCyks7iSIf1JdTVeY,1358 +torch/include/ATen/ops/_autocast_to_full_precision.h,sha256=n8wJqbr6C_8CKec6GneeVrIctMRXFRETAToMYKjh_sg,501 +torch/include/ATen/ops/_autocast_to_full_precision_compositeimplicitautograd_dispatch.h,sha256=6Ut2E5QD-UjXFmLachijmjXp_G3jtm7CF0ULFwnzP48,822 
+torch/include/ATen/ops/_autocast_to_full_precision_native.h,sha256=nvX-e8trtdq26Hwt5OVYGMePTZKsyTxxMgdJ2D60D5k,534 +torch/include/ATen/ops/_autocast_to_full_precision_ops.h,sha256=6lK6PUbvsPjNvQaMP6KP6PN363ltMYsRBR2MR59fgc8,1159 +torch/include/ATen/ops/_autocast_to_reduced_precision.h,sha256=38JiVe6_wQ5h517OaNwO6-UhC1XlUPlj5itJKANoyE0,504 +torch/include/ATen/ops/_autocast_to_reduced_precision_compositeimplicitautograd_dispatch.h,sha256=1aZkN7eFNg3n7_nQMXGuvb0cGVxGSh40KJl4Ge5Aeeo,878 +torch/include/ATen/ops/_autocast_to_reduced_precision_native.h,sha256=HhNQ4t_d2b44W8mbvyNBKelu1i4osF2HSVaEHiFxWEA,590 +torch/include/ATen/ops/_autocast_to_reduced_precision_ops.h,sha256=sRRM_36W4PU_nnrY8F4YOlswli-WaXn7PzUHQUnOCow,1351 +torch/include/ATen/ops/_backward.h,sha256=FG91xCosiqh90Z8EDCmtzMD4cGI3NFC3CIRk7eX0-mE,483 +torch/include/ATen/ops/_backward_compositeimplicitautograd_dispatch.h,sha256=mFUsAI-2jik1qd-06sFGdsdRSsMFFnAkMypN1V-l09s,909 +torch/include/ATen/ops/_backward_native.h,sha256=5-zGS7izWQANoO9l7-8TiLYA812LTlWVgTiShvSHNRI,621 +torch/include/ATen/ops/_backward_ops.h,sha256=QHqvuo8lvyN2yPIBSUX0o052ek6IrLoWX2FNRZMA-es,1374 +torch/include/ATen/ops/_batch_norm_impl_index.h,sha256=c46xAPCU-JOG1fPagqulppychh4ZA39bOanb2oWfA2Y,1232 +torch/include/ATen/ops/_batch_norm_impl_index_backward.h,sha256=pRza2znlQ5ufKLeq8oQzOiwVHaTp7adCWNE7I_6wxcA,1500 +torch/include/ATen/ops/_batch_norm_impl_index_backward_compositeimplicitautograd_dispatch.h,sha256=FPJHZ8kJbqhVen0pQFKbYaIvTSwHWIINyQr8PgtO06E,1216 +torch/include/ATen/ops/_batch_norm_impl_index_backward_native.h,sha256=YiAfPBQG17pl1sMMiYsvRAia9TQOLfO1no3zyvOCkTI,928 +torch/include/ATen/ops/_batch_norm_impl_index_backward_ops.h,sha256=zT3v5XVVvuDo7p1JKoNW3C985hWY1fv6RXauO6lft7I,2429 +torch/include/ATen/ops/_batch_norm_impl_index_compositeimplicitautograd_dispatch.h,sha256=vOy0z1Qpriog0LV_XTRZKVsFHGvz7EIsyHM3GlqIVB8,1085 +torch/include/ATen/ops/_batch_norm_impl_index_native.h,sha256=1EuLEOLSFizOpsmeRZKdInJoAyh9SHf5MvAvZojaxxo,797 
+torch/include/ATen/ops/_batch_norm_impl_index_ops.h,sha256=FnzvcJP18X7EXYbfQt9Z5LEseSDvMBc0WTb0aX8zifs,2023 +torch/include/ATen/ops/_batch_norm_no_update.h,sha256=7fkW4LSTXbiGEmnfGRWXqb2onxUAWVmN_EzlfNiMO_g,2786 +torch/include/ATen/ops/_batch_norm_no_update_compositeexplicitautograd_dispatch.h,sha256=NzZBG7IgxAPrgWMtONLaL_PEyOO5n5ASDrY9Z_Zm14w,1880 +torch/include/ATen/ops/_batch_norm_no_update_native.h,sha256=Hxjc4RfgzapUMAlzdXaJ7JBgYN73mpFYDGclBzI-5DU,1172 +torch/include/ATen/ops/_batch_norm_no_update_ops.h,sha256=hKPlqvhXjquNP6jJcu9aogFlQJIk157q1T_o3ZIpMTo,3644 +torch/include/ATen/ops/_batch_norm_with_update.h,sha256=g25mk6o1ZopAPVzhrF0Z4k2vx7FY_X9DaLTRABIDWfc,3470 +torch/include/ATen/ops/_batch_norm_with_update_compositeexplicitautograd_dispatch.h,sha256=OZzUPrwq-fw1ik7byqeRecdtmvPeRxxWZrifW8Cz6wY,1042 +torch/include/ATen/ops/_batch_norm_with_update_cpu_dispatch.h,sha256=XJ9VUMfdqmvwtfqHbx5eoFDL14jP-_aYbYNC9Vjo_v0,1732 +torch/include/ATen/ops/_batch_norm_with_update_cuda_dispatch.h,sha256=2rAv4f9nLscayoiePZ2QZFR3ypi3dW76R4qWHgtda_k,1734 +torch/include/ATen/ops/_batch_norm_with_update_native.h,sha256=7Dg9wyg3UjIxlZ3q_pNWx1uJvy4dyFlta7HunE60c3g,2418 +torch/include/ATen/ops/_batch_norm_with_update_ops.h,sha256=Kz0SvSKxYT3XkxY1ooBRSm-cMXBw3W5iC1_sQQQDJSA,4924 +torch/include/ATen/ops/_cast_Byte.h,sha256=KOZxlw-5L8bgq1i8RTMKNY3APU4y5yuTSukr6LiwUn4,694 +torch/include/ATen/ops/_cast_Byte_compositeimplicitautograd_dispatch.h,sha256=A8Bo7FtgsCwo9Lw_qpxlX_dskXN6CkAF7Dm1HrIyBLc,793 +torch/include/ATen/ops/_cast_Byte_native.h,sha256=r8ca_K-HHFZPC7EaRxCbnIQfHwUY_EsDL-UBpFVapMA,505 +torch/include/ATen/ops/_cast_Byte_ops.h,sha256=a6JBqArF14AQBG0cldEufzAoWFS363hJNkSlA-_Ny4M,1048 +torch/include/ATen/ops/_cast_Char.h,sha256=zS64PxU4KlRZKjXz3-j1BT5z2_Y-RiuBi0QK3a_n3YI,694 +torch/include/ATen/ops/_cast_Char_compositeimplicitautograd_dispatch.h,sha256=l8_rLTLnKmPUDcFblxL9AqD1wFMYjNYOHTZG0r1FFxI,793 
+torch/include/ATen/ops/_cast_Char_native.h,sha256=5eto8dQvywUal9BosyiU4OP3RuvVvIb2yE16snr9Lco,505 +torch/include/ATen/ops/_cast_Char_ops.h,sha256=U3lx_gTayPjDjn3u4wS-lFStkOB8N_Dasbkw4mGcELE,1048 +torch/include/ATen/ops/_cast_Double.h,sha256=koNkVFR7k1TqYXskrs-Y11gj0wlFG4mXz4Y4YuB5Qig,702 +torch/include/ATen/ops/_cast_Double_compositeimplicitautograd_dispatch.h,sha256=NgPrwVgyMIcenixxf_Tt5WPCyciEscJzaTv0qYHDeHw,795 +torch/include/ATen/ops/_cast_Double_native.h,sha256=Ejp0_7dBm3zocyRprGMal74vGnmDjmPd1KS7BWaO6OE,507 +torch/include/ATen/ops/_cast_Double_ops.h,sha256=eBLuq5JKaAl1DRIXU7_K4XBBydk_87j0kWo20jrLsEg,1054 +torch/include/ATen/ops/_cast_Float.h,sha256=_Zk6QJjJPcqdmaVjUlYwKCTt-9m5nlJSnhBvd3HF7YA,698 +torch/include/ATen/ops/_cast_Float_compositeimplicitautograd_dispatch.h,sha256=sGLB4Am5BKLcmkexHc-9CpK16SvW7KGbiapIiGBbS5Y,794 +torch/include/ATen/ops/_cast_Float_native.h,sha256=MFRIxMv7L_O12flNVUWDf47sUla-HuENLl58pxuxmJI,506 +torch/include/ATen/ops/_cast_Float_ops.h,sha256=7pfRgI8PiwYAgUDMaKQyi-1KbgC9uuIboa4Yets1Uyk,1051 +torch/include/ATen/ops/_cast_Half.h,sha256=wVzZxGVS-U651b7glSqELkghwt11_XfhX8IYYIwH2vc,694 +torch/include/ATen/ops/_cast_Half_compositeimplicitautograd_dispatch.h,sha256=tRyePzWVLwvGxZe3nLl4byZ4CWYCsXBzTwdUWZAvcXQ,793 +torch/include/ATen/ops/_cast_Half_native.h,sha256=PlREDEiQukCpm1GAY1xwIiZYpBvUB3Mt2h6P4qtTsyk,505 +torch/include/ATen/ops/_cast_Half_ops.h,sha256=e6ditpYVySgsLiasjel7fh7T9h20Rvk0QJPlIQM2Iy0,1048 +torch/include/ATen/ops/_cast_Int.h,sha256=-8WNEN-BbB7C4p93lNGZ6eUEOZBqz28M97tZV3GlY1E,690 +torch/include/ATen/ops/_cast_Int_compositeimplicitautograd_dispatch.h,sha256=-59XKkROrFchPfiecd31MhM8OEk7m_HBHyk1sA9CKhI,792 +torch/include/ATen/ops/_cast_Int_native.h,sha256=ePmfT8yqxV-Ohjj3H134r5d-xYSnNNWVPfpIo2L2K44,504 +torch/include/ATen/ops/_cast_Int_ops.h,sha256=MNAtzTZvi_maVvnOh6fFMkng1P5e-H7etNc_KwmgJQ0,1045 +torch/include/ATen/ops/_cast_Long.h,sha256=OKH8SCC2TuYxyrPMlOwSz2DONDe_2aRzaKx4erPV0n4,694 
+torch/include/ATen/ops/_cast_Long_compositeimplicitautograd_dispatch.h,sha256=xUpMfaOaTlrop_EaFPhi3EITulMbMEMDSqK0gHx8SGg,793 +torch/include/ATen/ops/_cast_Long_native.h,sha256=sptC5BSJQITGjKh4Sm2qEL0WaNMazsXLR0b-P5YG_gs,505 +torch/include/ATen/ops/_cast_Long_ops.h,sha256=mOBIfXNXs7xd9ynSPtHTkaZtniZl-e0yjauswijUu4M,1048 +torch/include/ATen/ops/_cast_Short.h,sha256=V81yDnhRmIVi6CZGr5SNo-Q2LF-lDc3x52it8aVPN68,698 +torch/include/ATen/ops/_cast_Short_compositeimplicitautograd_dispatch.h,sha256=iMXfYViZJ1TyUQOfeI_XKhiGzo7yj0y960N7kbk7nVU,794 +torch/include/ATen/ops/_cast_Short_native.h,sha256=Jayv7mdZae3KqjSu_AHsTjyz-bUIXgpM3mmRy7Y0kLE,506 +torch/include/ATen/ops/_cast_Short_ops.h,sha256=UK99kO0FYtcD3Ak0KedXr6qvHtJIF-0DbvgD24B5HDM,1051 +torch/include/ATen/ops/_cdist_backward.h,sha256=X4pVcIkBOU7PSpKTsdnMa4BiyJPOp5ld_gD1KZs964Q,1529 +torch/include/ATen/ops/_cdist_backward_compositeexplicitautograd_dispatch.h,sha256=9A90H9iXtxiHJ1M8nIqWZXlljoWGMCOZCNZrBG18SMA,1049 +torch/include/ATen/ops/_cdist_backward_cpu_dispatch.h,sha256=Ni3BgfKM5kOEZT8Plxe-QW3vZd7ktBbGOkkycjUdV38,811 +torch/include/ATen/ops/_cdist_backward_cuda_dispatch.h,sha256=Ux_rbWPpQCYWfSANwgWuJAsbc7c0iG16lOOiUKHGKZA,813 +torch/include/ATen/ops/_cdist_backward_native.h,sha256=oLSVYY2bTFmqs7kDji8YbzY5bqtXOGHtu50KnWMsFdw,736 +torch/include/ATen/ops/_cdist_backward_ops.h,sha256=MmbFpFgX4yGy6PSv8QmJfx9Jc3axE7_Yte8C1xMLMOk,2198 +torch/include/ATen/ops/_cdist_forward.h,sha256=w0WjV3APipplmPEzNNV0GI3hEOPR-jM3xwz0128JS_k,1462 +torch/include/ATen/ops/_cdist_forward_compositeexplicitautograd_dispatch.h,sha256=Tkr_LCK5qyGLMDMcPIR_9tWAR9zMZLmmYTZH8KHYYDE,1023 +torch/include/ATen/ops/_cdist_forward_cpu_dispatch.h,sha256=U5-ALx7OD_vYMv1WcugQRb-FRYiNu9SPfkI1-Tstp7M,798 +torch/include/ATen/ops/_cdist_forward_cuda_dispatch.h,sha256=tcy9k45kuxsXuxQHiJzUEtXQEUpubDCz4a5Ej6Ee5Tk,800 +torch/include/ATen/ops/_cdist_forward_native.h,sha256=teHw7a6ykDs7gkBaMkb97we2hrowYAShJ_lESj-7T-o,710 
+torch/include/ATen/ops/_cdist_forward_ops.h,sha256=Dx4CssxdJ_oVQyxPGFZJjUGFZNuYAKTi9s4hftq80ME,2100 +torch/include/ATen/ops/_cholesky_solve_helper.h,sha256=Pk4NpnJGv8J-xCYU2ouA39uzmqLmoZvxf9nKmgIUBKQ,1362 +torch/include/ATen/ops/_cholesky_solve_helper_compositeexplicitautograd_dispatch.h,sha256=R9dhWqSsEkt1nkOVqMzBvQnKT45ljx72pFJzHv1FnSM,967 +torch/include/ATen/ops/_cholesky_solve_helper_cpu_dispatch.h,sha256=43oV4ihJqQtfaq8sY6TAGC6FsRU4XeSqw365JopW-Zw,770 +torch/include/ATen/ops/_cholesky_solve_helper_cuda_dispatch.h,sha256=JXYLAPjiIVfr4JECqAlolanWHcig2qcg5ntyYwKptK8,772 +torch/include/ATen/ops/_cholesky_solve_helper_native.h,sha256=hI2ONRhPBdmJrixIUaT1yudn59qir5iqwgZpxG7AlN0,767 +torch/include/ATen/ops/_cholesky_solve_helper_ops.h,sha256=ezBXam7CZu44XaAOvgFJLbZEdEoQa8Hhkp_6hiO1tv4,1918 +torch/include/ATen/ops/_choose_qparams_per_tensor.h,sha256=_XFyQpbT7Lt6dJ9koVkVqUW9RYtqSGQbLOfbQS6qV9Q,782 +torch/include/ATen/ops/_choose_qparams_per_tensor_compositeimplicitautograd_dispatch.h,sha256=esZEMdKWR5RkSQE4sCaDUMhBYvMLOE-DoAKjXFPFahQ,827 +torch/include/ATen/ops/_choose_qparams_per_tensor_native.h,sha256=3fTINZzR5fTvclGatDMvYhH_xXh4ZPULboqXke3R20g,539 +torch/include/ATen/ops/_choose_qparams_per_tensor_ops.h,sha256=ZzywxGvh5kzvfsqi0e-Fi6d2apa7UbrXBjApqZSCbxk,1156 +torch/include/ATen/ops/_chunk_cat.h,sha256=oV8FYjU0-3LJonx3kVwl22hwixf9PMBXSJfHF-JOYa4,1290 +torch/include/ATen/ops/_chunk_cat_compositeexplicitautograd_dispatch.h,sha256=Sv84YLH9XbleBK10RBeYW9r5g7flE-WhM1RPuO7MQC0,1029 +torch/include/ATen/ops/_chunk_cat_cuda_dispatch.h,sha256=YKs-QGVkHxMQJUGFP2V_IGoYGfOJne-EwtqZk2Tgri8,987 +torch/include/ATen/ops/_chunk_cat_native.h,sha256=3mHsG83eKwsUG1QxxU7HKpkp5e3fFF3jWKy0NpQV2z4,840 +torch/include/ATen/ops/_chunk_cat_ops.h,sha256=0dWpxGAH62PQgFju6_WwXYpXC3V1V3sbTFTQ_YCzjMI,1830 +torch/include/ATen/ops/_coalesce.h,sha256=-Anng3ew0v8EF5_uBCjrGywY2Fw3xNiOKOyxjVggAYk,1034 
+torch/include/ATen/ops/_coalesce_compositeexplicitautograd_dispatch.h,sha256=MqZGPzLt05vsPP3G2qlM-v14odHE9p_hFBho2phUCZ4,873 +torch/include/ATen/ops/_coalesce_native.h,sha256=vmv8A_ZMuGT4onyB5dlzRWOA3x1u6fT8ZgNbmdRIcdA,640 +torch/include/ATen/ops/_coalesce_ops.h,sha256=cfdIl40u0LQtV8MON3XJ63QD3KepG4UAPiMbywxY99U,1608 +torch/include/ATen/ops/_coalesced.h,sha256=XSo49BBmstRPV9dT9GDQr_6zVEad9yeGBh2q8DUYzA4,1173 +torch/include/ATen/ops/_coalesced_compositeexplicitautograd_dispatch.h,sha256=y-77fkAkkrFMG6exwRBa3_jveOoWvSuLqF-EVlNUV3g,981 +torch/include/ATen/ops/_coalesced_meta_dispatch.h,sha256=xw8dgdJtozkYpwN_n6Jj-izJmM643tDjj3PV519DGEg,739 +torch/include/ATen/ops/_coalesced_native.h,sha256=Se-uixsb3Nu7gVfOFMbByx6qcUe7UTV7zWONYh2BhfI,672 +torch/include/ATen/ops/_coalesced_ops.h,sha256=wd0q4UG7XGNEeOP9OW-TqAvAZdGVzxJe9mQVoJVHKak,2320 +torch/include/ATen/ops/_compute_linear_combination.h,sha256=tvaRlda08qIES7RpL1GykmiNkoJmwORmg2YpXgQD4f4,1427 +torch/include/ATen/ops/_compute_linear_combination_cpu_dispatch.h,sha256=zMOsx4HlaNySt8Q6S2IztS8H12Jv7GslE7Tj4bYhyfM,1042 +torch/include/ATen/ops/_compute_linear_combination_cuda_dispatch.h,sha256=kL8uPnJ8onlkXzyYf5MpQXZcpkhlfvIyxgtf535Q-8o,1044 +torch/include/ATen/ops/_compute_linear_combination_native.h,sha256=88hWznlbZYYBIEmb8kS3G_-nnBdNRUxa_-42JrjGgbs,664 +torch/include/ATen/ops/_compute_linear_combination_ops.h,sha256=YhGUYRz6dmi1CmXDngg3I7ut3gXfhtpUVKssDXceph8,1936 +torch/include/ATen/ops/_conj.h,sha256=OC0olOk9OYmPVWyvx2R2qAeGhNu5zQNPW3n-fw-RUgk,616 +torch/include/ATen/ops/_conj_compositeexplicitautograd_dispatch.h,sha256=jZikQR9eAS7_84oP6v4-NemdI_qXnbJ9UUk-j6frXvU,763 +torch/include/ATen/ops/_conj_copy.h,sha256=vZ6vCFTAzRbDVNJWfOHzKalX8TqS1XfGohYPmR24RYY,1044 +torch/include/ATen/ops/_conj_copy_compositeexplicitautograd_dispatch.h,sha256=ITqhMVVxq5_OuH8sPJRg_OSydjhNu8a25UfNSIGcPdQ,875 +torch/include/ATen/ops/_conj_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5uOogQG4enxDme07Ey3zH9HFZIHf-Y-1FUGgMDwftZE,794 
+torch/include/ATen/ops/_conj_copy_native.h,sha256=GdQBuGU-uaUInCThdJNsJ8BL0NXWxGW4Ww5sfo93HuM,562 +torch/include/ATen/ops/_conj_copy_ops.h,sha256=NLM0MdkSgQmDnH2Uud11ju9G8GSs30TZBv32eUDIEbA,1614 +torch/include/ATen/ops/_conj_native.h,sha256=cpVCzD_i5POJObXvbJxWtoiWpurYIWzm_QPYhgbmlvA,475 +torch/include/ATen/ops/_conj_ops.h,sha256=K6O11K8u8lBbN2hmuW2kTHEhwe2vU5KayHBiBAZIaIc,970 +torch/include/ATen/ops/_conj_physical.h,sha256=QKGVGBEYkNJ1sufpxfE1GgmuRi4w9AlD1ixG0fCoJcQ,1084 +torch/include/ATen/ops/_conj_physical_compositeexplicitautograd_dispatch.h,sha256=36yGLH4a7Kbv2Z2kIKESRc6qNFut53VxqPMBlJ7o75g,945 +torch/include/ATen/ops/_conj_physical_native.h,sha256=Cf0G7n1zZYKw3TlFJybEWoQalM_4NsVKEjHjLCPc9p4,642 +torch/include/ATen/ops/_conj_physical_ops.h,sha256=aMesGzfsS8w0ohDy2bbVBgqPxlOm-Cy2F5FTuB6g93Y,1638 +torch/include/ATen/ops/_conv_depthwise2d.h,sha256=PPSxR0d64NNItgYXKCIP8IcYJtye1iaLYz4edwQXYkY,7427 +torch/include/ATen/ops/_conv_depthwise2d_cuda_dispatch.h,sha256=qkee6zj-zfgRj_szulV-EJ72CbdTMsu-zjQ9o_VBZfo,2311 +torch/include/ATen/ops/_conv_depthwise2d_native.h,sha256=mcssm_hBhj65cB1H8-3aJbn1wmOkhc4nTcdoduTvYc4,942 +torch/include/ATen/ops/_conv_depthwise2d_ops.h,sha256=r_gbfZK6ZSD1Eu0jbJGtc5ZNgwVE1nM9U19dePuMilk,2948 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr.h,sha256=grYfki-lqPgiHhSic9As7vLZNE_TTwiU5-tHdR_7dDo,1516 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_compositeexplicitautogradnonfunctional_dispatch.h,sha256=md898hSb-4WDOxW9KufkwciemixPF_10Ve2vBN_lNDg,852 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_cpu_dispatch.h,sha256=hKgFj4X_n6sg3csuJ7jny8QZXY8jGPRi9hpKVLgj5iM,1057 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_cuda_dispatch.h,sha256=Iao57wZRMkkoGouJIRAUiUytDQPBAml9Y5VdSuIDKI8,1059 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_meta.h,sha256=qtMPpiSagKRCRXgCFoJTdQU-fFsr1n5piJNqAFgQ4Ng,631 
+torch/include/ATen/ops/_convert_indices_from_coo_to_csr_meta_dispatch.h,sha256=_1Q8s0nRcH5NL4dJG_8S8P1FP75gC-4dROpiWWjG-ng,1059 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_native.h,sha256=0u8m9p73OXMEzPPDHzQOzC3Pb1nbBDCGqIsOe6j3rFg,950 +torch/include/ATen/ops/_convert_indices_from_coo_to_csr_ops.h,sha256=0P-3Oly002KEQJigRq44J4fA5kgrfKEFuolrMxFzBxM,1963 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo.h,sha256=1Aqm0FAhvIrBml-f5saJJVeHRUJ3-5YIPaMDGUCXAeg,1852 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_compositeexplicitautogradnonfunctional_dispatch.h,sha256=TNtGIa4b90GgtIVS5jNxC8qoBHEjjMF5SdzIMrRLyCc,900 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_cpu_dispatch.h,sha256=yq_-BrDXjpDFMuVIuGZqybAfopDI1gzebezD4i-nKts,1195 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_cuda_dispatch.h,sha256=d_AHh3CDUBX4Z6xI-rvGIH31nV1MHRLY8qXCIAEIP1I,1197 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_meta.h,sha256=QvJZQStxXKPyQwJ0RlaA-ZVSDkBHruBHoNnvF4IRE1E,673 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_meta_dispatch.h,sha256=DakGc8cs4QhgEXH-DlHrUcnYoLtDKc2e5cQxO0oEKOA,1197 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_native.h,sha256=rfNdSf0ZotwN9h-9Z-VZ5ECadUfmdoSdB9aL4NsNSGY,1034 +torch/include/ATen/ops/_convert_indices_from_csr_to_coo_ops.h,sha256=K8c2_sezzWkMEy8VnLzqERAa0wajc9YyNyxDnB7Rc1w,2245 +torch/include/ATen/ops/_convert_weight_to_int4pack.h,sha256=FYQXghusLN5t_33UcPiN5UzeQ-xYtGgHoG8ukOZ6pSE,749 +torch/include/ATen/ops/_convert_weight_to_int4pack_cpu_dispatch.h,sha256=-NuCazKc7jk0GSdxyys87-YVXwRImNx8wNUjhnGuapU,762 +torch/include/ATen/ops/_convert_weight_to_int4pack_cuda_dispatch.h,sha256=GxONpxSGHOf9Rsl9ctw8s-HfbPjCue6den_bnKPRU2s,764 +torch/include/ATen/ops/_convert_weight_to_int4pack_native.h,sha256=W2rGDSq8lfG2MDBWzFZiDh7jFxiWAaNIkiCJdNaKXKw,623 +torch/include/ATen/ops/_convert_weight_to_int4pack_ops.h,sha256=zSZJQcAprZBun480B0k5PttmqrvGSRedWGyAsQ0b8mQ,1098 
+torch/include/ATen/ops/_convolution.h,sha256=c1WY0gelAb8-7oOuW-p2X_muj9IkIbMCHxNZJTjfdc0,12995 +torch/include/ATen/ops/_convolution_compositeexplicitautograd_dispatch.h,sha256=sFFB29kUy2g3e5CG05IqP-7HDbA2yES0WNVHWlP4HJM,2947 +torch/include/ATen/ops/_convolution_compositeimplicitautograd_dispatch.h,sha256=qAe5gFqygNqNwOWu4uYEsdoJzFS02z40sID7lXLbq7k,1385 +torch/include/ATen/ops/_convolution_double_backward.h,sha256=LEzHBawo6BB9lHgYIa442mJpZDjqZDM3HADQRQgY5Y0,4022 +torch/include/ATen/ops/_convolution_double_backward_compositeimplicitautograd_dispatch.h,sha256=uoj9rrfRuX6U7zimcm6Y7GdJFTvllUCQKKbxRpvfagI,1655 +torch/include/ATen/ops/_convolution_double_backward_native.h,sha256=zbybeENk6O1akdf7aKB_K5Ccv49RCt1zRJZKwndFlIY,881 +torch/include/ATen/ops/_convolution_double_backward_ops.h,sha256=1rkxRgal8VXYG3z7N9L1Kx3M7rfe-M98DG-6qPCT7tU,2366 +torch/include/ATen/ops/_convolution_mode.h,sha256=RPmeqXrhw1v3YDn2u1r91LO5cD0FbR29pDhU0HQZfl8,2443 +torch/include/ATen/ops/_convolution_mode_compositeimplicitautograd_dispatch.h,sha256=TtIZbqRFSpQWFkkvuBbos5-aTbQWO9WyVzrGVBqQuzI,1183 +torch/include/ATen/ops/_convolution_mode_native.h,sha256=NZueh-4UWUZ6Uwl0X_lAaUNGxevGNVKP4W7nqk8sTog,668 +torch/include/ATen/ops/_convolution_mode_ops.h,sha256=pr1XIMXV7eYz8SaBPdHO4c-_OysfalhJQe-nAluNlTo,1572 +torch/include/ATen/ops/_convolution_native.h,sha256=81IwLG8VWGfZCL4F6fQxUgHPyDMI6jDtVPUUQbgRMAk,1485 +torch/include/ATen/ops/_convolution_ops.h,sha256=z9Jp8yLmIhB2J3MOr2VXcALbxvH8ztoMrWlqyQ677vU,5149 +torch/include/ATen/ops/_copy_from.h,sha256=tfWDjbQmEnB9OgVIMr9mP5J0GkuSVvd837ibkuDHBls,1353 +torch/include/ATen/ops/_copy_from_and_resize.h,sha256=cDd1iKBbEkZzkGnRROLcQ8RLNFPH2Y8O87EIU3mdZWI,1277 +torch/include/ATen/ops/_copy_from_and_resize_compositeexplicitautograd_dispatch.h,sha256=wdWwmc8ggmZTGrHuXMLqI3KwiChhLh1PXzr7sRHDTFo,945 +torch/include/ATen/ops/_copy_from_and_resize_native.h,sha256=hJfM5OK94L9JXa7o_cIrgchOxJTZ3MzyXnKRzPBO8bU,539 
+torch/include/ATen/ops/_copy_from_and_resize_ops.h,sha256=QCaPmVcrLGQpSoVT_khxqepmD02r1TgjOYMwxFgPNNA,1840 +torch/include/ATen/ops/_copy_from_compositeexplicitautograd_dispatch.h,sha256=ThwIHqVmopNlQavWr8FdTUNYxFjxbBYElKenPsAuvBY,967 +torch/include/ATen/ops/_copy_from_native.h,sha256=0KtcAKnbq5kAOS7jR5zlRk7KYmiX7QOdG5ltt16fXSg,547 +torch/include/ATen/ops/_copy_from_ops.h,sha256=Nz0nuid5S3bDX5xdJFm00Mbjr9XVoO00i4gAchqms3E,1912 +torch/include/ATen/ops/_cslt_compress.h,sha256=UzTeAAloWHMFpknMZFsE7HTzCPwRYVTrW8cprLrS3Kw,649 +torch/include/ATen/ops/_cslt_compress_cuda_dispatch.h,sha256=do4dKVCbxCekhM_ytJgcp7iQgwV6ARtxvFeZhD7pthg,731 +torch/include/ATen/ops/_cslt_compress_native.h,sha256=ifnJhI0auJPn1VD0I1wZcW5qK6m_Ymk04Rz25gWTSzs,485 +torch/include/ATen/ops/_cslt_compress_ops.h,sha256=wiUSSkqr8fhjCAnxahyZOfUyLFjaoZpTFR-eslQf650,994 +torch/include/ATen/ops/_cslt_sparse_mm.h,sha256=PN3G5BORY0kwbVqj9ROj1cj5b4Iucr9-PStLlFLv-mU,1083 +torch/include/ATen/ops/_cslt_sparse_mm_cuda_dispatch.h,sha256=EhtG-jSKZEup_yuLl2mypIy2mLXpOfzE7r0TvPGwo4Q,963 +torch/include/ATen/ops/_cslt_sparse_mm_native.h,sha256=fio1Fon-_kYOH0xMIbMrMwZf8vFRyOXhjOUI0A9UqIY,717 +torch/include/ATen/ops/_cslt_sparse_mm_ops.h,sha256=bPUqffn0X88nQ0vmIq5Fv3JcwlIYEhosPwq06FtR-z4,1676 +torch/include/ATen/ops/_cslt_sparse_mm_search.h,sha256=5KBQvQPYQTWycwTQn7EoiBkAirfxE6AuUV8eBQSrS0g,1065 +torch/include/ATen/ops/_cslt_sparse_mm_search_cuda_dispatch.h,sha256=pLFg6ZYG7057pnPuZnnHrlnbNBPJglGve4XzEnj0piA,949 +torch/include/ATen/ops/_cslt_sparse_mm_search_native.h,sha256=o4yCxLe53k1ZrVArxspW7SOcvpTyoLZd3tWQTpolS4A,703 +torch/include/ATen/ops/_cslt_sparse_mm_search_ops.h,sha256=gNhp1emg-zT8VmxgbXzbYg9w7cWeBZssDAenwVZBMNA,1630 +torch/include/ATen/ops/_ctc_loss.h,sha256=yRjM0zP9_67caQF7jumqSFEF_4TUiBsWlwjlC2JpXpY,3904 +torch/include/ATen/ops/_ctc_loss_backward.h,sha256=wLfYK7LwfaA28U9D5Lil_UtfIwbFQmt1jxAn6ZWuVo8,3287 
+torch/include/ATen/ops/_ctc_loss_backward_compositeexplicitautograd_dispatch.h,sha256=kl_9-LmsGN6WMTNelqwUDYDfGEYvrMuBWLJIw2ffnrs,1347 +torch/include/ATen/ops/_ctc_loss_backward_cpu_dispatch.h,sha256=dPm-AFGOpsPQqGUewFPxT6mjN9uxvL3jpBD3U7yXC8k,1266 +torch/include/ATen/ops/_ctc_loss_backward_cuda_dispatch.h,sha256=nNB1uGt5DbIaevZyoRznT_HvUMGtYP56EB0Sc0t_Eww,1268 +torch/include/ATen/ops/_ctc_loss_backward_native.h,sha256=hTmFArD_s-BfsD86v6GKNvuEjnfMSTspcYvBnH-qbeE,1646 +torch/include/ATen/ops/_ctc_loss_backward_ops.h,sha256=O2TC6X4kSAgXSrxPk1nCfJ4nlEF8PPGvMqbUXqrmi8c,4485 +torch/include/ATen/ops/_ctc_loss_compositeexplicitautograd_dispatch.h,sha256=Fq1vpNflvoQktKiMH1k2iDk_YEj7Cs0idp_99aHIMKk,1776 +torch/include/ATen/ops/_ctc_loss_cpu_dispatch.h,sha256=mIzleZfbzRhJgZGmpIEzucFVxTTrS7vBl_rUWT2iXbA,1114 +torch/include/ATen/ops/_ctc_loss_cuda_dispatch.h,sha256=ck6S8jzMRKK6u_EiPPf9KqGs6SwfWuxbuiqYBdwTSRA,1116 +torch/include/ATen/ops/_ctc_loss_meta_dispatch.h,sha256=xefcrBu9JD2xrpA4NoU3iKsghd5R_LCAuOadz2QVRUI,889 +torch/include/ATen/ops/_ctc_loss_native.h,sha256=cqmAWxTDWSH3pGNK6jVTphaynmNlKZpxRZqmln_s0fQ,1859 +torch/include/ATen/ops/_ctc_loss_ops.h,sha256=99_RMcyA6SXTLgsBGdJQg-JGTgdDna9ce9uzYVnXhHs,5103 +torch/include/ATen/ops/_cudnn_ctc_loss.h,sha256=pPWRSpqn309LhejP4BS5MNHo2x4QGNjWZ6Qk7_PSkLs,2912 +torch/include/ATen/ops/_cudnn_ctc_loss_compositeexplicitautograd_dispatch.h,sha256=JF0waOMeVxAyo53VBOYnZiuW8ocLlTx_0yb0VBKInbA,1281 +torch/include/ATen/ops/_cudnn_ctc_loss_cuda_dispatch.h,sha256=Gf5sklXww9HrY3XRUTw0Iv6TRQLGFC_FhFdLnNPpl84,1152 +torch/include/ATen/ops/_cudnn_ctc_loss_native.h,sha256=GuCfPcyeqL9Zp-loLyKn0XTVf1rrPPqtKx0m6RQkmjY,1198 +torch/include/ATen/ops/_cudnn_ctc_loss_ops.h,sha256=Ewwxy2_rNqLIqQiU5joW_fZzNMO9blEdmZSrp6lfggY,4057 +torch/include/ATen/ops/_cudnn_init_dropout_state.h,sha256=AM7qvcVXno1j6YAj9J6AywNt-O6sHbRbinJ14bGgefI,2226 
+torch/include/ATen/ops/_cudnn_init_dropout_state_compositeexplicitautograd_dispatch.h,sha256=CWO2TOuEgM6H101FHJp_zK2AYqALVb8rZZsgfQR8TKs,955 +torch/include/ATen/ops/_cudnn_init_dropout_state_cuda_dispatch.h,sha256=_w5k_luDbCWVlAJG_9Q0UDmYVNvTQWWXBkG8dO0xMmw,1036 +torch/include/ATen/ops/_cudnn_init_dropout_state_native.h,sha256=5bVMKYJ3U-oGQtusvtwLGg2KRJJnoyBSfERXLe0Xa6I,799 +torch/include/ATen/ops/_cudnn_init_dropout_state_ops.h,sha256=ZsPmZuNcO-9CLnmFD4bWPnGJVkHLWvwuom5O6CnKWeI,2371 +torch/include/ATen/ops/_cudnn_rnn.h,sha256=EL5aKrOCsYxAyxf5jOE2b3aZ95r5mjyCN3g9ncy6nY0,13350 +torch/include/ATen/ops/_cudnn_rnn_backward.h,sha256=K466IQyH4JgJCXNVeKCW5FixO17YRhuMQmX0DU5ObD8,16465 +torch/include/ATen/ops/_cudnn_rnn_backward_compositeexplicitautograd_dispatch.h,sha256=IgcQrPSoDVCRG9EpPIraKh_lC2aNDFGHshu7nktZPI0,3682 +torch/include/ATen/ops/_cudnn_rnn_backward_cuda_dispatch.h,sha256=PW8ltTcrVtdtjWkT1vDiJgSAzrxwL4OQSye4gqEls90,2125 +torch/include/ATen/ops/_cudnn_rnn_backward_native.h,sha256=XLvwxCR4VG8G0dxHrqwgkUAKCp2zIGplrqwIouijNao,1893 +torch/include/ATen/ops/_cudnn_rnn_backward_ops.h,sha256=cHk8ECIpBMdrMJekLJ1OlZm69KH4ONgxQnh870yrSM4,6004 +torch/include/ATen/ops/_cudnn_rnn_compositeexplicitautograd_dispatch.h,sha256=0mwuzcdLNTgiV_lUP7OcBqR_EEoL86gbysLiaM8x6vw,3166 +torch/include/ATen/ops/_cudnn_rnn_cuda_dispatch.h,sha256=dQ-OzbKvKz4b0aed5TnsRj_5IpahCw0wGXPx2L-9W_w,1677 +torch/include/ATen/ops/_cudnn_rnn_flatten_weight.h,sha256=Ckn2F8Y4jk9XT0wa4e4ZeEZzPml8LagPK3tZpobp9Vo,7591 +torch/include/ATen/ops/_cudnn_rnn_flatten_weight_compositeexplicitautograd_dispatch.h,sha256=BJVzLpteJFddpkcwqbtzzRXZ84oFT0UJcFtR1PVeLes,1770 +torch/include/ATen/ops/_cudnn_rnn_flatten_weight_cuda_dispatch.h,sha256=u_pJP2-G8x-7euEaNl-8FsKjjViffuFU4hfxz6INzDE,1149 +torch/include/ATen/ops/_cudnn_rnn_flatten_weight_native.h,sha256=W0YH5fevS4PUvPePNHadPCtzVW_1zYTnfAy7uU0RFUo,927 +torch/include/ATen/ops/_cudnn_rnn_flatten_weight_ops.h,sha256=piLnvsvAkss19dXLptgCBP-zpC79Adyi1E8bcNqRYGw,2830 
+torch/include/ATen/ops/_cudnn_rnn_native.h,sha256=9fdbo_0aQZiUSVPRs4kYUKWseZ9u6oa6gIBrn8GoyvM,1540 +torch/include/ATen/ops/_cudnn_rnn_ops.h,sha256=nISJWBO2G1zHbZzBmLtDAg5m7cv6Ihxmgr-OJ6oQe7Q,4917 +torch/include/ATen/ops/_cufft_clear_plan_cache.h,sha256=mevPCON1-3qNebHSQovypdBo0dMlIYlI0X8ffiYUwxU,698 +torch/include/ATen/ops/_cufft_clear_plan_cache_compositeimplicitautograd_dispatch.h,sha256=_zAa6vTMd_tkB4mM8NvpiJxDMeaPhLWa_uUW8YjFjdI,780 +torch/include/ATen/ops/_cufft_clear_plan_cache_native.h,sha256=W7jOmBaN3E8ekKb9MGLVPU92LPEvCKuMlO7mFK9BBrQ,492 +torch/include/ATen/ops/_cufft_clear_plan_cache_ops.h,sha256=YKfi8SSCV3rmq6IcDJ0Jj2fr4bgQzoyeS0qS21yncrw,1016 +torch/include/ATen/ops/_cufft_get_plan_cache_max_size.h,sha256=MYVh-vOssp81Y1iWaPgEv3pM5o3B8FbyNU1pZ40v47Y,730 +torch/include/ATen/ops/_cufft_get_plan_cache_max_size_compositeimplicitautograd_dispatch.h,sha256=lRuQnwaWw3MyqCJVobNl1rkQAGvo56gpvfXlrC2-eOc,790 +torch/include/ATen/ops/_cufft_get_plan_cache_max_size_native.h,sha256=aw0Y347xlTpzwjJjZcbkclIK_bDtAnNGCEaa4ck0BC4,502 +torch/include/ATen/ops/_cufft_get_plan_cache_max_size_ops.h,sha256=GUp6B3O3eEa6tYxKlqsL3ZEUzXkzxBk50GQiDvHVgBQ,1047 +torch/include/ATen/ops/_cufft_get_plan_cache_size.h,sha256=Cwmb9AzCesvR6E3GwZp25t8y44GWhx-ZOJSp5Jvnhuk,714 +torch/include/ATen/ops/_cufft_get_plan_cache_size_compositeimplicitautograd_dispatch.h,sha256=2BTxfDzNqtSojYHHw8B7ccMCU592V4JhR_vjt4kdReQ,786 +torch/include/ATen/ops/_cufft_get_plan_cache_size_native.h,sha256=BlhmfT8Nu8TDewcIY-eEl6sKhjjApWkXDA6gWimbJL0,498 +torch/include/ATen/ops/_cufft_get_plan_cache_size_ops.h,sha256=CJewpscUeht6p57VzAzf4CbJYjz6ANr6wPhjhrGcPtg,1035 +torch/include/ATen/ops/_cufft_set_plan_cache_max_size.h,sha256=_uKpY4zzGigBpllheAPCtCyC0I_9irjBoqmup3cONe8,768 +torch/include/ATen/ops/_cufft_set_plan_cache_max_size_compositeimplicitautograd_dispatch.h,sha256=DfuJklbzjdJcVOG2ZhX0tCmFNjAy1ysloO0GlJQGO3E,805 
+torch/include/ATen/ops/_cufft_set_plan_cache_max_size_native.h,sha256=IwhvXxqeYR-s18u_jgLviCZlmGdnSukvAtL3-EXZWqY,517 +torch/include/ATen/ops/_cufft_set_plan_cache_max_size_ops.h,sha256=7PNnKjWPX2msB8tTvtWcYmKxjED-4724nul0sz84s_w,1096 +torch/include/ATen/ops/_cummax_helper.h,sha256=kzcpvsm9StFMGHjmB-wRS2Uumk0jw-ciMtnHEMJucbY,762 +torch/include/ATen/ops/_cummax_helper_cpu_dispatch.h,sha256=prmNk9iXFh9bhJf5CY68Zj8DCMOamkPXTtYMwG-72-s,778 +torch/include/ATen/ops/_cummax_helper_cuda_dispatch.h,sha256=HnvH5-1VDTYq2XZxb043--mDlJhnHHq6Cc2mEdNv3x4,780 +torch/include/ATen/ops/_cummax_helper_native.h,sha256=iE9VBIYt5HUfa7l6ibKkzI2ZW1uaxEXmd58ECRoHyNg,653 +torch/include/ATen/ops/_cummax_helper_ops.h,sha256=6D2L3WyYXhuSo4zCD2yIY6crxFC3nb71azQCZtg1jKQ,1166 +torch/include/ATen/ops/_cummin_helper.h,sha256=w6JKDCTZirdb2mVA0aJV3gcTnnLlV9AssPWkv4WIK-w,762 +torch/include/ATen/ops/_cummin_helper_cpu_dispatch.h,sha256=tcfCKkNvNnUVesdZAJKnoXZJzgPG02ycOCT41lgjV_w,778 +torch/include/ATen/ops/_cummin_helper_cuda_dispatch.h,sha256=aBX8KYjRtYg-DAyII4U9UAtlNWve1xRmJADhYs5VV6U,780 +torch/include/ATen/ops/_cummin_helper_native.h,sha256=wqvnTbOfO_t0TIWyYSMx4G5tbF5D7T0fiOkDFpXgisA,653 +torch/include/ATen/ops/_cummin_helper_ops.h,sha256=nsa3GtmHcJIthSOJ7Njf0b4c0YLW45ebGTkDa1mxI0Q,1166 +torch/include/ATen/ops/_debug_has_internal_overlap.h,sha256=B-kAdz6Q9KAzNkCANwYnTQf-448GQbVGimvXoKYt6u4,692 +torch/include/ATen/ops/_debug_has_internal_overlap_compositeimplicitautograd_dispatch.h,sha256=sV2dyBa-zqA9LrUeTsAbnaeNUNYiT4-EiVLehG_l-wI,782 +torch/include/ATen/ops/_debug_has_internal_overlap_native.h,sha256=-GZfKurfbjoDx8Ew2X2IvC68VqZNeynxwHfnr3jPMP4,494 +torch/include/ATen/ops/_debug_has_internal_overlap_ops.h,sha256=HreuQ0EXOMjbhA0nlJkIr0HKeqFBGIpL2zzYdX_m5dE,1018 +torch/include/ATen/ops/_dimI.h,sha256=7K5fLpq_nsW3Cdq_vys8UIK3rAV2OEA3W-ii4_f9hyw,479 +torch/include/ATen/ops/_dimI_native.h,sha256=zgF8ABsT9qRbiePds6fpHuA_WucV8y3L6T7Uy3PdMWY,484 
+torch/include/ATen/ops/_dimI_ops.h,sha256=2jlhrQAoabzLHQVhpmhLfVYisEe3tElfWo0CLsj39fg,952 +torch/include/ATen/ops/_dimV.h,sha256=ekf8xPdY3UQW8ImGc7juFTs4QJKJUBz3HBWas0Zw19Y,479 +torch/include/ATen/ops/_dimV_native.h,sha256=XNF8GomSmRE43VR_uiXbK6Ogn4z5emZt-nWyx_LdwIo,483 +torch/include/ATen/ops/_dimV_ops.h,sha256=jA9e_0Je5RHaHERwaGCYQbhPkrDd78e2N4crjIV0cKk,952 +torch/include/ATen/ops/_dim_arange.h,sha256=R4W-z_afh1szuvr5AOJKUVuXILvg4uThLZ2tghbTgoI,661 +torch/include/ATen/ops/_dim_arange_compositeimplicitautograd_dispatch.h,sha256=1Ypjbef56celQqCM8-Zaz3dnCH7w8xwEWxd7d3E-5Xs,782 +torch/include/ATen/ops/_dim_arange_native.h,sha256=kqKNTolvuBJhvlud4jyMKxWZBjzW-XkcoO4ibOWsRgI,494 +torch/include/ATen/ops/_dim_arange_ops.h,sha256=kK6Jqw-bD8MTP_g7jS-HEDR5zl_kX2FalXx2rTgKq2g,1026 +torch/include/ATen/ops/_dirichlet_grad.h,sha256=mY97YKJcQf5lpptnKLklmL8-k67uMzZObVhdno9eYrs,1349 +torch/include/ATen/ops/_dirichlet_grad_compositeexplicitautograd_dispatch.h,sha256=CJ2r5c7rwdF-oFbGBds_bkpj6ckaPTexp-q9iJcUeqg,983 +torch/include/ATen/ops/_dirichlet_grad_cpu_dispatch.h,sha256=2f4DEVNY5upltnWRxuG0UJ4qXW28myAnIerYbWhc6UY,778 +torch/include/ATen/ops/_dirichlet_grad_cuda_dispatch.h,sha256=BrBAlWqCoBf1imK-MlVkXdXVRWWDOBV9JzC8VDuKpko,780 +torch/include/ATen/ops/_dirichlet_grad_native.h,sha256=M3mDvOle-3R6-o16ZA8MiWQFBdnAX1f4yK0czZNe46c,791 +torch/include/ATen/ops/_dirichlet_grad_ops.h,sha256=YJrC3s14SxTZ1ASD08xyvLZnK6kUuwfVvV1Ea6hrw7g,1970 +torch/include/ATen/ops/_efficient_attention_backward.h,sha256=VfQsy-f4iXk1khuRD6OEnS-cJx3-GDi6pWBqREAeFy8,5971 +torch/include/ATen/ops/_efficient_attention_backward_cuda_dispatch.h,sha256=dzBXCi6xksLpYJ9e9kncn6dzroxMP3_sRf0xQJrw08U,2221 +torch/include/ATen/ops/_efficient_attention_backward_native.h,sha256=bjZrrVSeQK_LJhkr75h5TGEOWPEZstAe75VMoXXxIdo,1191 +torch/include/ATen/ops/_efficient_attention_backward_ops.h,sha256=WTycEu4JuhEYwD6YTTrfL636ECbgK0JhmIKrGLkedlk,3176 
+torch/include/ATen/ops/_efficient_attention_forward.h,sha256=HeecbbUSt0jmtNyEqL27_Jq0lg8UzHMSuSgnOu7gSYE,5392 +torch/include/ATen/ops/_efficient_attention_forward_cuda_dispatch.h,sha256=TTj_QreJPc9OZgL-AwGfxL1Fh95An9dnmo9S6RzXTSE,1965 +torch/include/ATen/ops/_efficient_attention_forward_native.h,sha256=Ot7CG4uvOYbLnSd6Rwhg5Zg2Dy4pLpLIf-a-0N064d0,1063 +torch/include/ATen/ops/_efficient_attention_forward_ops.h,sha256=fmxW5TG-4cx4pWZTOjg-gWqPF4udjhzHpF9wqw_ypKk,2884 +torch/include/ATen/ops/_efficientzerotensor.h,sha256=csLcWfmLl7rG8O_7ECQeNTiuKEmcHTDsbOt6ekMHpw0,6057 +torch/include/ATen/ops/_efficientzerotensor_compositeexplicitautograd_dispatch.h,sha256=IUaSge0k6HkAyRavA8GukdP5Lewhrtum9tbYAMlImKs,1090 +torch/include/ATen/ops/_efficientzerotensor_cpu_dispatch.h,sha256=jGP_omHfJMKrOtOSlXTAqs2LXrn8ahiwOL5jTzSJjpc,1298 +torch/include/ATen/ops/_efficientzerotensor_cuda_dispatch.h,sha256=4HkRcxYMlLqlUFXGKeXA3yNpExbkr4Xm8lOm1p63w8A,1300 +torch/include/ATen/ops/_efficientzerotensor_meta_dispatch.h,sha256=ala4R6WhoZGTgr3l2w8F1Hhf02kFWQIGfoBl9Xv5mmE,1300 +torch/include/ATen/ops/_efficientzerotensor_native.h,sha256=bumMr6huCWDBYU5cCXLkOruZ9e5O3SxfmB_UnNPHaWo,1209 +torch/include/ATen/ops/_efficientzerotensor_ops.h,sha256=UfgqiGt0azSCUa8lmy8aaGLOSH9x91kgABQ80Viy-Cg,2180 +torch/include/ATen/ops/_embedding_bag.h,sha256=ynr8jBmFwghnpObmhTWrZQGsZCE3B0LKj_8FYfxlmjA,3155 +torch/include/ATen/ops/_embedding_bag_backward.h,sha256=3X2GdP-6OJIJCE7eTNoaFR92m9NXywGOmuFFGdb07X4,3537 +torch/include/ATen/ops/_embedding_bag_backward_cpu_dispatch.h,sha256=45iHUyDese2X7uptuAS0tKYk_Gj17O6r07FlfuB_JKA,1429 +torch/include/ATen/ops/_embedding_bag_backward_cuda_dispatch.h,sha256=XhiFNBWLRELJIE9i9KtvOo5IB4qyK4D_M3ZBUEsFcDY,1431 +torch/include/ATen/ops/_embedding_bag_backward_native.h,sha256=zH8o6kn7DhjpZcl699XDomEujzVBT51iwY80fvD4NZA,809 +torch/include/ATen/ops/_embedding_bag_backward_ops.h,sha256=KQ_UeokofB3VPbu2NVQBq4sASNb2stiXhGoj2_Ejst0,2018 
+torch/include/ATen/ops/_embedding_bag_compositeexplicitautograd_dispatch.h,sha256=QuNQSnHR5iTZvJWKIgx0vPrz6QAfhlT6Q5HPUlVOeYg,1557 +torch/include/ATen/ops/_embedding_bag_cpu_dispatch.h,sha256=TOt0M4kLEjroHtBKm6F-yzZ7etExtkME038pnat2s5g,1014 +torch/include/ATen/ops/_embedding_bag_cuda_dispatch.h,sha256=85YmHY7AuYkNEoBmrFY8G3rP4udNPQmoC3DvkK-CELM,1016 +torch/include/ATen/ops/_embedding_bag_dense_backward.h,sha256=QQpm32E9y7t-ljVT6exByzkEr3LrSQ24msA2wI2a1pQ,9299 +torch/include/ATen/ops/_embedding_bag_dense_backward_compositeexplicitautograd_dispatch.h,sha256=kCzTFP-YRMJJu7VEdbYrmcO51TcACTJB1gt9yfdRS6E,2188 +torch/include/ATen/ops/_embedding_bag_dense_backward_cpu_dispatch.h,sha256=XuPJExOck6RBEh-gr7ySK86ocCQ0ivOHqdAk84-j7IU,1359 +torch/include/ATen/ops/_embedding_bag_dense_backward_cuda_dispatch.h,sha256=eGWimdFJBSovKr5IwnjAjyyCJuySBh7-Xr-igWp7byw,1361 +torch/include/ATen/ops/_embedding_bag_dense_backward_native.h,sha256=HQ-uE_7LUW29hR28XVQsw2E1J-sGWMpUwCFF4Z7Ciac,1486 +torch/include/ATen/ops/_embedding_bag_dense_backward_ops.h,sha256=Ltb6LEn7ZfI81eKjuHEqF4vQ_fOrj7O67hzyLhWpn5s,3454 +torch/include/ATen/ops/_embedding_bag_forward_only.h,sha256=YZhvfWlHqcprRffB7CsuGawxVR0Lf0mAt6UpHqmMXVM,3285 +torch/include/ATen/ops/_embedding_bag_forward_only_compositeexplicitautograd_dispatch.h,sha256=TaRMr51VIEw7Ohi_k_1Uu7KXtcyPj_3hduSa9EkTnb8,1583 +torch/include/ATen/ops/_embedding_bag_forward_only_cpu_dispatch.h,sha256=M9JFujbhLEQ1n7aHtMx2jV37HDTr7Qgs53XLYcrtxL0,1027 +torch/include/ATen/ops/_embedding_bag_forward_only_cuda_dispatch.h,sha256=iWskHRVPDV86z6YBRC2GeUZgI4sWhIvak2SoBwdDB5c,1029 +torch/include/ATen/ops/_embedding_bag_forward_only_native.h,sha256=wBylZrfB-MWhoZP118B7IMyw-gU4tMqfwYrD27zTBsQ,1576 +torch/include/ATen/ops/_embedding_bag_forward_only_ops.h,sha256=N7eEXdAUDBagO5eGPeVClu8hc_psW0JgybA_5tIJSI0,3730 +torch/include/ATen/ops/_embedding_bag_native.h,sha256=8oIqwVJZcPvihObgJPtnHBwW_--hIr62cxT0gUHysnE,1537 
+torch/include/ATen/ops/_embedding_bag_ops.h,sha256=wjZS9-Syck_zNgYjQJAgVtc4Ut_JLRq7__7JRS9IbIs,3652 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward.h,sha256=-6vZRcuIUd4R36LU0sxiy8_-2ER4LeKmyiKDwXbqPOo,2276 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_compositeexplicitautograd_dispatch.h,sha256=htwTEtG3JsmKY1-yJ-RwrPYBh8WFL663lORtOfsmlHc,1240 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_cpu_dispatch.h,sha256=S_JX79qmZVij_rqqXdyOvOulHpWc-B7Pg6VFFoxmJfM,908 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_cuda_dispatch.h,sha256=3CIHmD-DAh_D-BG1xVGmdS3i37JOJ4HV_xxZoIS6yng,910 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_native.h,sha256=GLSkBhMfv1L0denm8NynKlmVxwxu3t1Dy0yE4VVRFNE,1178 +torch/include/ATen/ops/_embedding_bag_per_sample_weights_backward_ops.h,sha256=CVUJBxwey2KeARfaGuVGYQnyiVlbOs0WLtEDpUF3zLk,2790 +torch/include/ATen/ops/_embedding_bag_sparse_backward.h,sha256=Mc-OMx4g-IB7n5lHwbdA9QAXJQ3rkLPrsuj4TuhvwT8,3244 +torch/include/ATen/ops/_embedding_bag_sparse_backward_compositeimplicitautograd_dispatch.h,sha256=OmKJGrCFMnxhQ9GIirCKXtDwNmrANgCnzZyvWQ5HQ58,1389 +torch/include/ATen/ops/_embedding_bag_sparse_backward_native.h,sha256=BiTcSV8AhPqBZ7g00JpECY-w_f-Ny18KQyCxVgKwSsg,767 +torch/include/ATen/ops/_embedding_bag_sparse_backward_ops.h,sha256=Xvw3o5Yp1EPj5B-TnPE7qUjUUG-nsa4g6GDCt5mmstU,1878 +torch/include/ATen/ops/_empty_affine_quantized.h,sha256=L2a8Z-QEKCsSOqg-ob-4fkHHU5qM-xsa-w5uNoCkVOk,9236 +torch/include/ATen/ops/_empty_affine_quantized_compositeexplicitautograd_dispatch.h,sha256=59U7BWrmPuI9hkZiuL6lHDI74lPUpFDPCt_1G4e71ag,1502 +torch/include/ATen/ops/_empty_affine_quantized_cpu_dispatch.h,sha256=Wz9Bm0Kh8i6mcwkaD6qUt_tZqXelmZi3tVWveRr9t08,1710 +torch/include/ATen/ops/_empty_affine_quantized_native.h,sha256=6CZ9taBBbqsixJzjAVW6Ua_6jbJUnN5g8OqctyJSZ9M,1310 
+torch/include/ATen/ops/_empty_affine_quantized_ops.h,sha256=w0l7xGfH9v07x50LuKKq93MLyNtnMuwekPdIVP8yXJI,2794 +torch/include/ATen/ops/_empty_per_channel_affine_quantized.h,sha256=5G3-kWTULANfXiOFvniCWqIWkD3iRwFRp895sqHJsro,10544 +torch/include/ATen/ops/_empty_per_channel_affine_quantized_compositeexplicitautograd_dispatch.h,sha256=AGOweF2ztro8EMejc9s0q_hQPjtQywsomTVbRyP6jP0,1698 +torch/include/ATen/ops/_empty_per_channel_affine_quantized_cpu_dispatch.h,sha256=2kxVlZ5fSUwSoGUYH5HoKsmoyPXLPB6eJQY8ErTQyR4,1906 +torch/include/ATen/ops/_empty_per_channel_affine_quantized_native.h,sha256=QiC_c-ZCfKnbwSXRWajo4qO7j3ULiPq2jPj4nBKSXxo,1455 +torch/include/ATen/ops/_empty_per_channel_affine_quantized_ops.h,sha256=2IpYay-J5zT3puy89yO_29FEZ7KA31sOCCxzUFXuc6k,3110 +torch/include/ATen/ops/_euclidean_dist.h,sha256=v3YATWF1AM7Kq9pdG_JRHRnwzN3f2p-BnF07DpBH9fk,1190 +torch/include/ATen/ops/_euclidean_dist_compositeexplicitautograd_dispatch.h,sha256=uHbSXlrLcuYEdrGJzbMZ2-aFWm3wWfMuBxhznBYuML0,1011 +torch/include/ATen/ops/_euclidean_dist_native.h,sha256=2gouzk59lCiuBL1xx4iuVV_xAaIUdWObrll8CCHJMtY,614 +torch/include/ATen/ops/_euclidean_dist_ops.h,sha256=WITR106CHZym5f_xwfZm33PxgcLPLP-yi0TCb5vHnuc,1786 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine.h,sha256=nIXtJoHvZ55dTBqq2WrhDGF0NvFUFNNVJ-IsWawFOpM,2237 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward.h,sha256=99dY9RfnEFle2VCd_z1VjG1G0sntbC7tZj_nymcZHck,1185 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_cpu_dispatch.h,sha256=Z3HjXf18jEHoIFJ_3ffWCsJKEn7UAjPv6CAD3SPXYiA,960 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_cuda_dispatch.h,sha256=twJrFK6zwJkcIxRZmi338cEPADDp4hCVc-M_SB4_TrI,962 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_native.h,sha256=BAby9eMYxiV1XUbKEP9NRYO3Xc7JMJQn550BO9bdRSE,716 
+torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_ops.h,sha256=ZXaoL9WR60U8Tz1ZSrn8XgOMJBS78ddNBwaSo_5IWF4,1743 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_compositeexplicitautograd_dispatch.h,sha256=WYeo_28S6rERoId5--EQBXmtM4k3hplRm4bNPOM-V2Q,1203 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_cpu_dispatch.h,sha256=5wJlQTF9DVnZDji9DT24FEjrq2PYFn-OnPeCIRbR2zI,890 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_cuda_dispatch.h,sha256=a7OHKzFejNVNgsmcz8Xh36fTFjOVGp9pjxzKSDxoXnk,892 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_native.h,sha256=orXxT81SHYeqwmJ7p9yl7PKPc6mBQh8bNNUaUBpLU0U,890 +torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_ops.h,sha256=YarvXMGA_gRcz1HuuHIiUcln0FLpmjLoqPAUXNB9IyU,2670 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine.h,sha256=ER9-gKllkRz2PsMGREHPywxLVQ_pO1WIX9I5vnPm88I,2137 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_backward.h,sha256=kpvMc8TlUJC5mJHe4QN7bhtk6WXG0blHuajXy8j_niI,1151 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_backward_cpu_dispatch.h,sha256=1ztJyBf1_t1uUSpn29ubEBpTJKE2Zca02ZUKYE82C7k,945 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_backward_cuda_dispatch.h,sha256=eZR2vo-E9FDxhwwI3KLVbh4D6d3wVg06UmhlamH8clQ,947 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_backward_native.h,sha256=Vfq-mm8oRaOlN8Sh15trWpvFk0FUSWf7ZQbS_xEajz8,701 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_backward_ops.h,sha256=5pf8zMuK5J9T9oBV8oKi2MTIfAYbwIVMajEBaAal-Lc,1693 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_compositeexplicitautograd_dispatch.h,sha256=psAB59dp84lyDfVPrtHMC29sZg1RAFuUyrxaKkfnBrs,1173 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_cpu_dispatch.h,sha256=gMuPyptXG0J8p46sumiDlFc0LQtauT2QrT14gy8Te-8,875 
+torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_cuda_dispatch.h,sha256=pUwXpNO6buFyLj_9szcG_E1_jBCt4p30qjPpS0Y9CLU,877 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_native.h,sha256=WRE9Se2gH9rvWK_6OhtD3ji7HAWIJHsfOe1eGffXCT0,860 +torch/include/ATen/ops/_fake_quantize_learnable_per_tensor_affine_ops.h,sha256=2tFFHJ6l_IB4BE0aR0nSd0Tm09zlhUYBw0uRZQSG4Gg,2570 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams.h,sha256=vc4mUwR5eQI9bUYQKw3Wz4mUwR75eAThjL_iwkMUstQ,2588 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams_compositeexplicitautograd_dispatch.h,sha256=G5OP8iiLIZOAtseqIUdRq7HbGIRcrF6DFuePGY4AV40,1331 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams_cpu_dispatch.h,sha256=J_VRqE9ZWG3vx8oVLeGwZFnogRnt-S2zRujG6wBH-eo,930 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams_cuda_dispatch.h,sha256=oLkSNaG5PCAs0XaF6gK4-Y2yAfcfYnqFYKiYWjHOgN8,932 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams_native.h,sha256=FdhtKbzoEPRvlWRNxmQFURlrppTD1JszzSdSzMtin7U,996 +torch/include/ATen/ops/_fake_quantize_per_tensor_affine_cachemask_tensor_qparams_ops.h,sha256=0eWG7s-ukjipa79qKN7VBmuxkUq3yx4z-TU37x4uoFU,3032 +torch/include/ATen/ops/_fft_c2c.h,sha256=1_wC7wcdylIJxaZ5UjN7Pv755JNPhJJk5kz8c7P6-KY,4460 +torch/include/ATen/ops/_fft_c2c_cpu_dispatch.h,sha256=vG9P7rhfhPiolRBiKFzwCCkPX4hgWueqCsM5Q6a9Axw,1481 +torch/include/ATen/ops/_fft_c2c_cuda_dispatch.h,sha256=4sTsoNuL94y-DfqqkHhUhdvfdpjVHRrqSo89XUxSD64,1483 +torch/include/ATen/ops/_fft_c2c_native.h,sha256=aOnyVnsgGbd7VNglnlwlzZgxIZGVSZr_8vK_pErxOc8,946 +torch/include/ATen/ops/_fft_c2c_ops.h,sha256=X2Ai_0boNnis_V30cbVS1B4-xBD9cn2bas7uFBVeIj4,2016 +torch/include/ATen/ops/_fft_c2r.h,sha256=GZbyc_X8bFlxSY8AegWJr543FwysCxTrTL7q9MA4tc4,4514 +torch/include/ATen/ops/_fft_c2r_cpu_dispatch.h,sha256=0E-hSl-YBGNvGnikAJme9itp-dWOoPtWEOq4jzQpXgw,1535 
+torch/include/ATen/ops/_fft_c2r_cuda_dispatch.h,sha256=hbgk4cmX0ZIPHBW3Uk5vSB-_WFCZz776XzLaA7qDMDI,1537 +torch/include/ATen/ops/_fft_c2r_native.h,sha256=xFVk7wmWlm9yVjxn1m2UWNhq0lu6TkUGoHqi7SxiDWE,982 +torch/include/ATen/ops/_fft_c2r_ops.h,sha256=nRXWNnbq51UwwrgsNjnb83U8ph3bJdQKhpok4Ayu5OY,2068 +torch/include/ATen/ops/_fft_r2c.h,sha256=bjzKHraoPk-9Sk7XkWVYlWaxGJ5Q5wQnXeVtTzHREt0,1426 +torch/include/ATen/ops/_fft_r2c_cpu_dispatch.h,sha256=HCV6m89fdPaz42FaYguyEBeVK3uSMVbDVu37R8al-9g,1060 +torch/include/ATen/ops/_fft_r2c_cuda_dispatch.h,sha256=bUkhmkjwaLBYCqKYZilmEX_UlvprzUKiQlbUlwKeNVI,1062 +torch/include/ATen/ops/_fft_r2c_native.h,sha256=S1s7VaZhPfDKiTDC5delfzfZ0jCwt_uBv1l1RWUNV_g,950 +torch/include/ATen/ops/_fft_r2c_ops.h,sha256=RSHsuE0nkGW7DpwyXhSyjXVyxHwcQVRgwjiFzLZZpKY,1992 +torch/include/ATen/ops/_fill_mem_eff_dropout_mask.h,sha256=sP76KFbD4z9oL4gxhDs05FG0VSLl-uYXhTXj02UEBX8,813 +torch/include/ATen/ops/_fill_mem_eff_dropout_mask_cuda_dispatch.h,sha256=vQYSybPCXC_d0UdKdq6dT_VPS3g3GTehIGDktXx3_QU,787 +torch/include/ATen/ops/_fill_mem_eff_dropout_mask_meta_dispatch.h,sha256=_-mUYXvW1VRaAGykMy9tiEaL4qhpENa_etemoRI8Y6s,787 +torch/include/ATen/ops/_fill_mem_eff_dropout_mask_native.h,sha256=PKn_W_mxp3fwuNmAkuUHagtyPffkSGLFrRAQclOJnqc,541 +torch/include/ATen/ops/_fill_mem_eff_dropout_mask_ops.h,sha256=vuE159p5NIkQsQzY2aKC1B_zsYYZUmHmsB2uZDHjkNo,1187 +torch/include/ATen/ops/_flash_attention_backward.h,sha256=BzCwwQCiHtTUY9ehSzv3jE9yr7v9KNFfFzJ8vyBbpiU,5137 +torch/include/ATen/ops/_flash_attention_backward_cuda_dispatch.h,sha256=PZK-_ujvdImgRZGsdizCsJQ6JpdT5EhSMsPgocCSkKQ,1883 +torch/include/ATen/ops/_flash_attention_backward_native.h,sha256=y6sGxx9S6knDfUnY416eyyvjR4OgvIJ9KCqNC6gmJHk,1018 +torch/include/ATen/ops/_flash_attention_backward_ops.h,sha256=bsnChDPNA5WYs6yH9Yfd3O-h3t_zSpjP-ukCdWYq3zM,2669 +torch/include/ATen/ops/_flash_attention_forward.h,sha256=t4hBSpBB2AnsvBZ5NlFiWUCfiWPZQLMCfEQsyWL06jc,5364 
+torch/include/ATen/ops/_flash_attention_forward_cuda_dispatch.h,sha256=XjdUT1y7y0VeD-eUHL5PyQ-4jZQlYZuat7ZW6D-hYgc,1949 +torch/include/ATen/ops/_flash_attention_forward_native.h,sha256=qLZ-vT8S5S7SihpDseMg-VpNVdFNH9UlfKwhLlTkhmE,1051 +torch/include/ATen/ops/_flash_attention_forward_ops.h,sha256=XSLhpx1kH7VzopmCIZYujYHjC_9AllOW0OA-IZA5_0Y,2829 +torch/include/ATen/ops/_foobar.h,sha256=_-d3oeLEvQhqRLlZJCLkk365EB2H9-IkGARusx02ddQ,1344 +torch/include/ATen/ops/_foobar_compositeexplicitautograd_dispatch.h,sha256=eTMDwoehka6Yew01hrZlosLyhVbg2eQWEMrecSrVyaM,950 +torch/include/ATen/ops/_foobar_cpu_dispatch.h,sha256=U54bsQYe5HIg0ZNZacgqQQXBZgjbJkUrZk_zFpuULNs,769 +torch/include/ATen/ops/_foobar_native.h,sha256=7zf46jeqpcnYrILSfOWwmO6cNzS501vuFGvRAR7oMAU,636 +torch/include/ATen/ops/_foobar_ops.h,sha256=cOMnPe7YinoNEctttUuPk03UF8t_On1wFUdbdEFyJYo,1863 +torch/include/ATen/ops/_foreach_abs.h,sha256=VyyDukYO2_O3bUneppKVYn9n_-Tbzua5JL6kofP928Y,1199 +torch/include/ATen/ops/_foreach_abs_compositeexplicitautograd_dispatch.h,sha256=ea0-d6KEM2NnR8E_kcko4GTlRuqjtS2-0EB8I1TLWu8,981 +torch/include/ATen/ops/_foreach_abs_cuda_dispatch.h,sha256=5UlQXvudjhXWBNRzjmqqyHaJfUqA5ccfQ6hEJp2aT84,790 +torch/include/ATen/ops/_foreach_abs_native.h,sha256=j6j2PRBMt9OaK-_d0CvBH26GxQePuKt9C_mpTyDrQfE,784 +torch/include/ATen/ops/_foreach_abs_ops.h,sha256=wUmVk6JwMj2JHFTgD6Fdqls5_NDq7uKqlkkWRuzoKvY,2155 +torch/include/ATen/ops/_foreach_acos.h,sha256=ajBftliHauCR1nntpTPRZNXBiW3a9axzWV4kJy3ONM4,1212 +torch/include/ATen/ops/_foreach_acos_compositeexplicitautograd_dispatch.h,sha256=jrQpMNRB9GT17CHqof06bsQzvvcnfGH0VZC1okd62ZE,985 +torch/include/ATen/ops/_foreach_acos_cuda_dispatch.h,sha256=NRoWwOrZzam4MFnN4nvHRPJa3a_mLnrUh_lJukCtHx8,792 +torch/include/ATen/ops/_foreach_acos_native.h,sha256=UiTX3vB_gtN5X4KIS4MfOTRM5I1o256shqy1LAGCBOw,789 +torch/include/ATen/ops/_foreach_acos_ops.h,sha256=DMVbKL1s0q1u0mOWMVauGoAz7XJy671n1asV8eUyZvo,2164 
+torch/include/ATen/ops/_foreach_add.h,sha256=TugbZ2UOliYfbbiwyEDlhEr44yy79s2ZHpJOum1nrfA,4809 +torch/include/ATen/ops/_foreach_add_compositeexplicitautograd_dispatch.h,sha256=a-XbIwCrE_SR_I5oBG8Pz-ptosXI2KOwgEyZ3MfntAk,2450 +torch/include/ATen/ops/_foreach_add_cuda_dispatch.h,sha256=ewlwgufQtICulyoxQOLsPdvUteiugdv9EQBcaeeTnDg,1486 +torch/include/ATen/ops/_foreach_add_native.h,sha256=I8AQTKHFtnZp_7BOIBZUHfadhBWIcu-jxeB8GYeVB1Q,2953 +torch/include/ATen/ops/_foreach_add_ops.h,sha256=zRLxisBOhxt9t3RZZZZkhWwR0_M99NAzI7hD_EQ8mlc,9209 +torch/include/ATen/ops/_foreach_addcdiv.h,sha256=lFLq9CAQRp_0tRfrFEbITuoR-p036_2tUR33MzBUIC4,4857 +torch/include/ATen/ops/_foreach_addcdiv_compositeexplicitautograd_dispatch.h,sha256=wjJvxJBOn1sUu6ww6yaKDeuwCTsD9o9cPGYKgyEUJJY,2505 +torch/include/ATen/ops/_foreach_addcdiv_cuda_dispatch.h,sha256=vAAK2qZphWw5PllUGaQl7vk_Tu8hHmsVsxwXw1CxJqg,1526 +torch/include/ATen/ops/_foreach_addcdiv_native.h,sha256=36buZv7yxCDm_Kvg8Tkq9ckX3H1yxqvVMpZ6-e25UQU,2861 +torch/include/ATen/ops/_foreach_addcdiv_ops.h,sha256=rgwAqRIez1hav0SyEh1aEBrzx8gHhhhFmSQnLhihJmg,8276 +torch/include/ATen/ops/_foreach_addcmul.h,sha256=UNJVVE9lQPomzK8lCy7wjH7RvjI-Mo1fP_JnIogg5Rg,4857 +torch/include/ATen/ops/_foreach_addcmul_compositeexplicitautograd_dispatch.h,sha256=j-ediaAs0Kp7iWkFdSZSeCJmH5Wmjg27ZzF5nNhxZho,2505 +torch/include/ATen/ops/_foreach_addcmul_cuda_dispatch.h,sha256=DpgTCL0YXc9a80E5UYbr52EmlCjpJL3Bdf4Q1-DTPVk,1526 +torch/include/ATen/ops/_foreach_addcmul_native.h,sha256=SK_isn09CIROWftskyfk0iD4picU_4h96p0RTZiMVG0,2861 +torch/include/ATen/ops/_foreach_addcmul_ops.h,sha256=-f-yO9LQ6BzdeSr5a2ASfcdegmkSvEZRQc5UHQP8Lhk,8276 +torch/include/ATen/ops/_foreach_asin.h,sha256=dUr_0AdMROnKySkIbIRH8fjdm2bvTKyRtDx9m-GEy9U,1212 +torch/include/ATen/ops/_foreach_asin_compositeexplicitautograd_dispatch.h,sha256=E-mGYa2Dal-OtqVuecxsOCA2vQjbFZLkRrMTLEBska0,985 +torch/include/ATen/ops/_foreach_asin_cuda_dispatch.h,sha256=38H_01JqJQ1F4EWAi94S8tbhzO92Ywy_iVJrHwQpHcE,792 
+torch/include/ATen/ops/_foreach_asin_native.h,sha256=Gqc5W7CRL1DLAgJsMdLtwNycS1SXo9rAlBRJt1qRBiA,789 +torch/include/ATen/ops/_foreach_asin_ops.h,sha256=ytj7g2vsPa9YoX5VcYe68I5bwEEYYR3GYyqZ6S8V5z8,2164 +torch/include/ATen/ops/_foreach_atan.h,sha256=TQbOHQ9j57Y4C8FvI0QaeGYBA0Ap-l3ifaNAt-tNE5w,1212 +torch/include/ATen/ops/_foreach_atan_compositeexplicitautograd_dispatch.h,sha256=6gmtuOJeV_VajZ2quEruWgTyKTdgKed00VGGlNKe60o,985 +torch/include/ATen/ops/_foreach_atan_cuda_dispatch.h,sha256=GWRjqm41W79GslWxuejYEbMBJLDVFxkcAlSz5oyjOVg,792 +torch/include/ATen/ops/_foreach_atan_native.h,sha256=0lOM9BfTt6ZzYPapE6LK3cC9MXdPy8JGtA--CKRKG-k,789 +torch/include/ATen/ops/_foreach_atan_ops.h,sha256=NJDRkDVje00uogyRWXClDQbKsYK6TnXzIQ2-p3-1OhU,2164 +torch/include/ATen/ops/_foreach_ceil.h,sha256=dk07lLS8odW0_TVjrQ7HeFZz6owruiAdY5BqSce--Mc,1212 +torch/include/ATen/ops/_foreach_ceil_compositeexplicitautograd_dispatch.h,sha256=3iWC0YLq0QGYB8zItvW162YLcy3pE95juzx9E5Vp4EU,985 +torch/include/ATen/ops/_foreach_ceil_cuda_dispatch.h,sha256=ym66WI0oTa768dUfaWaR1Z_-iGx9d6HlmaPqAvR7Avg,792 +torch/include/ATen/ops/_foreach_ceil_native.h,sha256=a__7Z2GNreC3ACVEfZ1Uh7ToveFxkdR6KKQUmhaYdgM,789 +torch/include/ATen/ops/_foreach_ceil_ops.h,sha256=u60EdsrWH10CKuK032N3kbgveNiXDpKteDMYSxTsUx0,2164 +torch/include/ATen/ops/_foreach_clamp_max.h,sha256=eZwihWBCD3Kw7XHQM4ov3GPbDSjrXUvJjiiMQ1en-q4,3657 +torch/include/ATen/ops/_foreach_clamp_max_compositeexplicitautograd_dispatch.h,sha256=C8esRwf4EwS0-mrTV32qjd5cnVGshlJncynkucBmdLk,1927 +torch/include/ATen/ops/_foreach_clamp_max_cuda_dispatch.h,sha256=kSGVSTx5UF7E3eTENGbq3cM_UYpCjXWK1Y4oZ3Sexlg,1236 +torch/include/ATen/ops/_foreach_clamp_max_native.h,sha256=VsDH7TVMsZkaJkwO3m3oCBPZ0JhLcqosW283ZNZbeVc,2212 +torch/include/ATen/ops/_foreach_clamp_max_ops.h,sha256=TNI8G4ClRi8wDkCDeKVF4Ua6oFmHoF9duzf4VmyjsCA,6791 +torch/include/ATen/ops/_foreach_clamp_min.h,sha256=Qj4wW2F1NEDPzOu6llUj4fQbNYTF8nc_V8eCrgCfVXw,3657 
+torch/include/ATen/ops/_foreach_clamp_min_compositeexplicitautograd_dispatch.h,sha256=JcjQfhNeCu9L3C3NR2JC2snpv3smHeNsZqRts24G_To,1927 +torch/include/ATen/ops/_foreach_clamp_min_cuda_dispatch.h,sha256=zFVRm881gidg0CMY-C27EZKVdVwefNs8LhPMyqwcyCU,1236 +torch/include/ATen/ops/_foreach_clamp_min_native.h,sha256=bUGQ0aOkVOiAQFovlwDZ1PolbfKwRdqDKWO5Xw8aPr4,2212 +torch/include/ATen/ops/_foreach_clamp_min_ops.h,sha256=MVcdXyM_TQW5MgKVi3ZVN-_Bxfmgjd-_w_Vlx3Xco50,6791 +torch/include/ATen/ops/_foreach_copy.h,sha256=xCXrC8IDhS9HdQYbdgbyDD3HqicM4mDpUDh4zPonssY,1627 +torch/include/ATen/ops/_foreach_copy_compositeexplicitautograd_dispatch.h,sha256=obrrBt5AFt80SOwzITSdc4EYLE1dRTQ_OaEacTnHEno,1159 +torch/include/ATen/ops/_foreach_copy_cuda_dispatch.h,sha256=5T0gDhiArLSHK0k0eaNLeMICjZlZ4EzU_XrbZ8MYyVU,765 +torch/include/ATen/ops/_foreach_copy_native.h,sha256=gazc_Xke29hXGcgAaFrWp-2I9dJu-J5IfPYroR0uMq0,893 +torch/include/ATen/ops/_foreach_copy_ops.h,sha256=NWl5BCVIfoj2hjIewFfSR0IU90LfPr9b8oFK4Tx-PeI,2590 +torch/include/ATen/ops/_foreach_cos.h,sha256=MwlOgy_EDmieADtjT5cDzgy0LL4SS3NX5TfjnGrm21Y,1199 +torch/include/ATen/ops/_foreach_cos_compositeexplicitautograd_dispatch.h,sha256=fVCVu5MBC3cXgDCjwbshYkOLMTayFDubavCBwJAv1OQ,981 +torch/include/ATen/ops/_foreach_cos_cuda_dispatch.h,sha256=GsjGjHE-sZgyew1RPiPD93Wf4Q6ZK-i9lIZCqaYn4pk,790 +torch/include/ATen/ops/_foreach_cos_native.h,sha256=RBDL9CtuXDtUfesXcZsZ7qqEP7Ij3allTsCpHewXEJw,784 +torch/include/ATen/ops/_foreach_cos_ops.h,sha256=nljb1dH7C8hy8-dHX9Dx5sl40DEqixxyl6nseVPmni4,2155 +torch/include/ATen/ops/_foreach_cosh.h,sha256=WByYbdp-bY1NaiLkrJ_N5Ary7U-cwKs2ZB8m8s3b-zU,1212 +torch/include/ATen/ops/_foreach_cosh_compositeexplicitautograd_dispatch.h,sha256=_Vy1c1V-vuLBqTxD2tvjEB7hiJ1lEuhdt-TzVUxGGkw,985 +torch/include/ATen/ops/_foreach_cosh_cuda_dispatch.h,sha256=Tjd_o4u9CbccpZCl7MsO8cl8h0phD4e73t4pRf5986A,792 +torch/include/ATen/ops/_foreach_cosh_native.h,sha256=dksWlBKDflY0KBaJk6Fa39FcUxCbHOC0XjEtUPHWSvo,789 
+torch/include/ATen/ops/_foreach_cosh_ops.h,sha256=tpU_qymschWpmxpTrBF2Z9AFW4yCaPQpTf_-X4mnhVw,2164 +torch/include/ATen/ops/_foreach_div.h,sha256=ANBya-3whEo__Uig45ibNF6iyan29zqskD96HP2qPnI,4393 +torch/include/ATen/ops/_foreach_div_compositeexplicitautograd_dispatch.h,sha256=WvYo2ZXeoKBfXkXm_0XKMxri3BOEmKzzes5x_9gJ5sM,2230 +torch/include/ATen/ops/_foreach_div_cuda_dispatch.h,sha256=blhBC-UbhxiAk7kKXdRfqEsaDydYA8bXKHPwUPpoB4k,1374 +torch/include/ATen/ops/_foreach_div_native.h,sha256=CgYWpnmhobz1X1B0dbSdE700GhJMc5PsU1TdWh-CjJc,2677 +torch/include/ATen/ops/_foreach_div_ops.h,sha256=mCNsmIWAqRQOtuQ_agogLvdWD3pnzMjvBC74xr8oKUo,8669 +torch/include/ATen/ops/_foreach_erf.h,sha256=zlsmcIqVVxF8DdtmGNfXxzULVaNrRqdWOHaQLa3KXAE,1199 +torch/include/ATen/ops/_foreach_erf_compositeexplicitautograd_dispatch.h,sha256=ScUqCzLbjlvsgIyuIYoQDR9xhfbJOfqr_4Cy9eI4NIg,981 +torch/include/ATen/ops/_foreach_erf_cuda_dispatch.h,sha256=WJjVQufaliIg_rLuhVlACskdOsRiofBS6g7uxf_ZUio,790 +torch/include/ATen/ops/_foreach_erf_native.h,sha256=g6my8A0_33qrTmj8L4_MPNflTb1oiP4xodM48kyn0uo,784 +torch/include/ATen/ops/_foreach_erf_ops.h,sha256=2CZC1-00adqZlcMJVDNERi9otWhAr1WIWv43KmjzoiM,2155 +torch/include/ATen/ops/_foreach_erfc.h,sha256=FP1WQAMEZKMFQTdAp5dUMTMZBPC-B7Gti7ktJdK9v68,1212 +torch/include/ATen/ops/_foreach_erfc_compositeexplicitautograd_dispatch.h,sha256=snQnXp6-Z5zMXG7MuFE8l29xatmzabbWMofdrRJJ-ic,985 +torch/include/ATen/ops/_foreach_erfc_cuda_dispatch.h,sha256=OGrzRWlfkYHpaVDfyvoktU8UQq6QyV7xGtCUT_m4P3E,792 +torch/include/ATen/ops/_foreach_erfc_native.h,sha256=oPBlyDrlZGnHD8snD2wE06uYWTElYWfwURSVZ35vLGk,789 +torch/include/ATen/ops/_foreach_erfc_ops.h,sha256=S2xftv_I3BCkeVMAAFLCMt9CvNhwf16eQoo_P-9Hwts,2164 +torch/include/ATen/ops/_foreach_exp.h,sha256=Iz7O9T6PRanqmkH8gsABD7g9DTG_LSQWyndHeHAJE38,1199 +torch/include/ATen/ops/_foreach_exp_compositeexplicitautograd_dispatch.h,sha256=axGDWCrL-gGaEzm9XT_cdWY934Jx-RWC_dRMtVDzaCI,981 
+torch/include/ATen/ops/_foreach_exp_cuda_dispatch.h,sha256=XWlwnhSYGBPeuwbKtqdimPK8GvWeKm_dYO5iKg6iQTc,790 +torch/include/ATen/ops/_foreach_exp_native.h,sha256=qBcwuA3w7wyG4uh0EMCW3r8BG9Axw6Cqx6MBFAfQpRY,784 +torch/include/ATen/ops/_foreach_exp_ops.h,sha256=l91LX7en0foeZSLxF-AX6EK2EyVkTyE5gA-PLSWcx-Q,2155 +torch/include/ATen/ops/_foreach_expm1.h,sha256=O6cpuogbTrIhH-mG96GbsFOgH_EgR6dx96VaBH9_M58,1225 +torch/include/ATen/ops/_foreach_expm1_compositeexplicitautograd_dispatch.h,sha256=bAuhJuVXoCh1nCxirBElYAWKp4yylHvRUaOhIYm78Yk,989 +torch/include/ATen/ops/_foreach_expm1_cuda_dispatch.h,sha256=D5YmPyI30BE8PM7-_qlIfmWAXNESKCTBaqkopj5KbYQ,794 +torch/include/ATen/ops/_foreach_expm1_native.h,sha256=MFz-4ozlMNn9RJNMNlTIVVCPz5_Dbn3SWU142jffvJ8,794 +torch/include/ATen/ops/_foreach_expm1_ops.h,sha256=N3M7yJEbusLeYCU9gtOH-WP8Oy-Yry0FgRI_5a5pmp4,2173 +torch/include/ATen/ops/_foreach_floor.h,sha256=465Bkpix6_qFSpfyzs0bkhM83KI15GTP95LQSeL4npI,1225 +torch/include/ATen/ops/_foreach_floor_compositeexplicitautograd_dispatch.h,sha256=CVbSrREQnNY_H9hQFYiifp6Zv9i4fal-ikxKsMwGBSo,989 +torch/include/ATen/ops/_foreach_floor_cuda_dispatch.h,sha256=55R0Y5YKLcpj_ChzYu_4yP_RrZcRkVn5C68n8VGhg94,794 +torch/include/ATen/ops/_foreach_floor_native.h,sha256=aEq-dcl5dkWNOsL5tbAHi51MLozRV38_w7JAhnC408A,794 +torch/include/ATen/ops/_foreach_floor_ops.h,sha256=J1Dor77M1hTeXb1prePrQnVLJQQ6-kNyAM2450CiiKk,2173 +torch/include/ATen/ops/_foreach_frac.h,sha256=v8Su5U_WLWUf2MYmUwEtE87FBo_YQkzCrTHxxAFXLjQ,1212 +torch/include/ATen/ops/_foreach_frac_compositeexplicitautograd_dispatch.h,sha256=YXC4zaJ6Htl-7tieimHV-56rXeVJcFArfaKSHe2l34Y,985 +torch/include/ATen/ops/_foreach_frac_cuda_dispatch.h,sha256=IBPJo0ySAGJvURuBXiBrF2Wn_kCZKL-uIusdCA1p6iU,792 +torch/include/ATen/ops/_foreach_frac_native.h,sha256=dOCaXSExk1QHltUjJXbcSvN3cU5PrYJUTWAh7PFwMlA,789 +torch/include/ATen/ops/_foreach_frac_ops.h,sha256=dCMVAdQrsHTYGyyWNTXtFHqLQEAWjIagXeWKx4-f6V4,2164 
+torch/include/ATen/ops/_foreach_lerp.h,sha256=an7e35E3metX4yEY7OOb1dS_HHqdk4iIJwuIRHpWQtU,2870 +torch/include/ATen/ops/_foreach_lerp_compositeexplicitautograd_dispatch.h,sha256=mQFLMVQFJX0Aod_buRpoHa4ZmTv1ykxNw7fXwsqFOXg,1664 +torch/include/ATen/ops/_foreach_lerp_cuda_dispatch.h,sha256=51JMl2TR8pMMlKU8riGR7UqL61oZbeaRuQC9dn7DVa8,1118 +torch/include/ATen/ops/_foreach_lerp_native.h,sha256=5_y9t6mwXpWylBUOgDZzhEzSeTiVD_Ka3t3W0Rrf8Y0,1739 +torch/include/ATen/ops/_foreach_lerp_ops.h,sha256=2GAKYMZvcFgVNUPw3KzpqeXFSLdQUhpzhmlyH3U9DAQ,5021 +torch/include/ATen/ops/_foreach_lgamma.h,sha256=P-vjHaDzYuVSq4RPYbaEVTUOUOfhBmU9dksV1BJOqpY,1238 +torch/include/ATen/ops/_foreach_lgamma_compositeexplicitautograd_dispatch.h,sha256=G-Y70sbGXrtUWZBMPiDkWCrqoUvc2KIOqBFvMjBW0Bo,993 +torch/include/ATen/ops/_foreach_lgamma_cuda_dispatch.h,sha256=8N9U0VyHcVjoHtuRQsaKxTTzvIef3uZ4qNjcg_Tly4w,796 +torch/include/ATen/ops/_foreach_lgamma_native.h,sha256=SUmnjFyVZgTB5WVkfpMu-b4uUmlMvpeR8PrLHrKly0E,799 +torch/include/ATen/ops/_foreach_lgamma_ops.h,sha256=d35TE_PNawqSDLWrF4Y0Cg5mLVsws7mnfmwQqCBdPms,2182 +torch/include/ATen/ops/_foreach_log.h,sha256=8CjQvCndTelC2f0XVYkS8aoDDco3dwpS01hLUG2lglA,1199 +torch/include/ATen/ops/_foreach_log10.h,sha256=KLmK6gYx3u9Wx3JIkja2RRoJ1BSvmVu9AkcDH7bgBaA,1225 +torch/include/ATen/ops/_foreach_log10_compositeexplicitautograd_dispatch.h,sha256=zftQ39NdGmrBSNRxSq6YpZULw3RiT0n7KduxfSRkXoc,989 +torch/include/ATen/ops/_foreach_log10_cuda_dispatch.h,sha256=vuuyxCjXUYlHAM9JE3KOKRv7aLqAuINTDBf58pSfXS4,794 +torch/include/ATen/ops/_foreach_log10_native.h,sha256=y4CLpuy_Iuu9tGE2vcNi2yNXfyOoSFcel0Z1IfQqfcw,794 +torch/include/ATen/ops/_foreach_log10_ops.h,sha256=320j3AsE3y9dHsz3Esesw-VyQrv3c_0mdevy-hHvVfw,2173 +torch/include/ATen/ops/_foreach_log1p.h,sha256=CaDwZb4P3M63SpNNFIlUT-D-q5-em1Uv_ndcuKtL40I,1225 +torch/include/ATen/ops/_foreach_log1p_compositeexplicitautograd_dispatch.h,sha256=qvtQo3yEmIVfYTDryT9XeosFvTlBbl_Is_9py8xZesw,989 
+torch/include/ATen/ops/_foreach_log1p_cuda_dispatch.h,sha256=eJGg9QKhLOU6lTcUwbFci09U7R8m2u1rIhjf_wlC9fk,794 +torch/include/ATen/ops/_foreach_log1p_native.h,sha256=anCejA0oSpABgLDTUP_nSYKQ4kJZcmwEHo5hLvq48Pk,794 +torch/include/ATen/ops/_foreach_log1p_ops.h,sha256=5B0surp5W0PRpRIBzkoQBMy1JxxxSjUQn2GtKEpOSB4,2173 +torch/include/ATen/ops/_foreach_log2.h,sha256=8ZuC0KfC0ZrmFHjIGIXBxkkvin_nHYs4zXWzRwjruzI,1212 +torch/include/ATen/ops/_foreach_log2_compositeexplicitautograd_dispatch.h,sha256=jD7hKl_4czBvV7pSx__dwNaf1DzL_L69wZGyhyve8Xc,985 +torch/include/ATen/ops/_foreach_log2_cuda_dispatch.h,sha256=oOUXvy7zsR-xVGIya-T5mYfdE_VcwwFksrPBIb1VzhY,792 +torch/include/ATen/ops/_foreach_log2_native.h,sha256=DCk6SFGqASEtuBEBm77348lxVkRaIEl7F7iOcpq1wT0,789 +torch/include/ATen/ops/_foreach_log2_ops.h,sha256=XbVINpE8g7MlxhwyCn6o0DlNnCdbAjYgewrGbjC32Xw,2164 +torch/include/ATen/ops/_foreach_log_compositeexplicitautograd_dispatch.h,sha256=2g39xAsehwKUPcCkzbXUXOcDCjMffyy-Wvw4t54tyMw,981 +torch/include/ATen/ops/_foreach_log_cuda_dispatch.h,sha256=O7Cgtxe9NkYqZRutywEMbRqxvKUpQtikji0HI-55X7g,790 +torch/include/ATen/ops/_foreach_log_native.h,sha256=a4-WrfcTxKHDJH-IU_mhlrsdbPxVP3AvfuqA_uVNndI,784 +torch/include/ATen/ops/_foreach_log_ops.h,sha256=YbcSr87EdP1cP0yI81fIIaECsdLeZ8VNcc5AQo4sXm0,2155 +torch/include/ATen/ops/_foreach_max.h,sha256=i7h3MGAp3RSzsmF1VqtrH_30-hVEXAFYlkOeEhTyTHQ,1051 +torch/include/ATen/ops/_foreach_max_compositeexplicitautograd_dispatch.h,sha256=R8nvIk-PLvPz5eP8hgGPMFWJS5rocX0QxYm6_oLsE5Q,930 +torch/include/ATen/ops/_foreach_max_cuda_dispatch.h,sha256=OhOjFkQ0SJn6yTBBCndzkZ3DSvOeFFPmsWovhJ6f2GQ,739 +torch/include/ATen/ops/_foreach_max_native.h,sha256=AWFocT8TAsPS-lFMh8g90xeX9wZWTShhQz8gxuh_nmU,660 +torch/include/ATen/ops/_foreach_max_ops.h,sha256=e08avI3nrfAOaHItnprjIb9NXthJxAmnWRZi3fQZnjE,1629 +torch/include/ATen/ops/_foreach_maximum.h,sha256=5Oi3DB5uVLkBuFiua0v6CseLWP_znZ_sUYpPFvdsWQQ,3583 
+torch/include/ATen/ops/_foreach_maximum_compositeexplicitautograd_dispatch.h,sha256=oAXK0Goeaaqt0HZZHPs7aXYV7NzqpY6yfGW9p1umMmw,1903 +torch/include/ATen/ops/_foreach_maximum_cuda_dispatch.h,sha256=RxKOWS1pl8YpZ34yZsK8_P_qffiTKHL7AFPzIVvscNg,1224 +torch/include/ATen/ops/_foreach_maximum_native.h,sha256=K-_yw6eIfm9m1sXedBwX-JUbel1QEWQk2y5lGTTMh74,2206 +torch/include/ATen/ops/_foreach_maximum_ops.h,sha256=Gv12vpDpy44gbOgSQI5aURcqnsA7JrLWgaEaFOE4EmI,6737 +torch/include/ATen/ops/_foreach_minimum.h,sha256=9I2lI1E4A72Cf4UN5MyfWYgZcxzuaVlr8fsLH1iByCc,3583 +torch/include/ATen/ops/_foreach_minimum_compositeexplicitautograd_dispatch.h,sha256=YraMfO1UgDm1ErhAS7_B18IDu4i6Sb-FnW0dsOZznq8,1903 +torch/include/ATen/ops/_foreach_minimum_cuda_dispatch.h,sha256=qP4oz43SebofPXOZP2DXigz8hqBkRmaMquXE8hTMuKY,1224 +torch/include/ATen/ops/_foreach_minimum_native.h,sha256=-TfSa6Vg7Wh72G5y_UkWrOdoon0fV-5MSa5-OdLVEVo,2206 +torch/include/ATen/ops/_foreach_minimum_ops.h,sha256=ji8xaolfTnCE6xV4SIUxlCEhwrRT08FUWcFjCMk8WZU,6737 +torch/include/ATen/ops/_foreach_mul.h,sha256=2cJhaAN4w25gY7LMtmGLxKsyApjs3xezuu1N7cidXgE,4393 +torch/include/ATen/ops/_foreach_mul_compositeexplicitautograd_dispatch.h,sha256=Z5Hks3c-mF_5vpzd__5Zgkle3FpRiW3YDiw6Gkh_1i0,2230 +torch/include/ATen/ops/_foreach_mul_cuda_dispatch.h,sha256=Di0b-BfdiK3lI1QxWtykED3EGhm8AX6EnBFCi8EK1X8,1374 +torch/include/ATen/ops/_foreach_mul_native.h,sha256=02Qz2A4smMLE-MlRlH9GOG2fFp5AzSJQ8XJQO1U6rwE,2677 +torch/include/ATen/ops/_foreach_mul_ops.h,sha256=9OU12rEs-9ufGDiz-uLj5E8WJoHYudBv-xBgw4X9ZPw,8669 +torch/include/ATen/ops/_foreach_neg.h,sha256=fX6B23j96wr4Y6__0uNYJKMP9huEmU9CyzBCzDa0Q3Y,1199 +torch/include/ATen/ops/_foreach_neg_compositeexplicitautograd_dispatch.h,sha256=QQyLlTWTee0STX2VyfAM2cVL-E_Zq5KB1q3tBV9g2fM,981 +torch/include/ATen/ops/_foreach_neg_cuda_dispatch.h,sha256=LNirobRyRHGw6PbIqMf_mKBUAUSZJSjAGDRmRVBBRS4,790 +torch/include/ATen/ops/_foreach_neg_native.h,sha256=h7ZTHdp722yfGHrVtzdiGs623_6GyjhBx8-_FMbuDm4,784 
+torch/include/ATen/ops/_foreach_neg_ops.h,sha256=xOphbFt9L9RDfjotxpYKakZw14kytlpc_MylONV4IqE,2155 +torch/include/ATen/ops/_foreach_norm.h,sha256=0OFQKtT_fDBHu8c_Ete1vTDBP91j9ZLv0tF87bv8uTg,1476 +torch/include/ATen/ops/_foreach_norm_compositeexplicitautograd_dispatch.h,sha256=XwsSos2Euc_OkuAl0R7hvxykxM9DSB_DMBRrgOnWQkU,1156 +torch/include/ATen/ops/_foreach_norm_cuda_dispatch.h,sha256=Xa-SgLdRECv_A7TlhHo9V0OqTNXntCSoug2Ru1cWfac,820 +torch/include/ATen/ops/_foreach_norm_native.h,sha256=g5rxdxIY3zWBZKcwgncrk4OC8VTg_L97i0DHhcOnfpo,893 +torch/include/ATen/ops/_foreach_norm_ops.h,sha256=vEuqRs-JlPBkABhGSJuX14BM7eDRn_LzOuZ_b-E0sGc,2110 +torch/include/ATen/ops/_foreach_pow.h,sha256=GobTjgqXPVEimDiAiLa6o79ibCKWxB7eLVQHKpEmSrg,3763 +torch/include/ATen/ops/_foreach_pow_compositeexplicitautograd_dispatch.h,sha256=q5yzQAdlg72QUxq3oqlMBbZ2LoJFFuutNYy1UmjrfI4,1979 +torch/include/ATen/ops/_foreach_pow_cuda_dispatch.h,sha256=UtCRbPpjeMg-XjAVX4cTxmCUF6fZagEnaa1GkiTl53E,1312 +torch/include/ATen/ops/_foreach_pow_native.h,sha256=7C-pzRruCPP_WIt1wkXUEIHCsaQ9KM9GchhYw80E0rs,2398 +torch/include/ATen/ops/_foreach_pow_ops.h,sha256=II2PwkZ0TbBwhmD7H-bEAn8kZIbxz2jHu4P16RmKUSw,7413 +torch/include/ATen/ops/_foreach_reciprocal.h,sha256=aKrILOX_Mxj8OpLz8KCauuzn9Az9Ntq5f3C80c6KzC0,1290 +torch/include/ATen/ops/_foreach_reciprocal_compositeexplicitautograd_dispatch.h,sha256=lTv6U45DvIakDoKoA06Cz5SJtZYTdVDDwl2wGLZox-k,1009 +torch/include/ATen/ops/_foreach_reciprocal_cuda_dispatch.h,sha256=wdyYLOJ0AOcxTxYEmb0AYmHZK-dwx6jc0sAibvlp43s,804 +torch/include/ATen/ops/_foreach_reciprocal_native.h,sha256=c8vKeQGQKnnOtDMohSqq988i2r4JwrewdDgs25aZHZw,819 +torch/include/ATen/ops/_foreach_reciprocal_ops.h,sha256=BAjRL2cpkyL54knTp6VwAFgAyH8W29SOkMWUGayiL_8,2218 +torch/include/ATen/ops/_foreach_round.h,sha256=Nt0Lt08MLY6l2koCYHov6juwEyITxHRB5P2Fl3xHIko,1225 +torch/include/ATen/ops/_foreach_round_compositeexplicitautograd_dispatch.h,sha256=Crxy4v3MV9TK8zzmrbj1I35WBfhdOnIuzFbDOCaeLgQ,989 
+torch/include/ATen/ops/_foreach_round_cuda_dispatch.h,sha256=fCNyh3eTaIc43aXrhMYwL4VUFRVgcHvtpc9vY84gQ5s,794 +torch/include/ATen/ops/_foreach_round_native.h,sha256=x1QRmXU8gY5J3W4P1L6ik68iLBdRDWkoXvkRxyBjgkA,794 +torch/include/ATen/ops/_foreach_round_ops.h,sha256=Jvs1RnbYyAhHDMVu1P6LFL2wTn3BclyZeNkHk6whuh4,2173 +torch/include/ATen/ops/_foreach_sigmoid.h,sha256=-BcHbCv7ZaSUMTF7IB9ngh2SCSbPOhZ8kntAV0Q4L18,1251 +torch/include/ATen/ops/_foreach_sigmoid_compositeexplicitautograd_dispatch.h,sha256=mNUI6c7ywnb7tIGVVuHrsufafPdcTPInMf31PvcYZLw,997 +torch/include/ATen/ops/_foreach_sigmoid_cuda_dispatch.h,sha256=dEwMO5XnsNuy8JLKQ4luueKkJS451T3oCTRZUp_NWp8,798 +torch/include/ATen/ops/_foreach_sigmoid_native.h,sha256=86vYuIiEJeEv_zoUcAfYfzHC3skn34zguMK-KANNac8,804 +torch/include/ATen/ops/_foreach_sigmoid_ops.h,sha256=ScXoAjZsuG0HWr-CBj0Zj-hKGDQL358JICX3TnoYK0Q,2191 +torch/include/ATen/ops/_foreach_sign.h,sha256=l7YrQjBNRQNYaOJBjuAe6HSnE7EvbvHs1T2gul17Gmg,1212 +torch/include/ATen/ops/_foreach_sign_compositeexplicitautograd_dispatch.h,sha256=119CdU2pz6mATsCllZFTU-mYIL-d5uGq2281Z0PlymA,985 +torch/include/ATen/ops/_foreach_sign_cuda_dispatch.h,sha256=i7B1Z15lVhpkr2dtNit6VYZ2ltDw9aFZc0UbjBVSlMY,792 +torch/include/ATen/ops/_foreach_sign_native.h,sha256=Gns4vFgAgvIvhdwoVnGCzxqf2-i4uIb282DxZ6ibKJc,789 +torch/include/ATen/ops/_foreach_sign_ops.h,sha256=hie-WhT77cWFHSTKIsfUMDhwAMgatxyzD5LE7NI9t4g,2164 +torch/include/ATen/ops/_foreach_sin.h,sha256=5OTisAlgh4RB7CmlKQJkAC3Y7rg6_TncE4rY8KUd6X0,1199 +torch/include/ATen/ops/_foreach_sin_compositeexplicitautograd_dispatch.h,sha256=E7vAYn6mRWYeIupZtBKVzV3vMimhmRh4O018rKlhGmc,981 +torch/include/ATen/ops/_foreach_sin_cuda_dispatch.h,sha256=GkuQzGOsrNLYUe1Gq9_OKJjzayi1k_wOFxJGIIjAiW8,790 +torch/include/ATen/ops/_foreach_sin_native.h,sha256=V7ng0ItxEnx5pM4bfI39EprsqGZ5JCczq3x3KVWOa5k,784 +torch/include/ATen/ops/_foreach_sin_ops.h,sha256=tjpT63pTDQcvFqo-hqSD0rlBZTjMSISj4sJWd3ZoCEM,2155 
+torch/include/ATen/ops/_foreach_sinh.h,sha256=T5vLLQZtqIyqffa59E0oqOifZbkRU5Urw2K49KIhZhM,1212 +torch/include/ATen/ops/_foreach_sinh_compositeexplicitautograd_dispatch.h,sha256=WuW_pHcH3snQxpiRdRNSO_DlLyV6FzuESyw3UwTVK7s,985 +torch/include/ATen/ops/_foreach_sinh_cuda_dispatch.h,sha256=Ds_xzRCY9t6plr2vnX_dWl__guO_L4z_gWq_Lpj6mR0,792 +torch/include/ATen/ops/_foreach_sinh_native.h,sha256=s9gFyrE2YKT42RzrgYpW-mXIJ8PvY2bqOoOv58wUVkg,789 +torch/include/ATen/ops/_foreach_sinh_ops.h,sha256=Pz_qtQ20Xc7hACEvbk2oNQDEKYwbg29at54bhcAXhh4,2164 +torch/include/ATen/ops/_foreach_sqrt.h,sha256=qQ1g1_kyjTjM5dFTDACv52cl7CsQ5wPVwVVpeuArKdc,1212 +torch/include/ATen/ops/_foreach_sqrt_compositeexplicitautograd_dispatch.h,sha256=dx79QBg5vjXu1_bm-UY3QgqFpJg6Bjcb6QKQAkmDpVk,985 +torch/include/ATen/ops/_foreach_sqrt_cuda_dispatch.h,sha256=R2I313_FMJORMUQMYqSkhx40Z6FT8OQdnUaBvM9w25g,792 +torch/include/ATen/ops/_foreach_sqrt_native.h,sha256=6hsSu779GLp2mBwjQVnVys9mUOzAUNQuYuwKa78D0RY,789 +torch/include/ATen/ops/_foreach_sqrt_ops.h,sha256=fV6j3dLXgvjr3O2E30jA6TUXJEJqWrRVgeVBb5JNmRI,2164 +torch/include/ATen/ops/_foreach_sub.h,sha256=TPlhczZLf3n4sevOboPcJjYowstKxXzATJZoXrltbDw,3643 +torch/include/ATen/ops/_foreach_sub_compositeexplicitautograd_dispatch.h,sha256=PgLOsDEh2gIdUTxp6h0LE9bIpY1GAg2255Rps4dNczs,1965 +torch/include/ATen/ops/_foreach_sub_cuda_dispatch.h,sha256=clpGRsPk-bTft2ZLnilk2HuRI_1tsgszrVyToCH8rO0,1256 +torch/include/ATen/ops/_foreach_sub_native.h,sha256=P3M9bkfHbrQrjoMDwWgDBB0ZPs6F2Sf3HcoNpUz_mpg,2260 +torch/include/ATen/ops/_foreach_sub_ops.h,sha256=j5PkyohR3ImR9pJFVu-ATOgT0HTVQ8ovPzK2RMj7T5U,6899 +torch/include/ATen/ops/_foreach_tan.h,sha256=Oz1fNBLHJXCX3gzUiIfKcr1p04Az8iKA70v_QX7pHA0,1199 +torch/include/ATen/ops/_foreach_tan_compositeexplicitautograd_dispatch.h,sha256=tWiT5GxFobzjgmN343rM2JtFwxsHBmwwjDogvzFeT50,981 +torch/include/ATen/ops/_foreach_tan_cuda_dispatch.h,sha256=r9_M7nVUJmzWLJHGPPYVbvB5p5Im-7WeMMeYhmZwZiY,790 
+torch/include/ATen/ops/_foreach_tan_native.h,sha256=y4KmTas2BalIaBaROCe55VYjreO6p50uvMgiecKoLLg,784 +torch/include/ATen/ops/_foreach_tan_ops.h,sha256=w5yZXgalqkdtvsT2unnu0qIAZEw9vq_qwKLtNGJGjOE,2155 +torch/include/ATen/ops/_foreach_tanh.h,sha256=chTVJTwUjHvEVVZAfbu9SXJyYanPQjFNmFvJ8k6iKhg,1212 +torch/include/ATen/ops/_foreach_tanh_compositeexplicitautograd_dispatch.h,sha256=eZiXEC9ShTFgCyo_irQy0LwKuaq6-xP1rA-gqwGKPXc,985 +torch/include/ATen/ops/_foreach_tanh_cuda_dispatch.h,sha256=hT4YelKEv44WsMi9mDhVUJXtAO0oW3ClYnhyHh1yWi4,792 +torch/include/ATen/ops/_foreach_tanh_native.h,sha256=BFAfPoTXDV14zq9Fn8rG25fVcWnUfN7fnZNzmNhe6tY,789 +torch/include/ATen/ops/_foreach_tanh_ops.h,sha256=4ehl2qhf40CO38-jThVp4_rFBDqvEDjjOLZ7sH-oOeM,2164 +torch/include/ATen/ops/_foreach_trunc.h,sha256=TU_uy-MavgCozmUh-wLaNvJ2Zen8j5PoXUe1NqqBat8,1225 +torch/include/ATen/ops/_foreach_trunc_compositeexplicitautograd_dispatch.h,sha256=A-8iOv7LlH4hPqcl_Jup-Ca5BUg3XBUrHjKR03ST3mA,989 +torch/include/ATen/ops/_foreach_trunc_cuda_dispatch.h,sha256=9J3JZTWwXdi82irP1-EXfOK7OL_dzzEwzeFO7LUxxW4,794 +torch/include/ATen/ops/_foreach_trunc_native.h,sha256=6mu5bRnw-ICYVkoV6mMqlUNOA8BtdQtVFWZWdSoGdZs,794 +torch/include/ATen/ops/_foreach_trunc_ops.h,sha256=pbCnpnVShE7zcr0NPpgDqG0LRjhkLYsre3-P8XspMHA,2173 +torch/include/ATen/ops/_foreach_zero.h,sha256=eAYVV2Oz3lqh5nx-iAy6cv8fsbfQFuaKmaLqvxCfbVI,1221 +torch/include/ATen/ops/_foreach_zero_compositeexplicitautograd_dispatch.h,sha256=QR7bf5PCuyffJIBDZH5d6dZksq-9o-EWDXJcrVxrQ14,985 +torch/include/ATen/ops/_foreach_zero_cuda_dispatch.h,sha256=xiDyMl4EnnJkOHF8FSTS6_4XN4yiehDDCZomxVXr268,720 +torch/include/ATen/ops/_foreach_zero_native.h,sha256=VPVrlfDLBlP440MVGOVjqNNQBBO8Mp5LlX5NpVjKvkY,695 +torch/include/ATen/ops/_foreach_zero_ops.h,sha256=AW5AukePRmcGN1_UsEF3442aqSiMdWNquQhEdcUTINA,2173 +torch/include/ATen/ops/_functional_assert_async.h,sha256=xhZ-7JIJvxoPLmVGO59Q3m_mQRYV35X0htaX6kTVoA4,810 
+torch/include/ATen/ops/_functional_assert_async_cpu_dispatch.h,sha256=ot74CjLgr5VdbrT_dqIt5KVRcc5VP-Tqt4HGMsAXT1A,797 +torch/include/ATen/ops/_functional_assert_async_native.h,sha256=wm-KRQf6TC3uG1PV-gc28iGSDDwMEziFd5Wq6ep1t-A,561 +torch/include/ATen/ops/_functional_assert_async_ops.h,sha256=F_UgI8uZazR5ogIA_6Yi-txDkD-cux-fXpQ3P8ObaIU,1222 +torch/include/ATen/ops/_functional_assert_scalar.h,sha256=rikNraNWhnZjm4PoWzkncWLqpebXiNPKrnPtoSeBUDg,806 +torch/include/ATen/ops/_functional_assert_scalar_compositeexplicitautograd_dispatch.h,sha256=5rmSQMPsUHjMEHryg_oZ6miOh7UGhYL-QuF18bLLbKg,842 +torch/include/ATen/ops/_functional_assert_scalar_native.h,sha256=_tAPPVT9uX2sz-7Br-G_m4o7Kq5DLT49uI_jDR8ZIY8,554 +torch/include/ATen/ops/_functional_assert_scalar_ops.h,sha256=CTTej59oqr264o6Mn6t1kFo99IRhS7BL5Q93w1mwSeI,1214 +torch/include/ATen/ops/_functional_sym_constrain_range.h,sha256=JJEL9TLq_zNsj8WNOJ72u_H-QDLx_Sw5acDOu-5QKtg,863 +torch/include/ATen/ops/_functional_sym_constrain_range_compositeexplicitautograd_dispatch.h,sha256=eBXY9h8oUARa1CFRzZg6nJKCy4JxBtl61AcSLAWlBXI,879 +torch/include/ATen/ops/_functional_sym_constrain_range_for_size.h,sha256=O9zHbpzWgQx_BrL333MH2pPjI21FTA0uFJxecXNvPCw,899 +torch/include/ATen/ops/_functional_sym_constrain_range_for_size_compositeexplicitautograd_dispatch.h,sha256=I3v0kG9E4hzE5aTLkyT5qBjfI_n0Kt82RFNaXgYnZjE,888 +torch/include/ATen/ops/_functional_sym_constrain_range_for_size_native.h,sha256=Rw2nSNUUx2ja7vGyqBH2JGnvawwudwF8WJbI8XhVnXg,600 +torch/include/ATen/ops/_functional_sym_constrain_range_for_size_ops.h,sha256=j69Q61s5AtKX5rGJKHfQ3pEiZu2gzC3Gzby8iecqbx0,1359 +torch/include/ATen/ops/_functional_sym_constrain_range_native.h,sha256=8ld3iEJe5ErBsxJJo9i3-TnG_vMGlsWGmFN2D8cRifI,591 +torch/include/ATen/ops/_functional_sym_constrain_range_ops.h,sha256=UnlwnQ5gKt1vmtdqp__vvrqFMx5sCdQ3abrHsnocvGQ,1332 +torch/include/ATen/ops/_fused_adagrad.h,sha256=BfBXxnl7RzGldXsQDPfQkKkLuKXo-JZBoIVhzZG2C80,3563 
+torch/include/ATen/ops/_fused_adagrad_compositeexplicitautograd_dispatch.h,sha256=_ymjo9_ZxBgTt6B3nAXOU76NWaKQpi-8spyOZBLJMWo,1784 +torch/include/ATen/ops/_fused_adagrad_cpu_dispatch.h,sha256=mksXbpj0IzkgPgVloQIcT3cOlQuSb9eUjrhbaU8LRws,973 +torch/include/ATen/ops/_fused_adagrad_native.h,sha256=f38_u2UGdsKd-Ohb8_F2Bs45Z6jGljPUTft38RqzAxg,1483 +torch/include/ATen/ops/_fused_adagrad_ops.h,sha256=gLiMOHIQPCAAiodxZPrqkRBYmRFK_aYCLDEByj6EMLs,5069 +torch/include/ATen/ops/_fused_adam.h,sha256=EECg1I7X7lRWYsTnHoDIqng9pmTd3tYFli7o8DaIXnE,8380 +torch/include/ATen/ops/_fused_adam_compositeexplicitautograd_dispatch.h,sha256=cKQIHEQjbq4XtdmdjElr3EQQOg7NiYgsgjP-pnlfzls,3426 +torch/include/ATen/ops/_fused_adam_cpu_dispatch.h,sha256=Od8eilHinkM8AIK7HoNwueUQkMR-XTTG7OxDcGPKgTE,1452 +torch/include/ATen/ops/_fused_adam_cuda_dispatch.h,sha256=M4zOd0u4dEYL_7BJoHd_ajFjVDn9o3y2kMD0B7hyNaA,1454 +torch/include/ATen/ops/_fused_adam_native.h,sha256=6p5e1osCNGXBHS-BrUGbp8qUMug0DWSr6U6owb-RD14,3932 +torch/include/ATen/ops/_fused_adam_ops.h,sha256=AX6Fesb7nZKZ_uv2j7aoBUzri9U6i3LcXd3yl-6LyC8,11769 +torch/include/ATen/ops/_fused_adamw.h,sha256=LnuSoO_Od4P8T8w7U9wFTLX49GZw8R4OLO-lvOQFCss,8405 +torch/include/ATen/ops/_fused_adamw_compositeexplicitautograd_dispatch.h,sha256=_TFA_X_bbIjlT-SbEnm9PToJghik33a8veF0x9e-XbM,3432 +torch/include/ATen/ops/_fused_adamw_cpu_dispatch.h,sha256=_43P1GmRG1bJi66L6azwwf3I0P6lLWCYQJsGHB-8l3Q,1454 +torch/include/ATen/ops/_fused_adamw_cuda_dispatch.h,sha256=cU1bOkbD_YEjXmJhhGRGHrkVWzxL4GSqqGxpqyVA-qU,1456 +torch/include/ATen/ops/_fused_adamw_native.h,sha256=yBtamoVKc7mhjKdb_uQLbCQBGH8FIuJGR2r8mQTrtJs,3940 +torch/include/ATen/ops/_fused_adamw_ops.h,sha256=ZRSWXi1e1vVaRevMi49qJ8Bw0X7esb26rTMIi8Ce77Q,11787 +torch/include/ATen/ops/_fused_dropout.h,sha256=vFb2CAEVodewLRaUxkWcaS7TrdgN0b6TwJJvGIEjEZ0,1627 +torch/include/ATen/ops/_fused_dropout_compositeexplicitautograd_dispatch.h,sha256=hO25KgQqkM_fJBkf-OkDzD9xAXCzkopdW7PnRBJFGi4,1096 
+torch/include/ATen/ops/_fused_dropout_cuda_dispatch.h,sha256=uZMetVwx5KS6GCuFRjI-TnvkxEnzRQl_u7-X_2J6cyw,822 +torch/include/ATen/ops/_fused_dropout_native.h,sha256=ujMmqz9FlgerXV-TeD7q9Mwjud_DRnNULUO7fiDneW8,765 +torch/include/ATen/ops/_fused_dropout_ops.h,sha256=k5_JEZ6QM2QVBJ-A_bMdyL1esZRDUwr7QPGqi9aqbj0,2250 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper.h,sha256=HTFo7aYMleqKwt9as9i77o_UTGXwlFhRyih8VAF26Bc,4752 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper_compositeexplicitautograd_dispatch.h,sha256=tBd5nwqRPK_YcyNF1-nW3MwbDw00wS3ggMbTyPAWjH0,2085 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper_cpu_dispatch.h,sha256=72juH36Kwx_GmhJFveNm9GFakNipCNNOVaNfk1vKu7g,1071 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper_cuda_dispatch.h,sha256=3I6zOASUf_AUFl_XBQpQC6syfdXhM6213Dfq2-akork,1073 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper_native.h,sha256=JDeeyZgJzcvqJ2bjk-0-Q5uoGHrUHyVLzfpDalT7SV0,2164 +torch/include/ATen/ops/_fused_moving_avg_obs_fq_helper_ops.h,sha256=-qHAA6TLbDHJRDV1KeXhTt47Y-NYnPrZzIQh7Blk88k,5950 +torch/include/ATen/ops/_fused_sdp_choice.h,sha256=EK4d8dFy47DR6S5FqOrY-Uz2dFZB-76r4e8q5Z5Yt2M,1070 +torch/include/ATen/ops/_fused_sdp_choice_cpu_dispatch.h,sha256=YyQ8YI2iqYOOZPBFiUeMqmHtpRC_5Z6lGUkVtjiFfac,942 +torch/include/ATen/ops/_fused_sdp_choice_cuda_dispatch.h,sha256=m4dlOmbdFe9Xn3reF85XIJSGTJb9xrzArIULRkHwaC8,944 +torch/include/ATen/ops/_fused_sdp_choice_meta_dispatch.h,sha256=kT7mIPHoU5l1Lj75eFz1fepH0oC9P7GKOW-8ogs88P8,944 +torch/include/ATen/ops/_fused_sdp_choice_native.h,sha256=v4PZ6YO3tVN46Sbi-JLAK9RH7QELJwHiytHhmwWA2ro,1264 +torch/include/ATen/ops/_fused_sdp_choice_ops.h,sha256=CMYu-GbW5o6UutopQsspKfAHelWIUV7XdNUiDc4OQf4,1609 +torch/include/ATen/ops/_fused_sgd.h,sha256=4ZyqO4v5_XdTBqaK4ZClqaJksqkqP0pVy0aZ6FNGT48,7203 +torch/include/ATen/ops/_fused_sgd_compositeexplicitautograd_dispatch.h,sha256=ApVD3eFvvzxRpANGN4wv_Np38rwjf-h7bpjqGNLEfWc,2956 
+torch/include/ATen/ops/_fused_sgd_cpu_dispatch.h,sha256=9yX7U5SkvQFw9IVijWLyFvwBnLsygMxo4-JN0XBO974,1330 +torch/include/ATen/ops/_fused_sgd_cuda_dispatch.h,sha256=qagIsaJBbcxoS4pPxmBqDI3Z6JCTVasiVo0RKWT0xmg,1332 +torch/include/ATen/ops/_fused_sgd_native.h,sha256=CW9gytsMNug0Q2YSBdf0QcdQx1t5bpGdZWXHP5eN_ck,3340 +torch/include/ATen/ops/_fused_sgd_ops.h,sha256=Lp6CwkZc3PayXNfQ1UpC9ia-2HnykURY4cWyKmlKQok,10053 +torch/include/ATen/ops/_fw_primal.h,sha256=gpW2EHbuNOOy2HwdWuf01SZf7aosfVJLc_9tTEr7uTY,484 +torch/include/ATen/ops/_fw_primal_compositeexplicitautograd_dispatch.h,sha256=NXQhFZcOOWc2XBL77CMAxQ8vOtxfeysFHmPttB2p8ig,783 +torch/include/ATen/ops/_fw_primal_copy.h,sha256=PRDfedFSqbxSo_laWiHdCxLTsIaLvGicVetuHsEtbOg,1193 +torch/include/ATen/ops/_fw_primal_copy_compositeexplicitautograd_dispatch.h,sha256=lATcwWFhcyj-_zQS-09IQxQMTYUIxYv55Oyblg55m0k,915 +torch/include/ATen/ops/_fw_primal_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=E1jl5KdNilfGNuQgYiezT0ISLkd1RcPptxbL_vgFcqM,814 +torch/include/ATen/ops/_fw_primal_copy_native.h,sha256=qVU4Fh0TwplAOF2RIhM5QbXNSxF2_lekfI3fik4IrAc,602 +torch/include/ATen/ops/_fw_primal_copy_ops.h,sha256=-ahcisTRZ4snXodEtQhOPO0G-hwSBzFDgON8NOA6LOE,1744 +torch/include/ATen/ops/_fw_primal_native.h,sha256=DtLuqgevpxd3ONa1QZjkUbxZK7T9O5nbLfpqw1igUFI,495 +torch/include/ATen/ops/_fw_primal_ops.h,sha256=FXyRwaJ8KTwoUMaSphA719lATPfrMtCWJLLygyXe6Kw,1035 +torch/include/ATen/ops/_gather_sparse_backward.h,sha256=wGWv2vUaXekJtSlAEkPEuVaO9GFoYVYEB-xsbzA6tQw,800 +torch/include/ATen/ops/_gather_sparse_backward_compositeimplicitautograd_dispatch.h,sha256=t54YLE3sVhaFbLKLPm5y2uR8ed1-eSvjClTF58tsjGQ,845 +torch/include/ATen/ops/_gather_sparse_backward_native.h,sha256=yEOlmxa9V4QU8Vz6bXSCjUTdBRWexO9kNxb3AbAQ3Wc,557 +torch/include/ATen/ops/_gather_sparse_backward_ops.h,sha256=dWj49Q0our4k4SGCk27OSrTkjXPmcbECGxy-4zwbJR4,1231 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback.h,sha256=QBWEeUrCpu5BST3jtux2AFNz0SB1rJuppeqvtpYtZBY,1918 
+torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward.h,sha256=g1G1OwSQCe_ji1RB6xqxGHIy1ZQvGr6td4ZJ0W5QILA,1070 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward_compositeimplicitautograd_dispatch.h,sha256=0S7kWiPEKRmi0iS8JQ3K5TChzSpZNhAm1mjyXQgaUT8,949 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward_native.h,sha256=c_fVMBPVr0_td2ydjDm9Yw0AoEmiqIdDci1xKy46pVs,661 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_backward_ops.h,sha256=-8P0wwwuaNAjRJP-ssMm15Cm-qr3r2AzdbVE2Nt4Mh8,1564 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_compositeexplicitautograd_dispatch.h,sha256=RNJ5ldvNJNBMZ6RcgTfpPIoIjCoBVAF3X9TPiz7JjpY,1278 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_native.h,sha256=K4nzk5b-wc_Ng__aB32pPFWRbhAXisDaDdPY69XJ_Hc,792 +torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_ops.h,sha256=KuViWyZWvkInJKCmc-4V1KzXs93c4bbrY9BDfEp7T9A,2352 +torch/include/ATen/ops/_has_compatible_shallow_copy_type.h,sha256=MqXyd1j-osHLTh1BMQgQeHzF8hwWU4a7h9skTkDf3c4,758 +torch/include/ATen/ops/_has_compatible_shallow_copy_type_compositeimplicitautograd_dispatch.h,sha256=2xdv2qflbXPUttIRc55S8s2MotOCl0vLyftGJ5zbOWg,810 +torch/include/ATen/ops/_has_compatible_shallow_copy_type_native.h,sha256=qeZT5hSichhkbW4OjM0AeMO7ghJLdJ55XGTEskUQYAo,522 +torch/include/ATen/ops/_has_compatible_shallow_copy_type_ops.h,sha256=AHx5f5Gw84e6bIjFaFns67u2qIjTRkSVIXnI3NmFnPs,1111 +torch/include/ATen/ops/_has_same_storage_numel.h,sha256=ZBfnels-RtyKbFGxbEtufTq-w6ZWzeYa_KZfQ9DTx-s,721 +torch/include/ATen/ops/_has_same_storage_numel_compositeexplicitautograd_dispatch.h,sha256=2KsHY-jtYgiEkqUzRXGTD-UuQAasT2xh3VJnh-Y5UmM,801 +torch/include/ATen/ops/_has_same_storage_numel_native.h,sha256=X-d35dI8TUEGyWm6DGhVWbv5fK6tKXqaMEk4818EdYM,513 +torch/include/ATen/ops/_has_same_storage_numel_ops.h,sha256=DVgjAgTHB5guhSSQR5_CFlKjHwKc_c_GFlah0r6LDes,1084 +torch/include/ATen/ops/_histogramdd_bin_edges.h,sha256=YEOodGaagyBrwDuwYu8MsPRceNS8inUBquUsiDsRI7Q,1895 
+torch/include/ATen/ops/_histogramdd_bin_edges_compositeexplicitautograd_dispatch.h,sha256=MViELSWfzE5zkB70kIQEbMBJaPM9JHnQAbH-jaPzoy8,1161 +torch/include/ATen/ops/_histogramdd_bin_edges_cpu_dispatch.h,sha256=cN5bcMk9tbLCU_6TzWimrAJgmjBUZLjRmeqK0wVOcPc,900 +torch/include/ATen/ops/_histogramdd_bin_edges_native.h,sha256=JeYM2ppUxo9SILl4gr1y1yeSf3l4HYdBHXc-Ks95rqk,868 +torch/include/ATen/ops/_histogramdd_bin_edges_ops.h,sha256=Ajnbh0prkKmbSZ-vavDEBUwQ4upbELQKGcAL9D-va_A,2558 +torch/include/ATen/ops/_histogramdd_from_bin_cts.h,sha256=NNWlixXCLKrNW3lVl9chOm0CBMDZUhaFzlElv9664ig,1932 +torch/include/ATen/ops/_histogramdd_from_bin_cts_compositeexplicitautograd_dispatch.h,sha256=Vf56yUvS1uW3OjVRMXOYQpE7NuODGPyrZEMn1bAwXWw,1179 +torch/include/ATen/ops/_histogramdd_from_bin_cts_cpu_dispatch.h,sha256=SCBr01-DMRInbQy4CgFeyQ1_icS6mdsHYh9qMMIIjd0,888 +torch/include/ATen/ops/_histogramdd_from_bin_cts_native.h,sha256=aAtOSR857t_qnR4oVLlVOhvRkRbHBmSfRpSv03I6eEU,853 +torch/include/ATen/ops/_histogramdd_from_bin_cts_ops.h,sha256=5aWQEHM49wERDgi-TI4lJq-f0pwMTlgFCtHNN734oX4,2553 +torch/include/ATen/ops/_histogramdd_from_bin_tensors.h,sha256=l_jzUD3UkvpbDJpEX9kgpaVF-x2Moqj7z4Paqxcp13s,1729 +torch/include/ATen/ops/_histogramdd_from_bin_tensors_compositeexplicitautograd_dispatch.h,sha256=TsJoFuX3gG1uZMr4G14-bMw8cae-IbeJTKdKcWcMZTY,1080 +torch/include/ATen/ops/_histogramdd_from_bin_tensors_cpu_dispatch.h,sha256=AVDmfaUS2CV5uEQucfz3kb6j2slZOrrdnbss4hHVmbY,831 +torch/include/ATen/ops/_histogramdd_from_bin_tensors_native.h,sha256=Kpd6ztUmgZNuaDtpL5dawJI97tYK0j-rzBDQwYMoSDY,750 +torch/include/ATen/ops/_histogramdd_from_bin_tensors_ops.h,sha256=30IgXE46Pbs0UeZOWgqgBmaRSRjzAn9fAL_BzyrhFOc,2277 +torch/include/ATen/ops/_index_put_impl.h,sha256=Rjf-9wnf55NYEPLsi32UdkkjQbc4tsBBd0wtauSmyUU,2203 +torch/include/ATen/ops/_index_put_impl_compositeexplicitautograd_dispatch.h,sha256=KawxZ8MiPYCVXQLyo3UtBpC4lc3Lvzc9w6cymtSC1ks,1311 
+torch/include/ATen/ops/_index_put_impl_cpu_dispatch.h,sha256=OcMMvPq-f7cAPOqNbr4w3o18awq7e0rHL-NUaGmdo5s,851 +torch/include/ATen/ops/_index_put_impl_cuda_dispatch.h,sha256=FV23bzfXa7YoUf7lpsj1gOcojbXKnuzIRwb0OQFOVlQ,853 +torch/include/ATen/ops/_index_put_impl_meta_dispatch.h,sha256=LNybZRBzxbDvblRuI2lBS-JqjAFfgbJK4O1UB84-c0w,853 +torch/include/ATen/ops/_index_put_impl_native.h,sha256=yv9ZtUwKdIOCKSP8gwIhHs8x-ANGx2pJ0VV9Vy2X_2U,1394 +torch/include/ATen/ops/_index_put_impl_ops.h,sha256=_He0Q0MZnQaDw-jJvj0S5pC5RLy8FjKXrPcw9DRbnnM,3349 +torch/include/ATen/ops/_indices.h,sha256=k-4UxaQnX0OX658TmckPFuKfwmFqEM5u01k2qgIxFKs,482 +torch/include/ATen/ops/_indices_copy.h,sha256=u2-IP5SExuCOAg12gY7TWEa1omTEZTl9HG0NV3KmU5c,1074 +torch/include/ATen/ops/_indices_copy_compositeexplicitautograd_dispatch.h,sha256=jyTTcUBtctnqGmqzumiA_We9NEUuSbtjr-GlS5olODc,881 +torch/include/ATen/ops/_indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=7As9eYtRYosbx2LHlxoNmrZkYIbgrc6iUpUEutNA7Zs,797 +torch/include/ATen/ops/_indices_copy_native.h,sha256=svS206yL_9pAAkUE5n2z0lNSBbgLAPbENDKhgq-PEDY,568 +torch/include/ATen/ops/_indices_copy_ops.h,sha256=hPtMuivvSBABZHKoTrMnUNsWBaZ38NLjERgmE84u258,1632 +torch/include/ATen/ops/_indices_native.h,sha256=cFMuqOTdC3B5gFWOO0Z0PxhQcNPANBTkWVXidieG2SM,485 +torch/include/ATen/ops/_indices_ops.h,sha256=WJV-AukaEqphCkxNuEcb_lp13tJ_r_o9JAhw7tgdEio,979 +torch/include/ATen/ops/_int_mm.h,sha256=BFpDfgVv1NljX68t7msWCnx3MyVFgqh9SbwOdxtc3kM,1146 +torch/include/ATen/ops/_int_mm_cpu_dispatch.h,sha256=1DFPv8kodBPzreDBqXj-udPUZR_sRYtUYKvY4sEVWj8,955 +torch/include/ATen/ops/_int_mm_cuda_dispatch.h,sha256=iJjZqqVijsGowuCmY8zrbuKbZO8fMLJ0jxK7cX_PHvQ,957 +torch/include/ATen/ops/_int_mm_native.h,sha256=yJTcKddfP9rmk8zCypH-yrtobE_G56RlEewyfnVINxw,808 +torch/include/ATen/ops/_int_mm_ops.h,sha256=NRFABzdgr0o8-CuaTBFkZSTz6eVPix-hSfXRoxV1WC8,1762 +torch/include/ATen/ops/_is_all_true.h,sha256=6f4HKHIIpUxCnMzqeGXGI4Koe49WZDEU574OAbfW5PA,638 
+torch/include/ATen/ops/_is_all_true_compositeexplicitautograd_dispatch.h,sha256=cF3R17YPvgyrHCreU1inqg62KpMC3PB48KGkRSHNpEY,770 +torch/include/ATen/ops/_is_all_true_native.h,sha256=pCE1xodo0siKP15x05pHtbZ9kN1aNHGSnrqPBFsnYy0,482 +torch/include/ATen/ops/_is_all_true_ops.h,sha256=pHXirfaKizpEjoDE6gAn2PxR1SuKwj5XVnOg6CKzb-0,985 +torch/include/ATen/ops/_is_any_true.h,sha256=99B2IKh57HndCL2y5VoomeFXBWyzTQonVF1svYYQ-Nc,638 +torch/include/ATen/ops/_is_any_true_compositeexplicitautograd_dispatch.h,sha256=Ffuv62XoptwkDY-X2MWzJrLWrjrm0Dm1EHbA8QwS5Nk,770 +torch/include/ATen/ops/_is_any_true_native.h,sha256=G_KKsHE6Gsdmxb2J8SAsvB2xzElW2FTxu01ZPq4BFYE,482 +torch/include/ATen/ops/_is_any_true_ops.h,sha256=ItKCLbGVLs2eU86tQ0wkcQgOynx0QnKXXnvjIj6pfJs,985 +torch/include/ATen/ops/_is_zerotensor.h,sha256=CeigYC75W9KgQ70ln4trKrPPBA1VS3th7iYAJIUGRdY,649 +torch/include/ATen/ops/_is_zerotensor_compositeimplicitautograd_dispatch.h,sha256=FfbyZJnR2iPS_nbV5pJ0Tqc-uliqX5__GFKpmC6gAp8,766 +torch/include/ATen/ops/_is_zerotensor_native.h,sha256=w-Va65GneYs2toXH2JZCkk9RENaA4zX8I63OLmSiIYY,478 +torch/include/ATen/ops/_is_zerotensor_ops.h,sha256=ov6i5ur226gCwNXnavx3cw5L_DhfsGpvqIlcrnMi5ks,971 +torch/include/ATen/ops/_jagged_to_padded_dense_forward.h,sha256=QguTe3ceSD710EcGgymIoMrW3lSo89ER1OgKFxWy1Xc,2107 +torch/include/ATen/ops/_jagged_to_padded_dense_forward_cpu_dispatch.h,sha256=tDeeWs0bmQ9DKCTtNOp0J7n-vyjtcN3jTU8Dj0EusD4,997 +torch/include/ATen/ops/_jagged_to_padded_dense_forward_cuda_dispatch.h,sha256=jUCHS6vLjowX54vHddVuIYEvjSrzyoyp2DLXUzVVzNA,999 +torch/include/ATen/ops/_jagged_to_padded_dense_forward_native.h,sha256=Hf3w0KEm6GIvbf0WEUppj0pZnz9uDLl8rm2Ec0fbdqw,753 +torch/include/ATen/ops/_jagged_to_padded_dense_forward_ops.h,sha256=Py7RS-lrkioq8ItHvFpvaJSetoT5fjcGNb1mA7_OLrE,1316 +torch/include/ATen/ops/_lazy_clone.h,sha256=vlug5CnKQFdt06LaNpoGUyzqOjlQiMEdqIVKoTYN4Qk,634 
+torch/include/ATen/ops/_lazy_clone_compositeexplicitautograd_dispatch.h,sha256=7k9eJxEeAoIJMjetZpYwCy3NJo9UVrZZP83XvPB6Xds,769 +torch/include/ATen/ops/_lazy_clone_native.h,sha256=vkVYm8Ua9d5I1X8b3hp-D3WMMNLRqILPA98ncLFio70,481 +torch/include/ATen/ops/_lazy_clone_ops.h,sha256=MaewWa7hpocsH9Hre-sVXLsYQBwtxTiRq9eJo6qjRvo,982 +torch/include/ATen/ops/_linalg_check_errors.h,sha256=m3buWrQ--TfrWn06AB5v_uJ0elWWOD1MEE-0CFzB6PY,757 +torch/include/ATen/ops/_linalg_check_errors_compositeexplicitautograd_dispatch.h,sha256=Ila6TAxjI4yORIeMU0AaOdS4D1NKKLwkF1KqSlFgFYA,815 +torch/include/ATen/ops/_linalg_check_errors_native.h,sha256=WUrcAL8bKM8WSCUtKeOY9U1YMBknY3E5VYGikvR6G-k,527 +torch/include/ATen/ops/_linalg_check_errors_ops.h,sha256=Wlld4uQmz6yltd4MY2DU-BiFqcZWcAsn4IBiLtELkqA,1130 +torch/include/ATen/ops/_linalg_det.h,sha256=uawFGpqwQhtWG6lMW85Mem1fbI-vsB6UtJeOhIXR6wA,1462 +torch/include/ATen/ops/_linalg_det_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WVjrpZSPqf_pbAlfMdCe-RqVM3pKJCEATFhL8igoqGo,828 +torch/include/ATen/ops/_linalg_det_cpu_dispatch.h,sha256=z_9T-waezr7f5_RXrOIJTWJ4xe3Xkhz8n8zOtZbZINU,1081 +torch/include/ATen/ops/_linalg_det_cuda_dispatch.h,sha256=mwAEh8wiAGNxowNzc6rugiNefKqymhmQgg09VbNvLMc,1083 +torch/include/ATen/ops/_linalg_det_meta.h,sha256=vCK9q9VHQcxclYcF_E5Wa5hf-D4SO5os-bcjixCn21U,577 +torch/include/ATen/ops/_linalg_det_meta_dispatch.h,sha256=hihaWL2tAJfTjkOVw6Xu0gr7G1islScG7ba_sjKwJzQ,1083 +torch/include/ATen/ops/_linalg_det_native.h,sha256=trSmGzxXacohaeGD7OwQdTbVEAEjaxxmThbL39f0pJI,661 +torch/include/ATen/ops/_linalg_det_ops.h,sha256=2tMI2quO0jJjme2jlZo9-9hDFr_UG1Xmhvy9bhe-B54,2064 +torch/include/ATen/ops/_linalg_eigh.h,sha256=fRDW7a9tDUuek7QB_46xbAL6qnFmheVsypzpE5DTeVk,1732 +torch/include/ATen/ops/_linalg_eigh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xjQc_Z7i5elpgIfxBJ4x_t2amOT7NB30PzRVvHn0S2o,866 +torch/include/ATen/ops/_linalg_eigh_cpu_dispatch.h,sha256=tY2qcoGuhgjrbN3haqFpMswV8v2KP6Vu-xWKOY0XykM,1170 
+torch/include/ATen/ops/_linalg_eigh_cuda_dispatch.h,sha256=w9wi_MmlNh7OsHv87NeMqP_RpljQZHOR52LKS_Ebdk4,1172 +torch/include/ATen/ops/_linalg_eigh_meta.h,sha256=YjtX7yN9yqE5iPys2gVyaQ0Dpe1or0HFXThA7Vlfdxg,617 +torch/include/ATen/ops/_linalg_eigh_meta_dispatch.h,sha256=SM0z3Y2bzH0ZK2zJ5PGsMd2eZGdFhBB6uTZc7TLGUnc,1172 +torch/include/ATen/ops/_linalg_eigh_native.h,sha256=Er_7xxu3cAzltkO3VSuovffAPpJbXOq2xbNDUzQk8Pw,691 +torch/include/ATen/ops/_linalg_eigh_ops.h,sha256=pkYnMfRd5-Qw4_4f_qYgsS9teqctUA4EGXvvYjW9l7s,2257 +torch/include/ATen/ops/_linalg_eigvals.h,sha256=TqVfhGrgukrYzmrQ7WVNP1oZ0pZFTq5VqE2XBr8zLiY,650 +torch/include/ATen/ops/_linalg_eigvals_cpu_dispatch.h,sha256=ahs5ImtGxpkCvrYzc7-Bkrcx6PevKHDj-ss2CbAbUYo,729 +torch/include/ATen/ops/_linalg_eigvals_cuda_dispatch.h,sha256=hxal6OnofLlMhm0HqtAAB1UZXKR33-k-YUuEEuheC6s,731 +torch/include/ATen/ops/_linalg_eigvals_native.h,sha256=sC9NFOgXZBz_TkX5kE75Q5OgsNVZwREDGPLy1OS6XLY,485 +torch/include/ATen/ops/_linalg_eigvals_ops.h,sha256=PdtBApJhjUZrIh52plPIJ-1yFlXUDWo8hNoil8qnx30,994 +torch/include/ATen/ops/_linalg_slogdet.h,sha256=9uVpPCTvgpa7RLWPra6EJyrUniCK6QJ2s5KttCInJPc,1689 +torch/include/ATen/ops/_linalg_slogdet_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wcgAZj-HWA5gHoQ68MuZuTKVe5q7dp9hwCh49I4bIZ0,843 +torch/include/ATen/ops/_linalg_slogdet_cpu_dispatch.h,sha256=4N9OTDJhZ7h3C9z-yFcMzRp7EIjzFLS4Ts3ZSYPBoUk,1174 +torch/include/ATen/ops/_linalg_slogdet_cuda_dispatch.h,sha256=Vziuw0p8-GrayyOhYJuO32cm1K9kw9mAI_oYEsglTYc,1176 +torch/include/ATen/ops/_linalg_slogdet_meta.h,sha256=ZrmwRZzzoWfYpCU67LT-cJKzL1YMM0dNPqvuhLQhZPY,581 +torch/include/ATen/ops/_linalg_slogdet_meta_dispatch.h,sha256=FmgeiTaxUjXwftjSjyeE3c5K4OqVnSfBP76JSWevIlU,1176 +torch/include/ATen/ops/_linalg_slogdet_native.h,sha256=0Z4wLsBSDA0HhMvd6mJTtg7qXr_wfZTSWHemvVBwXKs,701 +torch/include/ATen/ops/_linalg_slogdet_ops.h,sha256=0e-BklHQ0hJqPh-5Np4QYw0bGEdcu3CZKf91Tq66dQM,2268 
+torch/include/ATen/ops/_linalg_solve_ex.h,sha256=GNiq1SElTl1cwLcCGr8sDPa9N3yTmFl4imkYqmE9DCk,2083 +torch/include/ATen/ops/_linalg_solve_ex_compositeexplicitautogradnonfunctional_dispatch.h,sha256=7aMKBqEyDg-ULzCyrUjFgil2tZkBabLavlFLluKS14Q,907 +torch/include/ATen/ops/_linalg_solve_ex_cpu_dispatch.h,sha256=H9U1RBF8PYQVjIfX9keD7WTTN2j35wNkaDmHUw7LY5k,1349 +torch/include/ATen/ops/_linalg_solve_ex_cuda_dispatch.h,sha256=TXUZXz65MCtetPkYashJ1pPS_P83PIKLrFa02jlCQQo,1351 +torch/include/ATen/ops/_linalg_solve_ex_meta.h,sha256=LPktIHPhtoV3us5lcW3P-sjd9aaJPpVCfIQdQP7pAAs,634 +torch/include/ATen/ops/_linalg_solve_ex_meta_dispatch.h,sha256=oMY9eXNYuKwf9XDPLLl4Zfc1_TfIy9EARkNvlF_VNeI,1351 +torch/include/ATen/ops/_linalg_solve_ex_native.h,sha256=y9_Vd0UZuMmmpbp8w9wrwOPwaBQT0rlQ-GBYQFwohnM,753 +torch/include/ATen/ops/_linalg_solve_ex_ops.h,sha256=WC2YIs_STko-sRoGV10LGn3a9bEljXUjg11ZtrJMG-E,2642 +torch/include/ATen/ops/_linalg_svd.h,sha256=NFxNS0PlWdxPBJdxv54_Oddr-XZpoq_DZfkg1ykOtB4,1947 +torch/include/ATen/ops/_linalg_svd_compositeexplicitautogradnonfunctional_dispatch.h,sha256=GqkL19ZfCIB24LkTsaiVpkqGHV4hIF8PTQ8WdPL5UZ4,933 +torch/include/ATen/ops/_linalg_svd_cpu_dispatch.h,sha256=BLY-Rwfy3IDcgLnpO7i5eahkOZPzeTN6GOu_skm1Zns,1350 +torch/include/ATen/ops/_linalg_svd_cuda_dispatch.h,sha256=yyp-aVI3qo5xuJ0UyBeynZ-gN6udrEkZBcdcnasdKOY,1352 +torch/include/ATen/ops/_linalg_svd_meta.h,sha256=OkNEGqz9vjnJmweMziTHqEZOjl0FLzZlYda_h0Dl4v8,656 +torch/include/ATen/ops/_linalg_svd_meta_dispatch.h,sha256=_TYQcZgExT-kwEHBcnaZb7JdFQNbh7ZST0go_6PQjKI,1352 +torch/include/ATen/ops/_linalg_svd_native.h,sha256=hRms8jgyYg4nmgGDAIQcpp0DdIPbiaUwn3bunM3hpS8,730 +torch/include/ATen/ops/_linalg_svd_ops.h,sha256=SlIh9SasgFafQ6tKqwvP-tOkadVct7aahSqcDzhBPzA,2544 +torch/include/ATen/ops/_local_scalar_dense.h,sha256=vzphiEWx4irwnS6dq3IzlOsYxsuCfeehBj6cOcdCVjE,666 +torch/include/ATen/ops/_local_scalar_dense_cpu_dispatch.h,sha256=c9MkDN0UKXdbmFUiSp4GZVB-Nyc3vhdcnyqdzMXMNQQ,733 
+torch/include/ATen/ops/_local_scalar_dense_cuda_dispatch.h,sha256=cp6tgJTSD8E2i4UzEABPpSDGdkvPRsg1oZcx9vm4Jg4,735 +torch/include/ATen/ops/_local_scalar_dense_native.h,sha256=4FCWKzkgw5tx48ImymC7foeYsezL1GTiEeWKuo1u52U,565 +torch/include/ATen/ops/_local_scalar_dense_ops.h,sha256=FcgXW9a8nkC8olqEmPZiHTFfMa3WXg6vqU_-vKGkqlU,1006 +torch/include/ATen/ops/_log_softmax.h,sha256=5GKDoh261x8qFZAhUNA7XIhP8xE_3I4H9RZVNRY-Rhc,1310 +torch/include/ATen/ops/_log_softmax_backward_data.h,sha256=sgGECzVB9M7dk8FsGvZt76bCrOxGVNTwJx-LrXEFKUM,1693 +torch/include/ATen/ops/_log_softmax_backward_data_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AtFFuaP4HOc_1rtpvkM723kkaU5B1yfAqu2EOj0PkmU,885 +torch/include/ATen/ops/_log_softmax_backward_data_cpu_dispatch.h,sha256=mYIa9lFri9OvQmGnq99-HTj4im7kUI5dhJPXYwpE1_k,1162 +torch/include/ATen/ops/_log_softmax_backward_data_cuda_dispatch.h,sha256=GjAibzrSWBfnl8lSiuQL2H4R-yCcy6h0DXT4Nh5enoo,1164 +torch/include/ATen/ops/_log_softmax_backward_data_meta.h,sha256=kHd2VHepl2cRaEyxJrNPDnVRjunzr1kAYgbmEF8qr-Q,670 +torch/include/ATen/ops/_log_softmax_backward_data_meta_dispatch.h,sha256=sLpNSJ2Ybc0X-jH_quFwLcLultwvHom2FK5shNd-UKY,1164 +torch/include/ATen/ops/_log_softmax_backward_data_native.h,sha256=B_nfjRuYbPAsKMokvIcngRMTdRrfzp-0Px0zz6RskLs,984 +torch/include/ATen/ops/_log_softmax_backward_data_ops.h,sha256=86qKivw0cYa8pmpbeNz9BNX4insZ97nNza0SqSjCFy8,2210 +torch/include/ATen/ops/_log_softmax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=LxaA31JJPm_ixF6Tw79UyGIIPG9h_hIdErE2weJUVa8,829 +torch/include/ATen/ops/_log_softmax_cpu_dispatch.h,sha256=O-n0FakeBSZc3Y8saOWFIt8KD6Kojkuazkpi4Jud_YA,994 +torch/include/ATen/ops/_log_softmax_cuda_dispatch.h,sha256=UnBbvEEosdPCdx7aLI7F7OCUchy5zJcj4QY8drotYfU,996 +torch/include/ATen/ops/_log_softmax_meta.h,sha256=5mu1vt7UNWlLaXSkAWNpASOh3i1NQSJsrMMNdIVTbM0,614 +torch/include/ATen/ops/_log_softmax_meta_dispatch.h,sha256=c2gpZI3nk0ehsIuV7lXzzmjFWoy11iLxdBNpOyuhmg0,996 
+torch/include/ATen/ops/_log_softmax_native.h,sha256=SVqdThvXNjvN3CCWYug7V3U8wY3uDWQrNk1AcfW8J_c,840 +torch/include/ATen/ops/_log_softmax_ops.h,sha256=D6Uwk9WUB4f0C-7X7weFFB2ZIq0Uf3046kDn7zoCRdk,1846 +torch/include/ATen/ops/_logcumsumexp.h,sha256=oQNJtl6RbtsATEJx4EJ_wTAShI9KkeQQwmZ812EzSjQ,1155 +torch/include/ATen/ops/_logcumsumexp_cpu_dispatch.h,sha256=3_RT4yQXkssBg2jSX8My8v-w7W1nw8u5IlKnlRTZr4E,937 +torch/include/ATen/ops/_logcumsumexp_cuda_dispatch.h,sha256=-hPUr5qAWMLY_DdiE3GgQc9KoLXe_Y9C6zErSjkd76w,939 +torch/include/ATen/ops/_logcumsumexp_native.h,sha256=csf5JZ7x5dB81ON_ztE4jbFwG8ThL4jcrKv_hO1I_m0,784 +torch/include/ATen/ops/_logcumsumexp_ops.h,sha256=4W3vgFKwd3hDpl5zhyAnjH_6yRulZz_6cM2WTlao8Yw,1720 +torch/include/ATen/ops/_lstm_mps.h,sha256=tIFkA91TIwlzG5wPsGXAVURQA2YDCCvnkFg_ZT8eDNo,2921 +torch/include/ATen/ops/_lstm_mps_compositeexplicitautograd_dispatch.h,sha256=d3rxtRj5Uu19DNLP0ydQF_sE_4WeHWgt8uDi5NXDDRc,1515 +torch/include/ATen/ops/_lstm_mps_native.h,sha256=sOhFGyqPs92xTgeLhWAnvMwrl3wIX_ryalDGjdpnDsM,824 +torch/include/ATen/ops/_lstm_mps_ops.h,sha256=k81gsXLj4J1b67YeR21ZX06N3cdA4ndQZKpAFWQclD0,3492 +torch/include/ATen/ops/_lu_with_info.h,sha256=BF5HJuhrgEB-NBKt3vG35j8QqH_KJmOaU0xl93QL7ZM,814 +torch/include/ATen/ops/_lu_with_info_compositeimplicitautograd_dispatch.h,sha256=sRAzB0rszPzNHEMEv9NdUSVIxSYv4bElHoVGQBYcUlI,848 +torch/include/ATen/ops/_lu_with_info_native.h,sha256=2XnxswVzpWW3oqF3llF6NmHa1yLZwQHSprK8fkg2Tz8,560 +torch/include/ATen/ops/_lu_with_info_ops.h,sha256=zu1BH9zXaluUpzDMXzAf2hgotl9Ofyyqz_jGUxbLjYM,1244 +torch/include/ATen/ops/_make_dep_token.h,sha256=4VdnrvPtYY1U-l8t2Dp8G8_sZt3Ei8Zj1WOLGZ2m-tI,1520 +torch/include/ATen/ops/_make_dep_token_cpu_dispatch.h,sha256=4fjYSUAm3zTKcdmEQ5IfVfQfk1uvBgsNh3BDlm2ns4s,1030 +torch/include/ATen/ops/_make_dep_token_native.h,sha256=dwpXgDzIbHP6BCgY3PO_KYHIx8jtmxlTAqXuI_MaybA,685 +torch/include/ATen/ops/_make_dep_token_ops.h,sha256=iq72WphXZ_-9a40RPwJ0ZW-KT_SOJJNO-X4197pi0gw,1574 
+torch/include/ATen/ops/_make_dual.h,sha256=bI5yZHGQ-be1bpbHW9SK_Z5CVFB51m85SA9wBDYmdsY,728 +torch/include/ATen/ops/_make_dual_compositeexplicitautograd_dispatch.h,sha256=dUzwG4r_3yFWRtDVJyJQlWdx6P5HvTvz9O85j_rL_U4,813 +torch/include/ATen/ops/_make_dual_copy.h,sha256=wnPyPfgn6a10B5idTx6zsllwGew59WCXhq2lHrZrH38,1370 +torch/include/ATen/ops/_make_dual_copy_compositeexplicitautograd_dispatch.h,sha256=s2qGBFT6ci_9R47QLovAHM8R6eX2yhXn9cu5_TJxc_4,975 +torch/include/ATen/ops/_make_dual_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=IeETUThSE0D2ScOlD4cf8fmBAMM0y0FoRRNaJYbOIeE,844 +torch/include/ATen/ops/_make_dual_copy_native.h,sha256=JZT2yeahHZfQHlF51UTaY3CqATTqGcB8VTRSIMfDurQ,662 +torch/include/ATen/ops/_make_dual_copy_ops.h,sha256=04nfiTO1CiW0U9CcG4pJA9SERaZasw4asILvQWkd5o4,1940 +torch/include/ATen/ops/_make_dual_native.h,sha256=Dd8tTIW-W9HEha47mH0X-rCojxuoYtvoyf49bQsA_uA,525 +torch/include/ATen/ops/_make_dual_ops.h,sha256=KMyptSfhuGLlas0NPTBTG5vaNEXTkUyEl1caoTVCpdY,1133 +torch/include/ATen/ops/_make_per_channel_quantized_tensor.h,sha256=UG3U9XhwqUPULzZL2K56C3H35qfk0AM_ZnU1TWeRQsw,1701 +torch/include/ATen/ops/_make_per_channel_quantized_tensor_compositeexplicitautograd_dispatch.h,sha256=FEGlc5nvemr85T-CaFu_DMbac_DhQT8iQ5g0Hy9ZAAo,1065 +torch/include/ATen/ops/_make_per_channel_quantized_tensor_cpu_dispatch.h,sha256=9ZBT-YbWqIEI0zydjeycTU-QoWWizgcOeFy81JXDIi8,819 +torch/include/ATen/ops/_make_per_channel_quantized_tensor_cuda_dispatch.h,sha256=H2NGxzzqFBHSFIQWI3aRzZFgN6Gs_0UPLrw5ubKD1FQ,821 +torch/include/ATen/ops/_make_per_channel_quantized_tensor_native.h,sha256=uV_fR0aVyH3jEVy8T8HTxU9iWHEiyTqO6VYdxyNESwY,912 +torch/include/ATen/ops/_make_per_channel_quantized_tensor_ops.h,sha256=aBKoImPtIquxOe4__xV_eteoMGYxtJ0IMqXJjazoJCU,2226 +torch/include/ATen/ops/_make_per_tensor_quantized_tensor.h,sha256=W5ruymhlmK2x_eUa-3dEhLrFoFl6m2ivdj9vkQNs_xY,1520 
+torch/include/ATen/ops/_make_per_tensor_quantized_tensor_compositeexplicitautograd_dispatch.h,sha256=9Gnh-sSsBW4Snm-r8gLBaIgG04QjaUUq5pmIr1MiIU0,989 +torch/include/ATen/ops/_make_per_tensor_quantized_tensor_cpu_dispatch.h,sha256=r3dyXRudFPxxjUy-DpIMp4YQtG8-UgpIQqmc00_l-VE,781 +torch/include/ATen/ops/_make_per_tensor_quantized_tensor_cuda_dispatch.h,sha256=rMFRFnBRUWdJ4cLq6caOYmQPsqAnfTFJ6XMaZh_Bsn8,783 +torch/include/ATen/ops/_make_per_tensor_quantized_tensor_native.h,sha256=0oiDMCdXOLbbk1aKzyPQfUVjJYW7y7ZW3VA0hPW0NUU,798 +torch/include/ATen/ops/_make_per_tensor_quantized_tensor_ops.h,sha256=MzIxYmEzrV9NPzVZiT9fzXVrVk-9g1ZJl603V-4oFZ8,1980 +torch/include/ATen/ops/_masked_scale.h,sha256=PCPDT2F7kKg7umJttXSvfFkajP1gdgWVZAwakAYnQZY,1308 +torch/include/ATen/ops/_masked_scale_compositeexplicitautograd_dispatch.h,sha256=KEQnL0W77ie_hlZngDA2bjPtc_yfaoph4qvijwVrpQI,959 +torch/include/ATen/ops/_masked_scale_cuda_dispatch.h,sha256=WrK_ulgDWj0KicJyIPpMojZw3kJfFZlZ_aQF7VRvzio,768 +torch/include/ATen/ops/_masked_scale_native.h,sha256=cgAry6Yu7NRt480lYyd4LuFle29Onitys3dP9Z1ZrhI,650 +torch/include/ATen/ops/_masked_scale_ops.h,sha256=kiwo679qQTKCXis_PRQZA3ifx69Yxpf_GT_kW89rVos,1896 +torch/include/ATen/ops/_masked_softmax.h,sha256=q4kza04UYLL-MQ8Rv2JaXfcNwgkJ9nUlfc6Y7o_dJxc,1640 +torch/include/ATen/ops/_masked_softmax_backward.h,sha256=ZUsA4aatJWVD3XnyIievTjckSNEkrd5rb0kP2uY_urE,1709 +torch/include/ATen/ops/_masked_softmax_backward_compositeexplicitautograd_dispatch.h,sha256=fp4rU73pHLWELNVE2XoMoWkB9IflyhIrhVHGJx6YeAQ,1096 +torch/include/ATen/ops/_masked_softmax_backward_cpu_dispatch.h,sha256=7QGnaTcBkTAWkRGrLIxzR3nsv7O7UWbncKn4nc1M1Kc,842 +torch/include/ATen/ops/_masked_softmax_backward_cuda_dispatch.h,sha256=4jBc3mHQ2zVHvLQt-q_LgFG8K_iQg4OzNbPQy9_rMeQ,844 +torch/include/ATen/ops/_masked_softmax_backward_native.h,sha256=26nfj_g1HPVKQPMM_GGC6XDNEcIqRIikLGgeZE3AqOs,966 +torch/include/ATen/ops/_masked_softmax_backward_ops.h,sha256=VxvCMicYlirq2Ox6qZBXU0g2MaGenrb7_wbHo2U6CdA,2286 
+torch/include/ATen/ops/_masked_softmax_compositeexplicitautograd_dispatch.h,sha256=MQs-a93Z6f2GJxoye2etJ9bFzoEmGpFfTNBhy3aqtTA,1097 +torch/include/ATen/ops/_masked_softmax_cpu_dispatch.h,sha256=lGtmsMJFmgXz5yxEbUDhquW17fdNWXNR1YLSTgi7DM4,850 +torch/include/ATen/ops/_masked_softmax_cuda_dispatch.h,sha256=J7lBFsU6c-qwoYRHEpIdeIiNo_3Yut52Hz4qCJOjJko,852 +torch/include/ATen/ops/_masked_softmax_native.h,sha256=FHsFJpoof2kbN6m47j3XXYaSCmCqSz58Z3yPW7tWbn0,975 +torch/include/ATen/ops/_masked_softmax_ops.h,sha256=0UL93-JTWUWlNBctLGqhfLVuJBER9orW8I1kS7HCw5Y,2250 +torch/include/ATen/ops/_mixed_dtypes_linear.h,sha256=6Z3afAxySYYK03MzRLUAU0g5GfzWNqabR6N0UuOcWTE,938 +torch/include/ATen/ops/_mixed_dtypes_linear_cuda_dispatch.h,sha256=bKD7TDLP4hJSz8-MFTlS1W6SNECR40uDWWdS6Ghz5TI,896 +torch/include/ATen/ops/_mixed_dtypes_linear_native.h,sha256=MQqxi2Zd7us7L1Wlm5_JQmrblrkGCDNGyn8ScNR13ww,650 +torch/include/ATen/ops/_mixed_dtypes_linear_ops.h,sha256=5OIAz4ap_my-czfkuWJqvkdrovNrR1uGc150V3njcFo,1479 +torch/include/ATen/ops/_mkldnn_reshape.h,sha256=veoVNwcFgLg2ZkdqEkQ8HCb_iebfZnH0H9X86noALyw,1223 +torch/include/ATen/ops/_mkldnn_reshape_compositeexplicitautograd_dispatch.h,sha256=dQX0j3rjKvnDzOCC1NPH1K3CgBjUKOUpaN0ntQtYbt8,931 +torch/include/ATen/ops/_mkldnn_reshape_native.h,sha256=2EGYX_D3ffSFh7xGxlqbBmiwww6gs0vhMcbhCmEfP9M,617 +torch/include/ATen/ops/_mkldnn_reshape_ops.h,sha256=F3Bz_G6jLvzHEdxViSWLKnxMXOMmtxfIFJ_NhS94TrM,1796 +torch/include/ATen/ops/_mkldnn_transpose.h,sha256=_XEueHzeSS3FtoYJh6JB6bOn8Bclp9D19cmisMpE9nc,1529 +torch/include/ATen/ops/_mkldnn_transpose_compositeexplicitautograd_dispatch.h,sha256=VRU7fTRIckiRxHBpXF9VJAR_cicO-IggHcJ7AJF-8KY,945 +torch/include/ATen/ops/_mkldnn_transpose_meta_dispatch.h,sha256=5RLin9bnxhXbMZs2kIHb9u6EPyLgKDXE58wvn67EPO4,758 +torch/include/ATen/ops/_mkldnn_transpose_native.h,sha256=3UR58elPwJ__PSa2kDhNN_r5PJQX7Q3XCBYwxkb40Yc,720 +torch/include/ATen/ops/_mkldnn_transpose_ops.h,sha256=p86DkbsgE0NLNALqyoWXkXpl_V_bjT8tHTG1H4hFQps,2503 
+torch/include/ATen/ops/_mps_convolution.h,sha256=AgkV4VwfvwysysZZfaERHsDR0xmC0okd-P06aqSs8t4,6862 +torch/include/ATen/ops/_mps_convolution_compositeexplicitautograd_dispatch.h,sha256=tEwqXGHWWXBg7bPsV3NaiHyHZFvzccbgrs0syRhkNX4,1750 +torch/include/ATen/ops/_mps_convolution_native.h,sha256=shFfFFxz6HvO47BxkKfVjUSwr1qtOU5ZpTI9SaA2Xl0,693 +torch/include/ATen/ops/_mps_convolution_ops.h,sha256=WJtqgod_GKTT6WwCQEI7Kd8yegDdUVClKteTa8d_RRo,2816 +torch/include/ATen/ops/_mps_convolution_transpose.h,sha256=otMC_qoSOBaxwYoT9KHsofRab9QJzxS70j50KG_jLgE,7418 +torch/include/ATen/ops/_mps_convolution_transpose_compositeexplicitautograd_dispatch.h,sha256=rVxcIdde4asxWUVBKPFStVeFH2z51EpsGPIQdBEkLkk,1758 +torch/include/ATen/ops/_mps_convolution_transpose_native.h,sha256=SaDs-HbzJ-Q9vMEuaFPRrcc1dHFNoJigT7-fsODzbFw,697 +torch/include/ATen/ops/_mps_convolution_transpose_ops.h,sha256=0IWuxCdcgOPIWZ5_S9QEbDJ0UFPNksbOHJZJDEYt5Ng,2842 +torch/include/ATen/ops/_native_batch_norm_legit.h,sha256=WBkWOvy3LIRP-bSwjwbSQHV3Nogmq0Yf5x4kXbXoNbw,5352 +torch/include/ATen/ops/_native_batch_norm_legit_compositeexplicitautograd_dispatch.h,sha256=J4S2L2GJQ2VZ-XAY0OLRyJ60DF-lOOO5oPpjQkaj48g,1047 +torch/include/ATen/ops/_native_batch_norm_legit_cpu_dispatch.h,sha256=VxIN5roxz3Ley9a2Jk--CX1pOXy6toXjG7e9OmKDOE4,2573 +torch/include/ATen/ops/_native_batch_norm_legit_cuda_dispatch.h,sha256=cec2Zww5TfD9ZoYwnNkVtUousJ4M7fWWlOlMAGB5soU,2575 +torch/include/ATen/ops/_native_batch_norm_legit_native.h,sha256=miNZv5Ocp-HvDgIih1ccTrigrtaIxjPCyxXuJP0JVfQ,3751 +torch/include/ATen/ops/_native_batch_norm_legit_no_training.h,sha256=aroGCzYgvoDObu84mM-n4FgdjMt7H0TaHPV-PzmY0cY,2675 +torch/include/ATen/ops/_native_batch_norm_legit_no_training_compositeexplicitautograd_dispatch.h,sha256=UgH-0dhOsZzmQyiBFg8Ko6oRml8RzEhB_amxyT3y_Aw,1748 +torch/include/ATen/ops/_native_batch_norm_legit_no_training_native.h,sha256=VUB6xfYVxCtf0WBB53mlRIHlvWl_bQT6jH2q3mrLBGo,1084 
+torch/include/ATen/ops/_native_batch_norm_legit_no_training_ops.h,sha256=UdCKVD2P72Z96UN6A5Cu485EOOiiP3TwQdh5Himj-EA,3365 +torch/include/ATen/ops/_native_batch_norm_legit_ops.h,sha256=v1ajpsyEZeimpNv1LP8YBgxy-ywlU-NP1hx4_YukSeU,7500 +torch/include/ATen/ops/_native_multi_head_attention.h,sha256=IlbzgYyVvGnpVz7tZEtQDm1dU2dwwNOnrMLGXoDZUUA,3566 +torch/include/ATen/ops/_native_multi_head_attention_compositeexplicitautograd_dispatch.h,sha256=U-qeFursUEuYdGq45ZdSaihK30OPZDPQ5p2b9rZgXJg,1701 +torch/include/ATen/ops/_native_multi_head_attention_cpu_dispatch.h,sha256=OF74FFWbaq8Da22VGGrMAl98mtHEiSzU3fmCbP2O12s,1129 +torch/include/ATen/ops/_native_multi_head_attention_cuda_dispatch.h,sha256=O7ncaaC7XW2FahgyaeNFStryLYw7tmVQhrp7-UjCzb0,1131 +torch/include/ATen/ops/_native_multi_head_attention_native.h,sha256=EmYqriJEYUKjkpf0FhPog83BFAf2VFauBS6Hq6FQLy8,1836 +torch/include/ATen/ops/_native_multi_head_attention_ops.h,sha256=kLoLMa69NNhZVDYAwgUPEuqZV1BqkIlQAJnHi_7bWdY,4188 +torch/include/ATen/ops/_neg_view.h,sha256=NHmDPyT7ZzOzzTTVCBbe3z6vdCB3XTMPdzbP0o6C3oA,632 +torch/include/ATen/ops/_neg_view_compositeexplicitautograd_dispatch.h,sha256=M1LBYfnOR-NeAdjN0g-3Z1PP2J5xy8QCwzgJnh8Twm0,767 +torch/include/ATen/ops/_neg_view_copy.h,sha256=ywB06CM3fU743favcNsY0rLZqZLaCJJuGoY6Y5st5bg,1084 +torch/include/ATen/ops/_neg_view_copy_compositeexplicitautograd_dispatch.h,sha256=ASqnPEJ4_R3Y7woBI7vW83fyQcJfOZjGn6mG0iVFnTU,883 +torch/include/ATen/ops/_neg_view_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=a95U0EWNIO2efX6AxunHMiwKTa6SHmgS38uM0pwG9l8,798 +torch/include/ATen/ops/_neg_view_copy_native.h,sha256=Ub2j0UzHLiaBMWc8xcMP_NLBZ-OPMwy4rI_4ofTtN_U,570 +torch/include/ATen/ops/_neg_view_copy_ops.h,sha256=b8vs_burh7Y7y0jIQPKXH_Pf7DymrV8p4vY6GWdjju4,1638 +torch/include/ATen/ops/_neg_view_native.h,sha256=G4CYBJXhT33gy4p2JXxHYxWTphNV3qviPyliPdn9BBs,479 +torch/include/ATen/ops/_neg_view_ops.h,sha256=LxTKYLNKof5Ot0RyvvDGf267PKDzMEIro7VmBBEsjrc,982 
+torch/include/ATen/ops/_nested_compute_contiguous_strides_offsets.h,sha256=xk23ifHHaXycU_hcaLzmBtGXBBLM9mhI-xpkldL5I_o,814 +torch/include/ATen/ops/_nested_compute_contiguous_strides_offsets_cpu_dispatch.h,sha256=7SQFUgUTT25jcg3K79ZlBrVCBh_7mEOieyywhSlTKNw,788 +torch/include/ATen/ops/_nested_compute_contiguous_strides_offsets_cuda_dispatch.h,sha256=kaSxaRr4VaTx_yfHAY_r8tWtIQpuKQUkXzmNnYP5Ds0,790 +torch/include/ATen/ops/_nested_compute_contiguous_strides_offsets_native.h,sha256=v72ooiCnYy6bEQiNXghjL1nvsGAQ5aCpgUwWtgLot4w,544 +torch/include/ATen/ops/_nested_compute_contiguous_strides_offsets_ops.h,sha256=X4_wlDvcWELF40TL1tVv17CgZXKLpH0VzeWbK3ht7dw,1181 +torch/include/ATen/ops/_nested_from_padded.h,sha256=kZNE1SX2XvA4ABHq06wZijIV6mzWy9W1mKraCScPV3g,1713 +torch/include/ATen/ops/_nested_from_padded_and_nested_example.h,sha256=H1kQ7IVM5d0OHDZgTGsb0Fr1VS7NUkxz3jK2tMSbY7s,1528 +torch/include/ATen/ops/_nested_from_padded_and_nested_example_compositeexplicitautograd_dispatch.h,sha256=-w0gJjiXz5II1sm-XSjIYU3n0VhdNOcmkXWlUrTiYfg,997 +torch/include/ATen/ops/_nested_from_padded_and_nested_example_native.h,sha256=cW31Kyf5C0tNSOuAjr-k8AF57kWfXXcieMFOUx9he5E,689 +torch/include/ATen/ops/_nested_from_padded_and_nested_example_ops.h,sha256=Xi5kkRt-LGHt3s0xuzCqQ4-NYD08FEHjx67_L23CBKQ,1996 +torch/include/ATen/ops/_nested_from_padded_compositeexplicitautograd_dispatch.h,sha256=iyZC19WxsyK6vUVPUNFiBQ2rDZdnJgonSzSQJ0Hxzi0,1045 +torch/include/ATen/ops/_nested_from_padded_cpu_dispatch.h,sha256=OAoLdpriJVeuwnzqwpXMrE8fLxx6HpAH8vUv782fhEw,812 +torch/include/ATen/ops/_nested_from_padded_cuda_dispatch.h,sha256=9cdVyHehT1qyqarv5tZfyzmNRpSQ95czCGTgtTv-Lac,814 +torch/include/ATen/ops/_nested_from_padded_native.h,sha256=Mh_YWH2JYtzVzGkkUhTnioNXb9ee9XiXfqd65FpngGE,889 +torch/include/ATen/ops/_nested_from_padded_ops.h,sha256=XDZdgK3u5SZVTHGDYgO6FKIyeQKkFiCMoDSxbqdD5qo,2146 +torch/include/ATen/ops/_nested_get_jagged_dummy.h,sha256=IxUGqKJxc7ESmGzHMk0Cve93xp8sX_HrE3P-XOvUbc4,683 
+torch/include/ATen/ops/_nested_get_jagged_dummy_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_jagged_dummy_ops.h,sha256=d6ppv9UDyoihfjYDJgey0nqcywdGwcyG3HTBWAeN-rM,1018 +torch/include/ATen/ops/_nested_get_lengths.h,sha256=KAAaG9XZo3fyvNrRFmRqOFUa854gHPeoj2DYL2jWIf0,666 +torch/include/ATen/ops/_nested_get_lengths_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_lengths_ops.h,sha256=H04XuEi51YAM_gZafPHlds38nPLJ_VJxNAlRadUmEqY,1006 +torch/include/ATen/ops/_nested_get_max_seqlen.h,sha256=6gTHRov7xBsHTaTX6zQvmgU0W5Km2UgX2GM1lnDDTtQ,678 +torch/include/ATen/ops/_nested_get_max_seqlen_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_max_seqlen_ops.h,sha256=t6xwtWztA4tYr6XToE2ouVQWFo2Th4T8710uhR5EP2w,1015 +torch/include/ATen/ops/_nested_get_min_seqlen.h,sha256=uF6E2Rx35looDw_ldqQatwTnflyJ5QWaOn_iOvasMdg,678 +torch/include/ATen/ops/_nested_get_min_seqlen_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_min_seqlen_ops.h,sha256=BtgA6DUey_QoFTIs4c7r8LJRToxDECdueBhrfzHLNAw,1015 +torch/include/ATen/ops/_nested_get_offsets.h,sha256=-qq2keWx4KoqvGkasvxPMGqAQzz-FESsqFaaMTQ0Vb0,666 +torch/include/ATen/ops/_nested_get_offsets_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_offsets_ops.h,sha256=xyTS4TDic6UpR_urIWFePBOZD02296M5jlatzLEp0mE,1006 +torch/include/ATen/ops/_nested_get_ragged_idx.h,sha256=f36TcsBuNgn4VR9u_-xyuCC63rjpxwjLt8T7o7d3B-0,672 +torch/include/ATen/ops/_nested_get_ragged_idx_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_ragged_idx_ops.h,sha256=D5IjuVTGYILjwUYAMYDs4UAw03JGRAc17xwW3Q8J4oo,1003 +torch/include/ATen/ops/_nested_get_values.h,sha256=CFno8Zusjc-mB_nbxQ1y0BnKyhCgDUbf-VKYP8g8oUQ,668 
+torch/include/ATen/ops/_nested_get_values_copy.h,sha256=VMl_aK-ihEyRMY7KgLP-eodNDHGYfHkH9EoQ_lGhOw4,1174 +torch/include/ATen/ops/_nested_get_values_copy_compositeexplicitautograd_dispatch.h,sha256=-m7IQ0UzLJBaCPItHEj7dBrgWlmI3L1TzlrbBD5GpS8,901 +torch/include/ATen/ops/_nested_get_values_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=gZzOnSQPabcMIGSX-ic34UTH4orJSnpjCIZKMFnsI6A,807 +torch/include/ATen/ops/_nested_get_values_copy_native.h,sha256=gNbyVmnYt7uyDygf7S-dtNwRvBy97KNfLFOXrFDxZYo,588 +torch/include/ATen/ops/_nested_get_values_copy_ops.h,sha256=KpBKrCBbZQvrHULHPwKC76vv8cJT2JkPdowcn0PEZ7Q,1692 +torch/include/ATen/ops/_nested_get_values_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_get_values_ops.h,sha256=52O29ltjPDDWfv4AHkzdTKENRJJjs8XPNbnfQt8kMXo,1009 +torch/include/ATen/ops/_nested_select_backward.h,sha256=Vnc6075QmPNgwyW_rQjh1D0MthqC1UqF3J_fLoJ2qlc,1779 +torch/include/ATen/ops/_nested_select_backward_native.h,sha256=zrZ6QbvOHNcBibPBlZM7javLsvFNppEaS39UmI7L6aI,564 +torch/include/ATen/ops/_nested_select_backward_ops.h,sha256=S5FHnHcFt1uCEYVSQQo-QeQUkKGH6zCaU513tIN0qWI,1231 +torch/include/ATen/ops/_nested_sum_backward.h,sha256=91U6ijPUVd-Bt20FF7M9p6zdhD1823-HaL5kZJltlaE,810 +torch/include/ATen/ops/_nested_sum_backward_native.h,sha256=-s5BiwhSiWOfEumrHrbC4HjPlt8ow4nhj0yhYSRyIaQ,568 +torch/include/ATen/ops/_nested_sum_backward_ops.h,sha256=BFhSx4Ovmf9K5dy_6AetM7Ln2umDVgORo5dtHO4mhyc,1242 +torch/include/ATen/ops/_nested_tensor_from_mask.h,sha256=3aZ8CLkfusuROSJUco7gd3mSXE3qi2MEh--9dz_AhqM,1452 +torch/include/ATen/ops/_nested_tensor_from_mask_compositeexplicitautograd_dispatch.h,sha256=yG2KKPVZNa9fSt_uN3Q8JAxmTJVGQS8kbasEZZzlBpQ,986 +torch/include/ATen/ops/_nested_tensor_from_mask_cpu_dispatch.h,sha256=_db9ON9nYCPA3jwClpy-5YbOfmtYTEEkRn36GqPzmt8,782 +torch/include/ATen/ops/_nested_tensor_from_mask_cuda_dispatch.h,sha256=Lo80aWfYrvo_W_UxTIsmQlilRAJhjJ4MtYUgGNrYvGg,784 
+torch/include/ATen/ops/_nested_tensor_from_mask_left_aligned.h,sha256=jmYUGwL-F1EFkNl20E64e3MY5z4p7vcxQ2G3g0PkvBc,765 +torch/include/ATen/ops/_nested_tensor_from_mask_left_aligned_cpu_dispatch.h,sha256=W2lvBKb80HYKnPA1rppp6TyLBUs2PaJlfdr3lGx-8nM,767 +torch/include/ATen/ops/_nested_tensor_from_mask_left_aligned_cuda_dispatch.h,sha256=6HJcOCX2R8e251BiY7tEjWcmY0MlKF3gzG0bkLDFFUs,769 +torch/include/ATen/ops/_nested_tensor_from_mask_left_aligned_native.h,sha256=HhcabMmXZikf0oMBIgm0Y1dWwN2wHj4I11W0199J7EU,535 +torch/include/ATen/ops/_nested_tensor_from_mask_left_aligned_ops.h,sha256=fskCjiwr6ZrJbJbvYyAQJARxXDclxQ7tNeiCTWIJ8mc,1114 +torch/include/ATen/ops/_nested_tensor_from_mask_native.h,sha256=tqA-6hR07LasfPzYked8RsOh6fgKgShjZpmhHpv1O2E,685 +torch/include/ATen/ops/_nested_tensor_from_mask_ops.h,sha256=kGrYQ13lL4c6ycahfGqpkzXTVH8nb_NUOEzvMhzWq7I,1970 +torch/include/ATen/ops/_nested_tensor_from_tensor_list.h,sha256=aBmZBwB0rMYDNuwgMZFRO0BB9Igmpa_DJHgxvvHXJNw,2175 +torch/include/ATen/ops/_nested_tensor_from_tensor_list_compositeexplicitautograd_dispatch.h,sha256=gFlQZLeWGJ5fsjkRZ5FWEWUDQ9YFxGs4lFODpx1ED0g,1539 +torch/include/ATen/ops/_nested_tensor_from_tensor_list_native.h,sha256=haf40hCL_DdeC0uVFZtjEDOJcWt_j0bYOFipUgPTQPY,946 +torch/include/ATen/ops/_nested_tensor_from_tensor_list_ops.h,sha256=AtN2otdp6eralXi0zmEV8opIEtfi-DMF81gGqDWx3Y4,2706 +torch/include/ATen/ops/_nested_tensor_size.h,sha256=j-lF-fILCQz99RJpF_fb8RfwQEoWiHjsBDxWGoCrLmY,960 +torch/include/ATen/ops/_nested_tensor_size_compositeexplicitautograd_dispatch.h,sha256=kRC_WD1a497H4IcJLxouqjtaNbuCBDtPtFPdTHzGW4s,893 +torch/include/ATen/ops/_nested_tensor_size_native.h,sha256=6GTtwT0Qbb83B4Y7aO4UiaFfPiwLr7cLa8YsK08vut4,580 +torch/include/ATen/ops/_nested_tensor_size_ops.h,sha256=N0e7b-FuX3AVmKUQaF-eoIea6P99I3MfivX218mG02U,1668 +torch/include/ATen/ops/_nested_tensor_softmax_with_shape.h,sha256=lJrGoKmB0VG_B8AilWUL_2OR_ahaAoyTwyziTaEbeF8,769 
+torch/include/ATen/ops/_nested_tensor_softmax_with_shape_native.h,sha256=fK8I9pDvDb6eHPOW2Y5kHD9dKrD8AMOQ4ol6PkqJY6M,631 +torch/include/ATen/ops/_nested_tensor_softmax_with_shape_ops.h,sha256=pgmHJWSXe6H82N_433XIqQjHdsjD46zHp5wGNRPc_-A,1134 +torch/include/ATen/ops/_nested_tensor_storage_offsets.h,sha256=0ynDbVhntzDQGWtpdpH0r6rJtpPBE2MukctJoE7j2ac,1037 +torch/include/ATen/ops/_nested_tensor_storage_offsets_compositeexplicitautograd_dispatch.h,sha256=29pbwDFoil8GmlZjcmkDayjvoo123IQ8I5zz7AQWorU,915 +torch/include/ATen/ops/_nested_tensor_storage_offsets_native.h,sha256=dcEA-bj6Aj1NoM5oa66W8ktNekVr_vy05FngMdKea9A,602 +torch/include/ATen/ops/_nested_tensor_storage_offsets_ops.h,sha256=UEGbDsgTcCfL6GfqKoN5AQW2k9ODDcvHnUawo58ElQ4,1734 +torch/include/ATen/ops/_nested_tensor_strides.h,sha256=q1xGC-0T_0hJ1Kd30YT_8hd07Uy_KXO-aWUVTR336U4,981 +torch/include/ATen/ops/_nested_tensor_strides_compositeexplicitautograd_dispatch.h,sha256=PzjmLHBtpG65NfzI2svok35epdEUzO4yvuOd6FtBPCs,899 +torch/include/ATen/ops/_nested_tensor_strides_native.h,sha256=SgdtVuVSWEjz3LDfgXcDP9j4UXj_KF69-Eat4KzwIGQ,586 +torch/include/ATen/ops/_nested_tensor_strides_ops.h,sha256=fm0XMcoz2T7XwsU_YkYhQxP9rX8OWlgiIUeJjzdxeyw,1686 +torch/include/ATen/ops/_nested_view_from_buffer.h,sha256=kFedxw-UWuiwZ15Ec7pLFOKIkVAOeePJaoyr2oG3LAA,884 +torch/include/ATen/ops/_nested_view_from_buffer_copy.h,sha256=6Ngvbx8Ez0rqzIJPvAyz3_QmRI2_Y-0-dN8t5wPQROA,1810 +torch/include/ATen/ops/_nested_view_from_buffer_copy_compositeexplicitautograd_dispatch.h,sha256=4xsnR3nVM-AO3yBQJxO5gJu-n7NpWbvXfXhQrRUA1bQ,1103 +torch/include/ATen/ops/_nested_view_from_buffer_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sDMSMr0Bk34tO9Euu5kbWD98NiTqd6GpQYg1pK7XslM,908 +torch/include/ATen/ops/_nested_view_from_buffer_copy_native.h,sha256=_YAKUuqh1FGlK4qGFatFAWT4kSv8MrgMhH834Op-N9g,790 +torch/include/ATen/ops/_nested_view_from_buffer_copy_ops.h,sha256=dV5tAfja_ifdiNuJ6IL4X2jFfDo24W1Vv0X2QPcqNeA,2346 
+torch/include/ATen/ops/_nested_view_from_buffer_cpu_dispatch.h,sha256=ykOzV8r4xLOhbou-kyl1YCass5Dxc0xvdONF1Y1cxaQ,833 +torch/include/ATen/ops/_nested_view_from_buffer_cuda_dispatch.h,sha256=ReUTHWBjjs8oZU3EbRd9Zt8LWSjRiwdXJ0FcuUIN6Fc,835 +torch/include/ATen/ops/_nested_view_from_buffer_native.h,sha256=SzaXu2OpiF0LAq4GfyXjQ7_8TYQ-YwMfVD0ERPxo7PA,589 +torch/include/ATen/ops/_nested_view_from_buffer_ops.h,sha256=QsJRnkWw0C3PxPPslNngdMUrLmvbGaCVYWP94cghxaU,1336 +torch/include/ATen/ops/_nested_view_from_jagged.h,sha256=6Uw31pxocKrKMvITWvAWFi4cUjHb2Mcg8vSwoqjVHI4,1099 +torch/include/ATen/ops/_nested_view_from_jagged_copy.h,sha256=eniZZ6pRNGGQmJ7TM-LWPuKnS3syMGaHQkHnjfXMWSY,2444 +torch/include/ATen/ops/_nested_view_from_jagged_copy_compositeexplicitautograd_dispatch.h,sha256=uwnHVeJT5DxsneTZ1gSJcqRUY1eaBBP5kGs0Hb1e0WY,1354 +torch/include/ATen/ops/_nested_view_from_jagged_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=hJaob43NAAae_jqXC5nCr5I8nGnYqyzF2UNGBCpPLjw,1039 +torch/include/ATen/ops/_nested_view_from_jagged_copy_native.h,sha256=qu1lZQ8n3qlPfpavPO-daDa-Uh5rdtUjkd_EHFZGoow,1041 +torch/include/ATen/ops/_nested_view_from_jagged_copy_ops.h,sha256=SbgstuXqK6nqGCf8RYQLW4oUckZZNHAsky1Xaim3EjA,3148 +torch/include/ATen/ops/_nested_view_from_jagged_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_nested_view_from_jagged_ops.h,sha256=auKS5M-vZEnXOpM7wzs6h-tL5tK9ZJ8YJUDtsIqNO8Y,1737 +torch/include/ATen/ops/_new_zeros_with_same_feature_meta.h,sha256=qS_fkji7gXnEo0UGojYSpj_5ooourPkLUPWqx-XoaGA,1653 +torch/include/ATen/ops/_new_zeros_with_same_feature_meta_compositeexplicitautograd_dispatch.h,sha256=9BENUHd0r_WwACCsYcRe3j9eT4RpazOGyJg7e6y4CQE,1171 +torch/include/ATen/ops/_new_zeros_with_same_feature_meta_native.h,sha256=CBmVt7sRU3uUcoctkKFFhnohr_GASzHJemhh4iqcaMQ,720 +torch/include/ATen/ops/_new_zeros_with_same_feature_meta_ops.h,sha256=QZLWHgKu7WVFhMHCeAaALHRg73MgPNcdlE8WopP434E,2115 
+torch/include/ATen/ops/_nnpack_available.h,sha256=vG3C7biBJmbyQKs5RII6bXOEQwv7kiUQkhslNiATAec,612 +torch/include/ATen/ops/_nnpack_available_compositeimplicitautograd_dispatch.h,sha256=2oNwH-lWZpytHSxG8tJmeiHlNX8f7_nzcktVxqFpigk,746 +torch/include/ATen/ops/_nnpack_available_native.h,sha256=NU4jkiwZCK4BK_Bn_D_T4p24_gGk98dWBlRGQRhO54s,458 +torch/include/ATen/ops/_nnpack_available_ops.h,sha256=AqNpeodVuXzOvhCL_Tys9n8jWONvxPkc8sabaL4NeYU,903 +torch/include/ATen/ops/_nnpack_spatial_convolution.h,sha256=mJu2Clcu93TbvhHDeKc1m6zJUbKSdeLqVCEKXE-JU2E,6197 +torch/include/ATen/ops/_nnpack_spatial_convolution_compositeexplicitautograd_dispatch.h,sha256=iuB92Rh4qWfLW2AruDg6UEfAN5Ujcgduu4LxC_OnrNs,2051 +torch/include/ATen/ops/_nnpack_spatial_convolution_native.h,sha256=jubJ0IPto0mHhwfsHO6ba31deRFyzqkMaLWuy17WZv0,851 +torch/include/ATen/ops/_nnpack_spatial_convolution_ops.h,sha256=GDM1MCLXBC4tR32ItVp0LmZF4kcLhlTZCkzkpbaSsxw,2560 +torch/include/ATen/ops/_nnz.h,sha256=vigSvXPTuGc26a6D8CAfnCPx7rk3BqODpUUXuOg2Pzc,478 +torch/include/ATen/ops/_nnz_native.h,sha256=jw0invqXBjDYiKKHiUMFIeMfc6fFspjSbtP_LCXBwXk,538 +torch/include/ATen/ops/_nnz_ops.h,sha256=VCACCHDc_7lTBH_RwpwRGj6WWH8_uWCER9G8NJ-xNkI,949 +torch/include/ATen/ops/_pack_padded_sequence.h,sha256=Pbc00CixftEraNNvs3j4Y20IwdxEGBhnuw_bnjhQUXc,1676 +torch/include/ATen/ops/_pack_padded_sequence_backward.h,sha256=6yhqvamXfwJemB2L714VwrT7K4m4O1MPQYkhUlBlFH8,2064 +torch/include/ATen/ops/_pack_padded_sequence_backward_compositeimplicitautograd_dispatch.h,sha256=yCm5tI9c10nPNYM7zmL6r_Z3extYX9DLNCBxJd2RGCk,1033 +torch/include/ATen/ops/_pack_padded_sequence_backward_native.h,sha256=Fb64S-Np3OGjj9elba6Eh2KtNluxYo1quHwDXqXEZmM,589 +torch/include/ATen/ops/_pack_padded_sequence_backward_ops.h,sha256=-xoeEeHL5tk4U3lItTCSgOLvWzrMt8lLDaWiXh1nm68,1309 +torch/include/ATen/ops/_pack_padded_sequence_compositeexplicitautograd_dispatch.h,sha256=gzDQ_JNr51ONerugHWK1PYQoR8nBXA8Vw-6N6qwNkZc,1226 
+torch/include/ATen/ops/_pack_padded_sequence_native.h,sha256=e4kC3JlvbutkwmlBHjXrNq-esT13PpBenc_W4NkEM88,750 +torch/include/ATen/ops/_pack_padded_sequence_ops.h,sha256=jWi_ZqRCU_oXI8TdEcxE51hIw4gu8fbkitdz3glJ-VM,2242 +torch/include/ATen/ops/_pad_circular.h,sha256=C9uliME-i2IYwh1bc6eUt9l6yuwcGtRAkn2jPnR62R0,1427 +torch/include/ATen/ops/_pad_circular_compositeimplicitautograd_dispatch.h,sha256=QtTIA4I4E2HDYpcvc3WSw_4m_eBNk-ZlcRvGpsIt3Z0,885 +torch/include/ATen/ops/_pad_circular_native.h,sha256=JZwjVzvgAyfZb0iWYCkOWy-vgS85on4DKtcH2uFDzfw,515 +torch/include/ATen/ops/_pad_circular_ops.h,sha256=kicbZBO8hLj4RChZhnqXZG8ZG0AthdfW8Q-iRVq-AZI,1073 +torch/include/ATen/ops/_pad_enum.h,sha256=xNAuO-ZOjiwBQc6ttwH3DsoykHQT09jfYx72MEA3g1s,1733 +torch/include/ATen/ops/_pad_enum_compositeimplicitautograd_dispatch.h,sha256=_Ul2T3WfMKsBgHBLecQG7k5CxLU5PsJTxxWGtrLipAk,997 +torch/include/ATen/ops/_pad_enum_native.h,sha256=xaoAjJT-gj6ZCHgVctzHPS8u25--c8djm6VW_pvF45s,571 +torch/include/ATen/ops/_pad_enum_ops.h,sha256=iZByWhwWMBmU5cZ6Iqjy0_lfNdyU2jkKddnYdDFWPvo,1214 +torch/include/ATen/ops/_pad_packed_sequence.h,sha256=4GOw3GfdhzIwSoKYwrIA0QGz3mCpH99HKAxNaue5piU,944 +torch/include/ATen/ops/_pad_packed_sequence_compositeimplicitautograd_dispatch.h,sha256=9G5cHee8yRICx-GiKm1ArlAkMn5awQqQHH3xTIYGO1Y,909 +torch/include/ATen/ops/_pad_packed_sequence_native.h,sha256=eAPciXJcYnb6fJYMQjTUOv6BljAEiOXZrkve2SEXt98,621 +torch/include/ATen/ops/_pad_packed_sequence_ops.h,sha256=pKEurR9nKg4d7M1QfVIh6GPGeeHoH9TuvOnLyDzghlQ,1439 +torch/include/ATen/ops/_padded_dense_to_jagged_forward.h,sha256=bdO0XYjY4_SWXRq8NgJ8-I1K6sSH2G5CJMVRsR8X_aQ,2045 +torch/include/ATen/ops/_padded_dense_to_jagged_forward_cpu_dispatch.h,sha256=AZjvbhFpE6fSMKmOSDsVleqgbZCVdI7NEUMC4xJwawc,983 +torch/include/ATen/ops/_padded_dense_to_jagged_forward_cuda_dispatch.h,sha256=K_tFEMqc-lZY7uFfTFvj4BJgtN2MbDMag5pP92qdfBw,985 +torch/include/ATen/ops/_padded_dense_to_jagged_forward_native.h,sha256=j55D-SNQKIXmhcxDYUgrQpipCYqneAdVhxnc2faBudk,743 
+torch/include/ATen/ops/_padded_dense_to_jagged_forward_ops.h,sha256=Uyd6bRLEYEJ9nF4kv_9LooWheuwpkkg3lsWA9lJIEhY,1255 +torch/include/ATen/ops/_pdist_backward.h,sha256=_pvLquV1Tw2PlpKgIurovB17Eg_zLvJST1oEonA0-C0,1433 +torch/include/ATen/ops/_pdist_backward_compositeexplicitautograd_dispatch.h,sha256=H77KYk4noXgKrj5PWFq6QBNLGCcFXWJelo8ra8uHptk,1007 +torch/include/ATen/ops/_pdist_backward_cpu_dispatch.h,sha256=a0N0hDzke-MrHpOOshIQ0d5cDt3owOjIuhHw-GN33vw,790 +torch/include/ATen/ops/_pdist_backward_cuda_dispatch.h,sha256=X_MnIgPf0TDhjxhvF0WBUqtHBCudUXjakoj4wD_y5Cg,792 +torch/include/ATen/ops/_pdist_backward_native.h,sha256=RZw8vFMKw0tj6rdCc3VlBhx3kURJ-BpZeMTUVE4ANEc,694 +torch/include/ATen/ops/_pdist_backward_ops.h,sha256=SXQPjx-HpAQqbw1Sr6Xln_nR2k42huYpzFBLyBvaREA,2056 +torch/include/ATen/ops/_pdist_forward.h,sha256=UcVdxWKGal8Yom6aDlQTfX2Ccnp1MFqR-0T8zicEKxM,1160 +torch/include/ATen/ops/_pdist_forward_compositeexplicitautograd_dispatch.h,sha256=rF6HXPJB3pFFLxcZ0POzqR-3Qk0XtPoA7s2EsT8nhnY,905 +torch/include/ATen/ops/_pdist_forward_cpu_dispatch.h,sha256=u2s3-RvtmlNDg1NunA4fvlG4zmeIOUQmwDGt7ZLmWFE,740 +torch/include/ATen/ops/_pdist_forward_cuda_dispatch.h,sha256=cXE4nR7YrJpW095VS1G6BQV9PsdQs5CfvMYxIiuODws,742 +torch/include/ATen/ops/_pdist_forward_native.h,sha256=-YH_I9GBAj2U4UG8YETxGrDprrU8p0W6Ps5uGnNGeqA,592 +torch/include/ATen/ops/_pdist_forward_ops.h,sha256=pxce3iLIcgoiuNMu7s6251eVgv3vl2z5Zq_C8rTbQ6s,1716 +torch/include/ATen/ops/_pin_memory.h,sha256=vBHPFdEy3jKfrAVmBi4X_pMaRYhLOeHfJ4W_3P4AHGg,1279 +torch/include/ATen/ops/_pin_memory_compositeexplicitautograd_dispatch.h,sha256=xe3pci6o1fKBcgotklmg7ddO9xR65h2gtF4H1t8eC-U,1074 +torch/include/ATen/ops/_pin_memory_native.h,sha256=V-BYj_U_nYP5reuKqgmfaagf443T1SEMKbQnQovkMF4,1017 +torch/include/ATen/ops/_pin_memory_ops.h,sha256=4boypU8IiCqJtmJ1eXi4wov7Z2lRe8zxeLxMyuuJtIU,1864 +torch/include/ATen/ops/_prelu_kernel.h,sha256=XC64isXfRsmdrJ959D_TgNCrhX5V1VlTJAJpw1sm1TI,692 
+torch/include/ATen/ops/_prelu_kernel_backward.h,sha256=PQLLjPJ6IAAK1Je9m7SMB8NrCVFMuBNq5rYo-JDboGs,828 +torch/include/ATen/ops/_prelu_kernel_backward_cpu_dispatch.h,sha256=BmPFx4Bemy8zHsuSquGti1mRPy_wR76KKnfI9ixHFtI,820 +torch/include/ATen/ops/_prelu_kernel_backward_cuda_dispatch.h,sha256=C0v-2Nhx_sYL7mHSbmw4Ct6IvA92oPLq_PHThlNzhsI,822 +torch/include/ATen/ops/_prelu_kernel_backward_native.h,sha256=tIpTnIEtIrw6GG7EFeL0fDPhPj0O69mQdoROuT1IVxA,729 +torch/include/ATen/ops/_prelu_kernel_backward_ops.h,sha256=ufMctZ6ftVAsd7FCmLseylTL3Bf5ZEijlaFAwzIOpKo,1293 +torch/include/ATen/ops/_prelu_kernel_cpu_dispatch.h,sha256=qNj7fNqiaxYmsptuzl8BBDkus1kso9PrFRkXxQzW3Ec,754 +torch/include/ATen/ops/_prelu_kernel_cuda_dispatch.h,sha256=G5XP80uUHs7AlHaKFBER66SCi6zJG49wdu4v-Ki0Cdg,756 +torch/include/ATen/ops/_prelu_kernel_native.h,sha256=Lo8r7sv2rl843cvVHk5d0tmwEJVz0T2ySrGReh8Gr-0,699 +torch/include/ATen/ops/_prelu_kernel_ops.h,sha256=yCbGPMp2eSydUw8GfPb5b5vW8-0QFYKLHhOCgNy3j6A,1077 +torch/include/ATen/ops/_print.h,sha256=-rCnvbzsZgWVlbU4_QGjQ66UWDdG0l67tTGu9pzkA7c,590 +torch/include/ATen/ops/_print_compositeexplicitautograd_dispatch.h,sha256=7atO-mRnQLIHquBmE-UcnShUZsV7bnvNzSCjSMrp6GI,753 +torch/include/ATen/ops/_print_native.h,sha256=wjgagQSENvcf5nzHhUUmPngwPLGT3V1AwLXEEUTeKHQ,465 +torch/include/ATen/ops/_print_ops.h,sha256=X-MsVZ18Zq1iOdG1u0YXDLhxWDS9IAmpZOIyhrlmk1s,927 +torch/include/ATen/ops/_propagate_xla_data.h,sha256=G255OYunFFhnyolIpnTUcKMeFDgGP46mNlC1aGKiIcU,709 +torch/include/ATen/ops/_propagate_xla_data_compositeimplicitautograd_dispatch.h,sha256=wsy94CSqBa32NaPK8WoQg3q0WVuOMNaVcMfuKuYRY9Q,799 +torch/include/ATen/ops/_propagate_xla_data_native.h,sha256=WzdmczalCsYmbZyYSiV1VYMMEq-xHIQvYSonrdGBN_c,511 +torch/include/ATen/ops/_propagate_xla_data_ops.h,sha256=B_uvuKU3DFRCxjpkT8qnGoZBRukTJuuKy7v-eJuF2gU,1076 +torch/include/ATen/ops/_remove_batch_dim.h,sha256=WgLv3QjhB7UfRc1oUFEciAx1Ro9AAg6vJAwiQEuNcRg,778 
+torch/include/ATen/ops/_remove_batch_dim_compositeimplicitautograd_dispatch.h,sha256=phcRcWafxPoNscTpHVDZUj1YOsXcm1HtKDZHdvYyuVw,827 +torch/include/ATen/ops/_remove_batch_dim_native.h,sha256=tBJIZ3AoHyynQ-MGrmYY5EOFoHtznqoQ6S76gZtzZDM,539 +torch/include/ATen/ops/_remove_batch_dim_ops.h,sha256=RX3K0HMRAGhyq93C0qHP_ql5HOmk8-z1s7gwyWlZqnI,1171 +torch/include/ATen/ops/_reshape_alias.h,sha256=jwW1nQFXvXgy7Q_3VcFd49Nzmb4yrjG7VZ32RQ134j8,1682 +torch/include/ATen/ops/_reshape_alias_copy.h,sha256=y-nynvms6zmtYsesVERhh6g8l6iyojL2UBvcy335sNM,4567 +torch/include/ATen/ops/_reshape_alias_copy_compositeexplicitautograd_dispatch.h,sha256=cS5BkWxA2x-5DTjbVBtIdSrO1nVMxssyeFxVNZEaayg,1290 +torch/include/ATen/ops/_reshape_alias_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=_CSBaxNQzhLePnilQ9owacP8PBKrP2qlPbk9ef42CTg,977 +torch/include/ATen/ops/_reshape_alias_copy_native.h,sha256=Nkq4m4zeDhHmwDLoo_lFfyPAS5FW-GxhaPpRqW5s-DI,702 +torch/include/ATen/ops/_reshape_alias_copy_ops.h,sha256=4bAH-cdxmTpFiJJR-Be1B09koxWg12wCwxp6bhooZ8Q,2032 +torch/include/ATen/ops/_reshape_alias_cpu_dispatch.h,sha256=DdyfDNAZC0K8R_mkZv6gxXYf_NssjGVkhuPVUVu-Kow,897 +torch/include/ATen/ops/_reshape_alias_cuda_dispatch.h,sha256=xhTPREjP6wX4Ihy0FeZZwGNFSeskRVikcUPOsVORxNY,899 +torch/include/ATen/ops/_reshape_alias_meta_dispatch.h,sha256=FASzp0vwnNXOpA0Ri3aEypREKxq7W3edJ0BO5rrsgNA,899 +torch/include/ATen/ops/_reshape_alias_native.h,sha256=nh9_TdNFv5T60GU8253qQX6lAuF2RzsVsE2i9mH0vPk,530 +torch/include/ATen/ops/_reshape_alias_ops.h,sha256=CcPCVqxKPi5jd0k2-ENO4lz_kHZIIRtSpVc0sIrJ2Mo,1179 +torch/include/ATen/ops/_reshape_copy.h,sha256=OB7SjontRolvJBCdYX9LD6YTOulZG1D5FDzvjPVl8L0,1437 +torch/include/ATen/ops/_reshape_copy_compositeexplicitautograd_dispatch.h,sha256=24ldQ0ihNYXysT-kNOVChfQn8sc24KGFnDWi92E0A4w,887 +torch/include/ATen/ops/_reshape_copy_native.h,sha256=3pZ9_1teFm0KESU3USL8Ph3Jw1L09UlYFrmGK692TM4,516 
+torch/include/ATen/ops/_reshape_copy_ops.h,sha256=_5un3lWGnPCTT2VrCGfJvitGEmv9UKjtjr39uQQG7PA,1076 +torch/include/ATen/ops/_reshape_from_tensor.h,sha256=gnJLlN0HTUYgyYW_9SNf0rPih3Wz46mkbYHs2Yau9yw,717 +torch/include/ATen/ops/_reshape_from_tensor_compositeimplicitautograd_dispatch.h,sha256=RL6sIJViMUBVUHxeYzsgX0UAk3FeW63g8nx9qKJL0CA,804 +torch/include/ATen/ops/_reshape_from_tensor_native.h,sha256=eJLbxQCT7mmsdeyYcW7S4m5v03GbpkmtsmECuTjfqOo,516 +torch/include/ATen/ops/_reshape_from_tensor_ops.h,sha256=DoR9ubRDjeBRnhLsIo1n2ettqAUZK3xK5j0nikqDo6M,1095 +torch/include/ATen/ops/_resize_output.h,sha256=KQ7gQdJpXxxe2669O8rVjpuH042Nq5kWj6j1pr2VUP4,5413 +torch/include/ATen/ops/_resize_output_compositeexplicitautograd_dispatch.h,sha256=pgloaeJYto62i30hAB1KicDJoHaVdx_jY5c_9VzXpgM,1507 +torch/include/ATen/ops/_resize_output_meta_dispatch.h,sha256=3vo-D6zoG0rJv3vIMlo7tbMGBATzWK6JzR0y_EI68h0,903 +torch/include/ATen/ops/_resize_output_native.h,sha256=ppRF1p0j0VoQ_AJUbpZqrkYv9wtxWfPffBivF5KvRTc,798 +torch/include/ATen/ops/_resize_output_ops.h,sha256=UIVJLS-6gBvsSbZarHKIN9wMz6QkcpBjdjhrAHJze-8,2725 +torch/include/ATen/ops/_rowwise_prune.h,sha256=-6Yb-bQJlMQy7Nne_HzjyfjOYe01M46UEXRrVSufb5E,835 +torch/include/ATen/ops/_rowwise_prune_compositeimplicitautograd_dispatch.h,sha256=TocpoDp_G7GtscYXJughflRSJ-y1EMt8bPcfnXSctAM,865 +torch/include/ATen/ops/_rowwise_prune_native.h,sha256=EZEoSQaCB8JsfopiZN09FXxpfnhymc0pGXNDAhPmaM0,577 +torch/include/ATen/ops/_rowwise_prune_ops.h,sha256=oAF6NIgt3drcgLhu6ke_wWCglBzZLoVtn6ywI3O4g_Y,1300 +torch/include/ATen/ops/_safe_softmax.h,sha256=UkUno7itFMiMvh5pw-ptw7otxdVF3mXPK8wQ4aWpUbM,754 +torch/include/ATen/ops/_safe_softmax_compositeexplicitautograd_dispatch.h,sha256=2y0s8ks6f4R4IxQZFctTZCLpvJG4Cm9EZSEZtnK8FMw,838 +torch/include/ATen/ops/_safe_softmax_native.h,sha256=MHWYlW3AkBW2u2P8XWkOHtf183XkqOpQaM-p_h59gXM,550 +torch/include/ATen/ops/_safe_softmax_ops.h,sha256=X-TLsglDohIj0YLMGd3yPoAhyaUlYEUhOTL7oT2jnak,1167 
+torch/include/ATen/ops/_sample_dirichlet.h,sha256=NVN5KKmbvoYwF3EebBco0UsJ1uFrCftaDTAJNpRph4Q,1384 +torch/include/ATen/ops/_sample_dirichlet_compositeexplicitautograd_dispatch.h,sha256=cnuA970mzvFwenVPw9bFzLv7nm5H9soQtOMcPePKEBM,988 +torch/include/ATen/ops/_sample_dirichlet_cpu_dispatch.h,sha256=JtFQW9wFFi6NYmBrhkYOROGUJppcNyRegF9ob3UMo2I,788 +torch/include/ATen/ops/_sample_dirichlet_cuda_dispatch.h,sha256=AbluI8upWiRlaOBlrjslAghkgbGQzym0Jm6F5qwKDXs,790 +torch/include/ATen/ops/_sample_dirichlet_native.h,sha256=2KuA3wlSDf34omOK8w1XEJyLHJVKop6j5p3onATbrkc,796 +torch/include/ATen/ops/_sample_dirichlet_ops.h,sha256=dUnRu9yGniziL6vB4Qpngxve5TSi1QAFiTC3-DttoIA,1942 +torch/include/ATen/ops/_saturate_weight_to_fp16.h,sha256=aAIBK7TGy21_lpxP1CH3VL82DXnfaCYKcR5N-hGfpnA,692 +torch/include/ATen/ops/_saturate_weight_to_fp16_compositeimplicitautograd_dispatch.h,sha256=aftCRpQ_qbwk9mA6C4LJ68KjR2Tk5Zj4FopWMN246LA,784 +torch/include/ATen/ops/_saturate_weight_to_fp16_native.h,sha256=uX0WqBWkvuRWwt4Bhw2i63eB1tt6XrUfonzkpiojC-M,496 +torch/include/ATen/ops/_saturate_weight_to_fp16_ops.h,sha256=6FIvNi48DEMk98zoWUyuEDxpmq5We4DAX4CGQfMz17w,1027 +torch/include/ATen/ops/_scaled_dot_product_attention_math.h,sha256=mO-TOA-VgwcFSAziTYG5iQDNbkdGXR7EB_CQJonNQgw,1273 +torch/include/ATen/ops/_scaled_dot_product_attention_math_compositeimplicitautograd_dispatch.h,sha256=KBL2OG0_r9efvDyHPtdS4agCiF54RxT9lmWEKGNI_IY,1084 +torch/include/ATen/ops/_scaled_dot_product_attention_math_for_mps.h,sha256=desGEPOHT5E0NMPPqwBNyeWoK3cknR87oambhUHXTC4,1247 +torch/include/ATen/ops/_scaled_dot_product_attention_math_for_mps_native.h,sha256=DW69q74kL-hRSBtQODQ_1TtZD8Pa55kFQGY44qsx-do,422 +torch/include/ATen/ops/_scaled_dot_product_attention_math_for_mps_ops.h,sha256=Y9NWWiul6hjgZ6CMCNR_ZrTq4xvyeuOsNoWjrX8WAUA,1882 +torch/include/ATen/ops/_scaled_dot_product_attention_math_native.h,sha256=Ky5PeK4bQpJVBQwJuw5RW5AaHOFmXxFuBBoV52ki43U,796 
+torch/include/ATen/ops/_scaled_dot_product_attention_math_ops.h,sha256=ybjYrQ0ndDY6jBZx0WFPgedAj5Kxp0rB5tjd1pXTMpw,1921 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention.h,sha256=95-UEajGAX8YbMSvlrG9VOASQrQ9zIb6sgtVif3lxVk,1492 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward.h,sha256=paL7qw89DRONOcSz1nFbep21nc5E1YVMZlJjbBEAVkc,4450 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward_cuda_dispatch.h,sha256=aCeHqzhK-gW-dzM2QoEcg3Na9RXAuoLBu2y42WzR3gw,1739 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward_native.h,sha256=gn9zV7m3maUmbanuqqj1PgSQj1W-Zzq8UQQEKWAISSk,955 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_backward_ops.h,sha256=puoZu6VmI8hnGJ_EmvVkyGfbSCrnvKoud8RetWJcMII,2511 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_cuda_dispatch.h,sha256=LPwvjcfbIBV0nFCUPqJORo-nJY-JKbdAeq5p5q3i_cM,1098 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_native.h,sha256=WqLVqy-T15HMv9gaQZuZLoKw2_8O14yvrTvEMECMF8c,857 +torch/include/ATen/ops/_scaled_dot_product_cudnn_attention_ops.h,sha256=FTHrHfmru1Z8GY39Rs9GUKmFoYIQyb6_wliPhg6tQ34,2241 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention.h,sha256=fSspExWSQYP-QH79krY4VhKK3bVcEuiu66ByM7xx1tI,1285 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward.h,sha256=95H5RHJKvS9LOGwjI50YkGkEc-o2JSEXOp5xg0DbS5k,1556 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward_cuda_dispatch.h,sha256=aZKsQDCmYK8-76kHnty6AdBC1HNLkiuKx80fuJ1DsLQ,1166 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward_native.h,sha256=NMA_G0gKlIHg7NuByPU1Mj26u_v8L3I6iKiZ2iKOQ5k,925 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_backward_ops.h,sha256=ix4neGaj7GqkmKBoZaJrnQ4GHLTVoqm7IT8POS1tmpo,2370 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_cuda_dispatch.h,sha256=DNq7vkciN2mtViOx7QvVZmLkBspKittT7pePdK2WGcM,1015 
+torch/include/ATen/ops/_scaled_dot_product_efficient_attention_native.h,sha256=bu4NDYCGTlXjKgzNSGbp0lWCH9wo2uS2xVeD_qsG43w,1139 +torch/include/ATen/ops/_scaled_dot_product_efficient_attention_ops.h,sha256=8vCXKoFeuZWxPRYg7CBv1i227moYO_Ey2q956T_JGM0,1911 +torch/include/ATen/ops/_scaled_dot_product_flash_attention.h,sha256=QgClptsIqUXms79VByfTDTKW3wwtPrPviiOLBvfRXZU,1345 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_backward.h,sha256=KlMy9GVf-xNZJUsIlRdJzmxGJGP32qHgJoUTlxcLpAA,4312 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_backward_cuda_dispatch.h,sha256=iiI2_d7ZHwmC_7zTd14780qdEujp5S_hblJQbLy0n0o,1679 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_backward_native.h,sha256=1v12TqJJR6oVDgbTfJXng9oE3fhZoa8iHinz1q7mCKU,1430 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_backward_ops.h,sha256=Fo2ijRX5M5jCscfQNcIMAU8D9hZPEmPrP2EjJz3fNmQ,2444 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_cuda_dispatch.h,sha256=fE966EM7n5ItLCED8HohWZhn1zXGF8u9isSRaJp_xjQ,1026 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu.h,sha256=Flk49W04LnD-9cvrxJGfa6vrxWIVuTDTPQr7WOq0KiM,1174 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward.h,sha256=fwJu08BPcXwDYWYZJtxagD8DwWNLkVHKBGGEVwNfAYc,1379 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward_cpu_dispatch.h,sha256=b73Ga-umXeHdO1RlFOyknVSjxkGF-BFeV8Xy8v1eX14,1066 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward_native.h,sha256=HZBDOOHlcjSTHWw1lXVp11p__hiQRPSGEkRwte9-3uY,818 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward_ops.h,sha256=rjXj7e8GgUk2cPA_PASwXcNAizPdl_dvU9pxYpbIKv4,2083 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_cpu_dispatch.h,sha256=xp1A7L7ZtuQ384_yUvLu8LSxbUpm8Ve-qDwGO8urri0,973 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_native.h,sha256=udcJOpeFSpkw48yBxkPW5-fkg7QjHO0JfizfVbzuYds,725 
+torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_ops.h,sha256=UHfdIw2j0pWiCLUzIhP_dUBbr_AG0PbXqYnYlpHvQi0,1738 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_native.h,sha256=gxfhDHf7j3I2rZarZrOeSH0G7HiX7TjWSG0qc5TZ-XY,1161 +torch/include/ATen/ops/_scaled_dot_product_flash_attention_ops.h,sha256=S6mAKkb--UQkBE0QKkzrzqaZboldZNkdwCXycmEMitQ,2010 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable.h,sha256=oyhJWHDRYHWmYV1oxT4skB5Wd2m5II8OXNxXupR1YH0,1482 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_backward.h,sha256=_kJKAWpRDVpegkl-HBenooUE4B1gdNBfYz8xwoHVeJg,5015 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_backward_compositeexplicitautograd_dispatch.h,sha256=4aguaGOCX_zLpG4bsRDqX4kuHrAhziHyzNKIEqYIANw,1905 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_backward_native.h,sha256=lZcVRnnoUA8pvY3lUog3ikREuMwmKWnILZ91FM0il9U,1012 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_backward_ops.h,sha256=XWJ7YsuTVdbQQh76wTu89WzFcPD8wjlMZ9R7mCe-HGo,2760 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_compositeexplicitautograd_dispatch.h,sha256=pJVZ3tn2gRboKhd5VD1b3wI7_UWDq3qJr59E79oDBjE,1131 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_native.h,sha256=DqF3eRODWzL5kj3ZyujqgK8x_xtuPNiw0A-dbH57aqM,843 +torch/include/ATen/ops/_scaled_dot_product_fused_attention_overrideable_ops.h,sha256=V_o3hO2Mraryg2lEV3UVFyL1PGfGQcQAC736rkzTDA4,2204 +torch/include/ATen/ops/_scaled_mm.h,sha256=QF8h-7d2Bfq8u8miiTwtWqVwbLWZs1oan6FvpqoIado,2460 +torch/include/ATen/ops/_scaled_mm_cuda_dispatch.h,sha256=zr-YSy513lHImQ1kjM3QERWNKPMZqwqffDr5mxuGe6Q,1656 +torch/include/ATen/ops/_scaled_mm_native.h,sha256=nIm8h-ktwbLisCByCIT2sFzu3X38cwdszpvZ6dVtaOc,1073 +torch/include/ATen/ops/_scaled_mm_ops.h,sha256=iUlMnNI1STMjX4T2FGGig1PQ5PxmWeh2XYwQ0be_ynI,3200 
+torch/include/ATen/ops/_segment_reduce_backward.h,sha256=ykwD1cT9j8XsFpQTjt7Biat9Nxk2o23btuwHVvmxqbc,2430 +torch/include/ATen/ops/_segment_reduce_backward_compositeexplicitautograd_dispatch.h,sha256=tu-RuX0wEqCq5-QKdusGcQXL092zVnEn2rUGN9a82fM,1378 +torch/include/ATen/ops/_segment_reduce_backward_cpu_dispatch.h,sha256=dDIXzFV7f5g1HAIHVRo7bs3IFEBecC1_T7RCPF_wLWQ,987 +torch/include/ATen/ops/_segment_reduce_backward_cuda_dispatch.h,sha256=vPpCYBkOm8KXhwZqAh67YiWFVHJKI9QeV5FthofiBW8,989 +torch/include/ATen/ops/_segment_reduce_backward_native.h,sha256=aKM7nP9XJf4r9tFNC42K_HEqHmrPUQlbI3rQ49u8FdY,1072 +torch/include/ATen/ops/_segment_reduce_backward_ops.h,sha256=fTLgBPX43Z58eKf36gyryYrhVhp6jD-22KZRw7PQqC4,3197 +torch/include/ATen/ops/_shape_as_tensor.h,sha256=foMKGxkjrbUH38h2fSNVbovKmdjiddjrqYRLXOsMop0,654 +torch/include/ATen/ops/_shape_as_tensor_compositeimplicitautograd_dispatch.h,sha256=ZCQ-FBAebbCO5_RewQrirlmsICZLi6el5fA75zifXcI,774 +torch/include/ATen/ops/_shape_as_tensor_native.h,sha256=VrY8AxU9mCK5CHYjVPzhJWb4oztuR4E8gn9N46vEpuc,486 +torch/include/ATen/ops/_shape_as_tensor_ops.h,sha256=MTjt7AEYQ4xtinWUpB_wBpW6qk80bI42y6xbnrq7i78,997 +torch/include/ATen/ops/_slow_conv2d_backward.h,sha256=m81WxOZQCc5YteeIiq0nbfxs7EHvEov7M7DoP8L_jVQ,14557 +torch/include/ATen/ops/_slow_conv2d_backward_compositeexplicitautograd_dispatch.h,sha256=SPKoxCWOVsZtWFFMDNgntymGWajJRm4vycwcgTfdJs4,2122 +torch/include/ATen/ops/_slow_conv2d_backward_cpu_dispatch.h,sha256=AjxOTWVSuWk-ZehnE6JzFVMUWalOi1RJP6rKX1Axzn0,2585 +torch/include/ATen/ops/_slow_conv2d_backward_cuda_dispatch.h,sha256=u2bRQ_iKJPWpwOMyyxNezBlCRy4rO7Stu-GtiQU_Jyo,2587 +torch/include/ATen/ops/_slow_conv2d_backward_native.h,sha256=wrEWE3igO0Ph4OYejFm5lxXqv2lAmtL9I8InkFy1pSc,2016 +torch/include/ATen/ops/_slow_conv2d_backward_ops.h,sha256=8TOYVtPJbXVOpEFuXid5R3AnDHnMtlaEDOrLAPRIdc4,4937 +torch/include/ATen/ops/_slow_conv2d_forward.h,sha256=Kgo5MmNUcvxLrCaDnSqWByBu_iet94Qely_EkNBwOtU,6788 
+torch/include/ATen/ops/_slow_conv2d_forward_cpu_dispatch.h,sha256=mUomlv9PiZ86j7EY81n2TzhkzXS44Ffcxk5rKlipjsQ,2123 +torch/include/ATen/ops/_slow_conv2d_forward_cuda_dispatch.h,sha256=EDaWI-HSk8BPNRnAqXPIto6IB6fr_wvYb1VhDziCl0Y,2125 +torch/include/ATen/ops/_slow_conv2d_forward_native.h,sha256=hpIJnDWntOU51DWA8B_lgitMeDVSdQLy5BJFOF-owQk,1350 +torch/include/ATen/ops/_slow_conv2d_forward_ops.h,sha256=tw7WINmMFmnAY9Jsm2sJhJ5EdbE0UMspsncvNSPBAqg,2746 +torch/include/ATen/ops/_sobol_engine_draw.h,sha256=NycFuN3GzdyT9AO-OkIdKHbjstZOBtQbXJ6vFD12RBI,950 +torch/include/ATen/ops/_sobol_engine_draw_compositeimplicitautograd_dispatch.h,sha256=SG6As2PX2mjB1s7ygreDjTdMsspv3YTmXugNRkt9UME,925 +torch/include/ATen/ops/_sobol_engine_draw_native.h,sha256=Mxq8jj-shWmWuV880ZKjho9UgYTYRcKsWY6i7mMGIxA,637 +torch/include/ATen/ops/_sobol_engine_draw_ops.h,sha256=SGuJtd1F3RdqBRHeKHaIyjVcUsLDGc9K60v2iigkhRU,1496 +torch/include/ATen/ops/_sobol_engine_ff.h,sha256=6okttuGqEkA9feq-1bUg2XeMaCcQg3hAGCNqwVP0X7E,846 +torch/include/ATen/ops/_sobol_engine_ff_compositeimplicitautograd_dispatch.h,sha256=oN69ykgERDPFAUqxKcMz4J14glgmlVnjpQWGvxPGh_4,855 +torch/include/ATen/ops/_sobol_engine_ff_native.h,sha256=kKfg1yZnm2QtFoSNfNs8-4MJBURnENh9ceGUdQHYK8s,567 +torch/include/ATen/ops/_sobol_engine_ff_ops.h,sha256=4Ui3Ic9FO7YLzt1FZl47xwuaKAs9U04LLKOqrF3Gw00,1271 +torch/include/ATen/ops/_sobol_engine_initialize_state.h,sha256=uqBNgxWheYKEddOgpphb9nZa029VDu6nwytEhPhiqSU,762 +torch/include/ATen/ops/_sobol_engine_initialize_state_compositeimplicitautograd_dispatch.h,sha256=Xkt5-X095rrIFIOK6rbCCOYagoBCA8oLmeWBdE7V4pk,804 +torch/include/ATen/ops/_sobol_engine_initialize_state_native.h,sha256=MzRXomiP3onKERN_7kqYZTw5RpKwbidFA3i2qYv-UKY,516 +torch/include/ATen/ops/_sobol_engine_initialize_state_ops.h,sha256=RYq71Hx9FTazydpyrW2RPjx84ODjCop9YmuRL24w2bY,1100 +torch/include/ATen/ops/_sobol_engine_scramble.h,sha256=7hr5n2VfKQBjgTUJ0mTvWHAhhtIO7pGoehlrPr1CbIQ,771 
+torch/include/ATen/ops/_sobol_engine_scramble_compositeimplicitautograd_dispatch.h,sha256=rf2qEeQ3UScwdPD7bdd8jhU5hhrwTb6YCANYoqMjd-k,820 +torch/include/ATen/ops/_sobol_engine_scramble_native.h,sha256=wqI0YSOO5J3Fog_KSlsq27gjSNrIGMbzLATYIM5DH7I,532 +torch/include/ATen/ops/_sobol_engine_scramble_ops.h,sha256=EFxQLWKJyu27flsfj_6G8hTct9by5Xi3TD9HtI3D0yQ,1156 +torch/include/ATen/ops/_softmax.h,sha256=Hn6m9NP_m54s-OMc5lBXAT1LccDOY-CcfHeTpTirqgI,1270 +torch/include/ATen/ops/_softmax_backward_data.h,sha256=9ofQGAwlSrpmvRR_GAvVHPtZRIfOiW7Pz5ZcrlvBcGU,1695 +torch/include/ATen/ops/_softmax_backward_data_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AyTIfPXb_mSBKwj7rFSev5REwPB0SZxUkUW7o3riwlk,881 +torch/include/ATen/ops/_softmax_backward_data_cpu_dispatch.h,sha256=bElt_gfrs4qXOP8a_fQJ2vrYFtcco7EU_uwwgp1AN_0,1164 +torch/include/ATen/ops/_softmax_backward_data_cuda_dispatch.h,sha256=qv7BKWg1r1FXzxI1ltZC4siFS9LyiZafQ9lnvAHrxYA,1166 +torch/include/ATen/ops/_softmax_backward_data_meta.h,sha256=G1eEcTR4LexeKBBwmPp4zbstnQUG0v264LHXuboCE14,666 +torch/include/ATen/ops/_softmax_backward_data_meta_dispatch.h,sha256=El7vZpGqS_wLRRTlcPi2juas9dtwWg-UeE3eBhCmWFY,1166 +torch/include/ATen/ops/_softmax_backward_data_native.h,sha256=0g9Sw_gz-jwbPU_80G-BcuBXKlLexQFYH9O0vcb2RP0,1124 +torch/include/ATen/ops/_softmax_backward_data_ops.h,sha256=g5SVE3Z0GLVsihyY_oawrCYlh7GOuef8H8lAU6ctvtc,2207 +torch/include/ATen/ops/_softmax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=svyT6QaGMAIAHozmiEuUso5WkiZPc4FTRse5XhmOqPA,825 +torch/include/ATen/ops/_softmax_cpu_dispatch.h,sha256=pmc7Nl8jx12s3tJH_yPd2ZxcCI_GDVgzFOzRVvOGw7k,982 +torch/include/ATen/ops/_softmax_cuda_dispatch.h,sha256=C-jhRtEUJPSJMeiOF0vP8O-Mx8bhnNiaN0ZEFILHwTA,984 +torch/include/ATen/ops/_softmax_meta.h,sha256=q8GWdomzTPhozQ7CqWfh0KlrEaylzN2d7HMBnvx6N4Y,610 +torch/include/ATen/ops/_softmax_meta_dispatch.h,sha256=JpHotH4q9vQmja9hgTRK7XskfqU2peQ5fD9qgtcv1qQ,984 
+torch/include/ATen/ops/_softmax_native.h,sha256=rmJyjunC491jNBlnCaNfVnZ6s82lQgMzABJrCNvfQn0,1010 +torch/include/ATen/ops/_softmax_ops.h,sha256=OmOJrA5MyYVDcJZFZDYWhgWI2kOQbg-GcVyfKbKvoZI,1822 +torch/include/ATen/ops/_sparse_addmm.h,sha256=4r_z9UqpeAJw2H32ylN4prQbzN8okc_EX8vI46GWNk8,1634 +torch/include/ATen/ops/_sparse_addmm_compositeexplicitautograd_dispatch.h,sha256=QTGVUSqtiKwq3EVtbVY1kvPMYFYVCGez-gZW9UNsSG0,1253 +torch/include/ATen/ops/_sparse_addmm_native.h,sha256=g_6AwJqSUAgsq3D5IbH0GL7KXL_7tIo2ZVCexCQEu4w,774 +torch/include/ATen/ops/_sparse_addmm_ops.h,sha256=FGI6RwiC8BmY_a7bayKcVGivudFATVukCgmzMJzuWb8,2313 +torch/include/ATen/ops/_sparse_broadcast_to.h,sha256=zvM6h77kCGmm2YaLNL43ahdHQKwpHTEfFH3rmRflQ5M,716 +torch/include/ATen/ops/_sparse_broadcast_to_copy.h,sha256=hIGt6xLGziVLzBj_jyAp1VQDY4PY4tPV7DFnjLUxl0I,1314 +torch/include/ATen/ops/_sparse_broadcast_to_copy_compositeexplicitautograd_dispatch.h,sha256=eDKmw089UZ0MxTEbTSAtd2bW_WIkcDgTiw7e_C3494E,949 +torch/include/ATen/ops/_sparse_broadcast_to_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=nTpWGiCH7KauFWBEVOW2YlzqFoh_qmG9LFcrWJnAEKw,831 +torch/include/ATen/ops/_sparse_broadcast_to_copy_native.h,sha256=14g-p5D0uFFh7NrSNUjgaQl7xRQWVz-j9qBV8RUX0J0,636 +torch/include/ATen/ops/_sparse_broadcast_to_copy_ops.h,sha256=g41nhOFl2xN1NAdJ-Lvj-Pgusd9KwXDV04z-lMMW1fk,1850 +torch/include/ATen/ops/_sparse_broadcast_to_native.h,sha256=7PVG4YXRvpH2SPVMEITqW4HZXyLBrP2eTaTVThPtBmg,511 +torch/include/ATen/ops/_sparse_broadcast_to_ops.h,sha256=dm9h_3E1RrC5Ije1Bf15fAro6-BsqvY_kL3Wp3pf584,1088 +torch/include/ATen/ops/_sparse_bsc_tensor_unsafe.h,sha256=M4MNXHwwVzQzZXl63tRKzZljFMIDgDN-qB7XMIfPb_c,1757 +torch/include/ATen/ops/_sparse_bsc_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=eXnrqplLcECr568OWH6mlTp_9dNFoBCW-pS9Ge-wJiw,1209 +torch/include/ATen/ops/_sparse_bsc_tensor_unsafe_native.h,sha256=HsWzMTV0Wb4LHT8CXLv8is9EhWDPxnbDlrpEgBfklnY,741 
+torch/include/ATen/ops/_sparse_bsc_tensor_unsafe_ops.h,sha256=HDcKa0LI77c9Gm40CLRPvbC6QoJifZsC2eGq8rPD_uo,1810 +torch/include/ATen/ops/_sparse_bsr_tensor_unsafe.h,sha256=m6G7H-0WfUGGCx7LP17mq6MmMTlPjV-ytmRVd5BWCIs,1757 +torch/include/ATen/ops/_sparse_bsr_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=iGzRhza_-BuNSbVWZIqiYkBh-OO7_DfSIagrwvPblls,1209 +torch/include/ATen/ops/_sparse_bsr_tensor_unsafe_native.h,sha256=hx8-aHty4HGxmjGxwl3DYn3XdST2mS_Vl0TCklIynhE,741 +torch/include/ATen/ops/_sparse_bsr_tensor_unsafe_ops.h,sha256=wvg_UhR4wrdYYpUP7LzTVhjLyIwtF_T9heLBR1WTR_4,1810 +torch/include/ATen/ops/_sparse_compressed_tensor_unsafe.h,sha256=D1y8H7TMQfx96lF0RVi__xb8IliCuL_jTP77doLLnh4,5591 +torch/include/ATen/ops/_sparse_compressed_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=5wiGdlCob4zxSm5Q-2Pnk26CypgYSvO53ciCJaxpxJg,1790 +torch/include/ATen/ops/_sparse_compressed_tensor_unsafe_native.h,sha256=0jQymVmnl9GOQ7JU3pb_sx_HyTkQTFxEHLc-IW4bT58,767 +torch/include/ATen/ops/_sparse_compressed_tensor_unsafe_ops.h,sha256=90KiVojrwS6vDjcqrf1mvi3DsH8QSmiQEVR9inNpBdw,1870 +torch/include/ATen/ops/_sparse_compressed_tensor_with_dims.h,sha256=vn5i8rSaditmT5l63v926A-PjyY5xwYUCr1uGncVOXw,1844 +torch/include/ATen/ops/_sparse_compressed_tensor_with_dims_compositeexplicitautograd_dispatch.h,sha256=dFIwgdVguFXDBnL_Ip2tirWbthKztdwbMW4oPqAUznk,1216 +torch/include/ATen/ops/_sparse_compressed_tensor_with_dims_native.h,sha256=W00Fch00eKVt1oxTqbZVi8lbeimeGv-fZdPXxWK1nq8,745 +torch/include/ATen/ops/_sparse_compressed_tensor_with_dims_ops.h,sha256=fo7Evbkg8LR-BoKJvcDRlvuU5nTGT8tcqpSHAV2mvAY,1831 +torch/include/ATen/ops/_sparse_coo_tensor_unsafe.h,sha256=9EwVX-0vszpxxwUwhhNz-VOY_FJRRJnT0EpWsdP1Nzk,5304 +torch/include/ATen/ops/_sparse_coo_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=zMolWRDUy85Vl49F8tyrwAbZemucJtG23PxsD56dZCY,1756 +torch/include/ATen/ops/_sparse_coo_tensor_unsafe_native.h,sha256=UWL3pIz3EhXIa7ks1jpzqJzwDyE7-fy_xOjQ0iIrORo,766 
+torch/include/ATen/ops/_sparse_coo_tensor_unsafe_ops.h,sha256=92m_crv5PijJx-nEx0NY08RK-C6l0QW-HREc91g1sXM,1826 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims.h,sha256=xaVOph-NxdxE5zvlY7K5xQlDlm91qXXEJTUntigyfVo,2297 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_and_tensors.h,sha256=_Cu-3YXboCVCtG4IoUOfG2Wgajj3YVv48pzD6hKeSdU,10629 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_and_tensors_compositeexplicitautograd_dispatch.h,sha256=_mreM1o9QCqyy4e1c2j3_MpHVb6pCjXQ4K0bTNrKTzg,1720 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_and_tensors_meta_dispatch.h,sha256=STtjhqkpTsfIkmLchc6fFTdOZO3VQV3J5zLdsSjjPI0,1924 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_and_tensors_native.h,sha256=XjxVmr5rBX0DBfPHnI3lHldZjdPm2k2QOp_i4-IEnho,1061 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_and_tensors_ops.h,sha256=ooceElMAPoyDVrdjgiVAWBL6FRUjBYIklxkVXBLumQc,3157 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_compositeexplicitautograd_dispatch.h,sha256=3_qpd3dRzrUGpaNZVexCFanmNev_Jt5MHsTFkOIbGPc,983 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_meta_dispatch.h,sha256=WqRjAa2k3kaVl4d6vPReHoF-_BQk2VmHK18iWv1_DAQ,1064 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_native.h,sha256=laHnbzabxmDveHHLF_EY5XvRg9wx4Ao2Mt2ACESzAGM,819 +torch/include/ATen/ops/_sparse_coo_tensor_with_dims_ops.h,sha256=ypFV_DbVz0W-iV-BtMpgjCkDjyiwoMQRIeNGfE6sCis,2453 +torch/include/ATen/ops/_sparse_csc_tensor_unsafe.h,sha256=wP9xVg-GI5zXAepVKM-6-uqqt5hVaTNV60NT42PFn8U,1757 +torch/include/ATen/ops/_sparse_csc_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=qjt-_PnhHbHELtzrxNsBe7ausDJTVRKtCZQZ1AjLexk,1209 +torch/include/ATen/ops/_sparse_csc_tensor_unsafe_native.h,sha256=ALyl3JpspNdmR7rE9uwmvnlhQGDmRexIux5TQ4TYTQY,741 +torch/include/ATen/ops/_sparse_csc_tensor_unsafe_ops.h,sha256=zTnC8qIPHSCMwZNY1F7kuvC8vecyNUH8qMF9YcU_QAA,1810 +torch/include/ATen/ops/_sparse_csr_prod.h,sha256=BiVPVLEJA4lFmVfQzzezitc6SX65iINXUIfe4WDPrzw,1662 
+torch/include/ATen/ops/_sparse_csr_prod_compositeexplicitautograd_dispatch.h,sha256=I-EBXSGOBvJIv4m-7Z1zu7G1ZKjCBTnLtc144padtDI,1056 +torch/include/ATen/ops/_sparse_csr_prod_native.h,sha256=aLlX_wUWch3P7iUkQF7GmkBnD0YaGKAdqQTY8u6YL7A,921 +torch/include/ATen/ops/_sparse_csr_prod_ops.h,sha256=kO5Wu_TV-TLq0nEXo7_SMeA6X2k7GPDkh2vppdMo3u0,2232 +torch/include/ATen/ops/_sparse_csr_sum.h,sha256=kOEW7ozrhTJb9Cnfhrh0-dNC2AD2CPYv01V5Wu3hVK0,1652 +torch/include/ATen/ops/_sparse_csr_sum_compositeexplicitautograd_dispatch.h,sha256=moUOACPsrkd1KK1YK4d5h7D_cEMV4LPSAADoV4OHezA,1054 +torch/include/ATen/ops/_sparse_csr_sum_native.h,sha256=qFUNnqmD6aOuyEJKKD3keIJO1MTmhL0GBed386w_4PE,918 +torch/include/ATen/ops/_sparse_csr_sum_ops.h,sha256=H5jg_oq-URJSusiVXaTkD-YMdBtYV6r7Uxdh9x_hk2w,2226 +torch/include/ATen/ops/_sparse_csr_tensor_unsafe.h,sha256=CfPcmqkwoTXSi1mqLPjTUnm4A7z5D4IuakSolhP610M,1757 +torch/include/ATen/ops/_sparse_csr_tensor_unsafe_compositeimplicitautograd_dispatch.h,sha256=ooE0BK5M2RUnj3kwbMycqG7wzLDtZUE46Gq1jJ5U6vo,1209 +torch/include/ATen/ops/_sparse_csr_tensor_unsafe_native.h,sha256=yqwu3EdE0s-hK4RWEyOw727oI5K9a9B013el16rt5xQ,741 +torch/include/ATen/ops/_sparse_csr_tensor_unsafe_ops.h,sha256=1yMVeXgOoIeNIkJZkPyZrtFRROeKZEO9pJM9Xdqvlak,1810 +torch/include/ATen/ops/_sparse_log_softmax.h,sha256=wsRz6JmuMmWcqxy5h8U4bFGbNdoZPBVzd2Mbb9paslY,1987 +torch/include/ATen/ops/_sparse_log_softmax_backward_data.h,sha256=-f-U1hy0f10rd4pyZbKsQtvvn9X-h3I7hhhZ-IJPnw0,1700 +torch/include/ATen/ops/_sparse_log_softmax_backward_data_compositeexplicitautograd_dispatch.h,sha256=MSFjlT3B14wEGoxznxPm5tgApZYCwmgXuIG-xfn0izg,1065 +torch/include/ATen/ops/_sparse_log_softmax_backward_data_native.h,sha256=ngckj-dgjBJMBldp8O9T6tLq0bp4xN0boeW4rBaaSkY,902 +torch/include/ATen/ops/_sparse_log_softmax_backward_data_ops.h,sha256=z8mxUsfkJMNHy1MSwX90iShhqMalbwO5K6mTIrab8Rw,2226 
+torch/include/ATen/ops/_sparse_log_softmax_compositeexplicitautograd_dispatch.h,sha256=hPFxbead6hMjkXtDX9CahezrpBKolM8DcwDA6VRh-tU,959 +torch/include/ATen/ops/_sparse_log_softmax_compositeimplicitautograd_dispatch.h,sha256=BZC_zTYchTJuPMmPdR50_qCMzGBf4cvOcAn7ETeawK8,982 +torch/include/ATen/ops/_sparse_log_softmax_native.h,sha256=yAe6u2N3QQlBh_knrN-3S6_gRmuOo9d0OKjAbMblsR4,1025 +torch/include/ATen/ops/_sparse_log_softmax_ops.h,sha256=BCpJhxjEuQqoseVVGQ90mZGg58rk-6dngzvBjwrFB5M,3443 +torch/include/ATen/ops/_sparse_mask_projection.h,sha256=ILhVaFtCxWAs84KGzRfdacEov01LcIXWLdS5S7XWBWY,1234 +torch/include/ATen/ops/_sparse_mask_projection_compositeexplicitautograd_dispatch.h,sha256=2WJL30FoTJjCjZgIv9ZCRVirrPSeGmHg5GhHokJh0Cs,1007 +torch/include/ATen/ops/_sparse_mask_projection_native.h,sha256=rIhal0x-2O3u5SGUp_csXpYI3OIMRAmwG7Z-LzXMfTc,693 +torch/include/ATen/ops/_sparse_mask_projection_ops.h,sha256=Wt1cekVcrRiEJ3x6xmD0vq8DWEBJpIlTM11JcdSsKzI,2032 +torch/include/ATen/ops/_sparse_mm.h,sha256=wekZnVOMDWtoEMl89qLL4Q1IHxnYrUCrtYHWpljWI-Q,942 +torch/include/ATen/ops/_sparse_mm_compositeimplicitautograd_dispatch.h,sha256=5j2KIVy9W-sE7_AlMZaUqu9_bCfhRe5cAU_yq82DNoY,907 +torch/include/ATen/ops/_sparse_mm_native.h,sha256=jzJJn_oC9KyLBR1cmF3Ix4PpWlTBlrBNhlx7XcrgB1s,619 +torch/include/ATen/ops/_sparse_mm_ops.h,sha256=7XHCZ007htw2sSXPXYnIdJeTFQdyKxuKdhV11GPfuLQ,1808 +torch/include/ATen/ops/_sparse_mm_reduce_impl.h,sha256=xNJty1RoIoYMkjS_HwsI6Ele_1gXcvST61XjWqt3RsA,805 +torch/include/ATen/ops/_sparse_mm_reduce_impl_backward.h,sha256=0THpgWowWmqMH3iaS1TRfx9pKi2R6XDFHKgiaBLtZJw,1021 +torch/include/ATen/ops/_sparse_mm_reduce_impl_backward_native.h,sha256=s-luHuDfdolpjeq35ZBmORssLdTPRMOjqKXfuN5Rdsg,684 +torch/include/ATen/ops/_sparse_mm_reduce_impl_backward_ops.h,sha256=jMSBdYibUSiHAopqczdhZwKbb5wjqMDvVQRavmTIInw,1594 +torch/include/ATen/ops/_sparse_mm_reduce_impl_native.h,sha256=oxMRDp1D3fDv-HE5AkMJ2fcYLX7XiIC0ejoojd-XNEU,583 
+torch/include/ATen/ops/_sparse_mm_reduce_impl_ops.h,sha256=mxBmr3msR5W7REa6M1gaznLeDuO9l5kFhAut1VmL2no,1266 +torch/include/ATen/ops/_sparse_semi_structured_addmm.h,sha256=b7nCId94ogGC90457fizhh3-XrqcmxuyQipDuk8IxXQ,1055 +torch/include/ATen/ops/_sparse_semi_structured_addmm_cuda_dispatch.h,sha256=yfgeOlwtwVa6t6B4ZUF8LDm5WUJ9lCq30QYR0qw1oCA,939 +torch/include/ATen/ops/_sparse_semi_structured_addmm_native.h,sha256=82yftHzOTpwfxPE2s4RR3RbNbQcBw2GgyhmmUKZWGV8,693 +torch/include/ATen/ops/_sparse_semi_structured_addmm_ops.h,sha256=Czi4tzl-eLoCuYBqRWaYYpnYkzJNp2sOOxXdUFR1eLE,1626 +torch/include/ATen/ops/_sparse_semi_structured_apply.h,sha256=boNLTb7NpU3Vib3y1hgDJSa4Eu6ob0HN4z35x0nl1Wo,812 +torch/include/ATen/ops/_sparse_semi_structured_apply_cuda_dispatch.h,sha256=9N8eXvAAbuHB-OKGeCIIu9d3actuwXF9K9E9N72SWqQ,804 +torch/include/ATen/ops/_sparse_semi_structured_apply_dense.h,sha256=qIxckLoPwH9DOSw_HBCFi5P5rb0YCyXehV9lIbKDISk,801 +torch/include/ATen/ops/_sparse_semi_structured_apply_dense_cuda_dispatch.h,sha256=QCLgDQlaTESjRUYxB2tAYC6ILBAhHL34SGKJdDYeqmo,785 +torch/include/ATen/ops/_sparse_semi_structured_apply_dense_native.h,sha256=HqSu3lG7MY7lbcT2neexrgzOGfqIKNRR7_MM-NUAzpk,539 +torch/include/ATen/ops/_sparse_semi_structured_apply_dense_ops.h,sha256=Y4pCtl80bz1bOcgqLcwir9NPnzSzq5lqX4gxeCYMq0k,1164 +torch/include/ATen/ops/_sparse_semi_structured_apply_native.h,sha256=93RFs97FX5gFXytRN2CdQUYTIkvPmIviRXLswb4iyRI,558 +torch/include/ATen/ops/_sparse_semi_structured_apply_ops.h,sha256=SwFKPvh4kD1KGnpQ7O0bugPgFJGeHUBW3we0GGlO3SI,1231 +torch/include/ATen/ops/_sparse_semi_structured_linear.h,sha256=BDw0jYOq_HxHpN4aAjPdOJzf1u1BeL6h5yX2vUDzMSI,1072 +torch/include/ATen/ops/_sparse_semi_structured_linear_cuda_dispatch.h,sha256=6pn1Jc1_lG9dy20VmMeTqbQ4VppJO1i4nxm4mx-h8B8,963 +torch/include/ATen/ops/_sparse_semi_structured_linear_native.h,sha256=kcngFTu5kX5icSIjydZ4m6PqXN0WVnwj24lS2Cn_eDA,717 
+torch/include/ATen/ops/_sparse_semi_structured_linear_ops.h,sha256=RFKhcX0cVw_nS3koNUWonRuCgRRUkKvy7QLla8kReaw,1653 +torch/include/ATen/ops/_sparse_semi_structured_mm.h,sha256=Sjp7iQ39ZOyBTp66B7tQEC3DxyVowrjdx_7rngU9Dec,897 +torch/include/ATen/ops/_sparse_semi_structured_mm_cuda_dispatch.h,sha256=uEd6LmDafSXsop86lHP49xMqVX7R4GwLr3vlsZxh59s,855 +torch/include/ATen/ops/_sparse_semi_structured_mm_native.h,sha256=iL0WJtr_sDSYey0BQF3PgCJJXIroIGonPD_Gfd1cAGo,609 +torch/include/ATen/ops/_sparse_semi_structured_mm_ops.h,sha256=E-o3FMcfqPvQkWYOiBerrWWJ24t8-3DqEOz9cULm5RI,1358 +torch/include/ATen/ops/_sparse_semi_structured_tile.h,sha256=5NmWvlAMNPie4ifq1sREE5VNnJ_MI6Bmi-IzLAiiPzI,916 +torch/include/ATen/ops/_sparse_semi_structured_tile_cuda_dispatch.h,sha256=LvNes56locG24C32tN2-k0GeUouxhOypBlHzx6G2oa8,857 +torch/include/ATen/ops/_sparse_semi_structured_tile_native.h,sha256=EZhkqb08iX9eDYPnVjVEx0OsvPXC8586NqGOJ_l6Zk8,611 +torch/include/ATen/ops/_sparse_semi_structured_tile_ops.h,sha256=1LV58ThUjB775wCd_9p3doHn-xswGtWJhtNJHlTWXII,1403 +torch/include/ATen/ops/_sparse_softmax.h,sha256=Z825sv6vNTQTuulvXZ2a6PdtxmnfWGsHUNWTUTJuG74,1923 +torch/include/ATen/ops/_sparse_softmax_backward_data.h,sha256=yEMYC6kSsh44GZ3_2PW8N9dJ-ER4VhaJVjsjkuDio1E,1660 +torch/include/ATen/ops/_sparse_softmax_backward_data_compositeexplicitautograd_dispatch.h,sha256=pK9L-SCFibpejshNAes-ep8Gg7lEFD4YuO9vY66Fcc4,1057 +torch/include/ATen/ops/_sparse_softmax_backward_data_native.h,sha256=vekJYLeUuNcyUsunkKP_kFGG7gnJ9MTipef6a9qR06M,890 +torch/include/ATen/ops/_sparse_softmax_backward_data_ops.h,sha256=ooW8oxQ9WJyM82iUYcW5t-Hc02WRcldvONRbGVBNUgo,2202 +torch/include/ATen/ops/_sparse_softmax_compositeexplicitautograd_dispatch.h,sha256=CAI5KiocSavdh38pDa8PD37YZwBRx5NnL_jj3RZ8hg0,951 +torch/include/ATen/ops/_sparse_softmax_compositeimplicitautograd_dispatch.h,sha256=aewpMWURX5ZylN4hbtiyZf0HbEfVp8xPI-hLxOwlfXY,974 
+torch/include/ATen/ops/_sparse_softmax_native.h,sha256=AYz-qZHr7EmJdvhMUwQdK_CMWUo2GrmNGaFbo3N7TSE,1005 +torch/include/ATen/ops/_sparse_softmax_ops.h,sha256=YELPuCSd518bZSsaxrQ16qg5uDKkKalsMyLig9Vd_YA,3395 +torch/include/ATen/ops/_sparse_sparse_matmul.h,sha256=zEY3TEyItE4u5cREsLUSLrTKbTc-GTVUFw1txcDe-1I,1295 +torch/include/ATen/ops/_sparse_sparse_matmul_compositeexplicitautograd_dispatch.h,sha256=BG_mISTQJ1_3dGvtKunqxjIm0PY17tRazzT67aBNEgg,949 +torch/include/ATen/ops/_sparse_sparse_matmul_native.h,sha256=XrLYtiicyaE58VAMNEszgBPLIQ5Gruu4SVYITEAiDJI,738 +torch/include/ATen/ops/_sparse_sparse_matmul_ops.h,sha256=4G-UkMZG7kC2vh4H-onhsBBf2zUfIdo3XCkUxlXhFfU,1852 +torch/include/ATen/ops/_sparse_sum.h,sha256=RtGSqMWovwXOV4RjBjGtqofhle1SznnHQ64KMJtgHhs,1812 +torch/include/ATen/ops/_sparse_sum_backward.h,sha256=i-VVuEBSK07Odly09TYt3JGZAznxKF08jSEKJ-bo5lY,1387 +torch/include/ATen/ops/_sparse_sum_backward_compositeexplicitautograd_dispatch.h,sha256=-ZHZZGrACRdFt9RXETr9wPG7c2nGthaffhhpiQ8oQlk,987 +torch/include/ATen/ops/_sparse_sum_backward_native.h,sha256=AocOLb2vBwLL9DgIhTRIggjWjKMCmHr6nZBl_knDCuc,797 +torch/include/ATen/ops/_sparse_sum_backward_ops.h,sha256=XV0rqNvzgrEVeiWi6CH-RecDNw0bCO6FLflwxLKfixM,1980 +torch/include/ATen/ops/_sparse_sum_compositeexplicitautograd_dispatch.h,sha256=-uMzTNOdApGGoqSAxhIpPcbPY0jm2I0eBG4w6JtzQf0,999 +torch/include/ATen/ops/_sparse_sum_compositeimplicitautograd_dispatch.h,sha256=hxbc6Mn0RoBi7lPxx-DicjlKRXz7dALEIkH9oeWzYEo,952 +torch/include/ATen/ops/_sparse_sum_native.h,sha256=5jit57UVkhup7OVIMvhAToGhrAafIobORmA3Urlajko,852 +torch/include/ATen/ops/_sparse_sum_ops.h,sha256=Alj_thgbdIe6zDbyr4weiEafJf5oGeX2YWet4sAn7s4,3708 +torch/include/ATen/ops/_spdiags.h,sha256=8sSLAwbiBDCHUHEIufBqONcNyPXq3q3J0B77Lvdmp6U,1582 +torch/include/ATen/ops/_spdiags_compositeexplicitautograd_dispatch.h,sha256=7Rflpmnf4HZ1ZYr30DmKXyG7fc3JHv-22GoYg6EnVzk,1070 +torch/include/ATen/ops/_spdiags_cpu_dispatch.h,sha256=PbUFKZ6z7o5JMVi57kq8AcJyJj3oHaND98NlOLYUUNs,829 
+torch/include/ATen/ops/_spdiags_native.h,sha256=zg_Gh9M57znJeCPpX-fE7pDLaBRH4TYtz84ED9DA3S4,756 +torch/include/ATen/ops/_spdiags_ops.h,sha256=OJLiMa8YawKOGdabgFByb_EFXgLDEDodAhyQvTMyjMs,2212 +torch/include/ATen/ops/_spsolve.h,sha256=_pz2jUQxPCdp_b-vxuSOrx_sqQUy1MlQ_jDJ22SzJzc,689 +torch/include/ATen/ops/_spsolve_native.h,sha256=DMecf5sGDt9h_CSwHHTY0Y2Rw_5XIwAgDGu1jGwJrns,529 +torch/include/ATen/ops/_spsolve_ops.h,sha256=dtLAeMwpYbVdjAUtDaZwr5_YgEN6alXL4RPXs1bhSNY,1085 +torch/include/ATen/ops/_stack.h,sha256=Cvyu0khNNCOS-fZgg0CFspsVjWKMd60P0kwzCBwV_L4,1116 +torch/include/ATen/ops/_stack_compositeexplicitautograd_dispatch.h,sha256=XPmvbCkXjicSsxv5hT7byCXYEVLWLbP-GpbR8sd483o,961 +torch/include/ATen/ops/_stack_cpu_dispatch.h,sha256=gyMVPfnMJFNwIesKmE9NCvaWnGf0G671tK4rfjbUgQ0,917 +torch/include/ATen/ops/_stack_native.h,sha256=2haJpjEPorP9tzUwRyUJjV7kO6r-byZKoNzoKeise9Q,746 +torch/include/ATen/ops/_stack_ops.h,sha256=tVNv5tNiHcPlt5j_0ywItOEljBkIU28_XtrT_mSYm08,1680 +torch/include/ATen/ops/_standard_gamma.h,sha256=MxhBtpmaN9WOu1k9_sL3G6gtv8Lr4ghyjnrEekgdbTM,1364 +torch/include/ATen/ops/_standard_gamma_compositeexplicitautograd_dispatch.h,sha256=9OJH8Di-WTnXQOft-tbnUThpyj81XBA_0EsYCEdIM9U,984 +torch/include/ATen/ops/_standard_gamma_cpu_dispatch.h,sha256=OjEKytZd5zc_742IRJz83jz04fXOwyzDxBGcNFEok_8,786 +torch/include/ATen/ops/_standard_gamma_cuda_dispatch.h,sha256=MZc6fPC5Lsk_hBg51WMqXTC466G7yKzS65qngyIKFzY,788 +torch/include/ATen/ops/_standard_gamma_grad.h,sha256=IwrMlRALNsErgra3KzTGjKr0mzdYdaoL5URbJiTN1k4,1294 +torch/include/ATen/ops/_standard_gamma_grad_compositeexplicitautograd_dispatch.h,sha256=2xR6UnXRYLzFRExycBln4i7-yD52j-TPLmLrCmAjy9Y,949 +torch/include/ATen/ops/_standard_gamma_grad_cpu_dispatch.h,sha256=xsLiDQmTUeVhfMZKQaEHdUcSo_cZ1frj5GG2nJOOfmc,761 +torch/include/ATen/ops/_standard_gamma_grad_cuda_dispatch.h,sha256=c39AgyrLB-xOC2-4VsfkvmKeVupoapqZ0sB5ydlR8fo,763 
+torch/include/ATen/ops/_standard_gamma_grad_native.h,sha256=mkKucfTVHOqUgo1PHIjdFoXx1uLfESDHnLxPWP9ciFU,740 +torch/include/ATen/ops/_standard_gamma_grad_ops.h,sha256=DD4iXAJ66_BlOo47_xPLNL0JaL8fCE1fUGrzR8hEDlU,1852 +torch/include/ATen/ops/_standard_gamma_native.h,sha256=GdE8XkYlMmumIObiAxawAjl34bwBE5T4FhE4dyv1Wcw,786 +torch/include/ATen/ops/_standard_gamma_ops.h,sha256=1gFvnq-om5Y7jCrvz8OUFWcIB5h5GLbW3xCQSlDJ8x4,1930 +torch/include/ATen/ops/_test_ambiguous_defaults.h,sha256=hrRRIOzidk4K1r9fsHJGIOvF9Y4xP2EtB9jbDUKITWU,996 +torch/include/ATen/ops/_test_ambiguous_defaults_compositeimplicitautograd_dispatch.h,sha256=47byKGQMPtyd3SUg4xtKqkffe-SqIc6D4dvhSJpYRcg,913 +torch/include/ATen/ops/_test_ambiguous_defaults_native.h,sha256=G8RggWiTAtIuHa6LmLPrC1Lx1uzPLo6nQ2qZINKB0Mo,631 +torch/include/ATen/ops/_test_ambiguous_defaults_ops.h,sha256=vCUCEcpB7LV228_Lc-dj_pm1quDzt78IfEpxdq3Xows,1815 +torch/include/ATen/ops/_test_autograd_multiple_dispatch.h,sha256=zybFhffwBmX9kjNiNtNHqxCzqZEQWeZjPslRLfy5omc,1588 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_compositeexplicitautograd_dispatch.h,sha256=_4SgReFkok3NlUaXE70PKKcCMwOBlAdOgs9Ok9qjmvY,999 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_compositeimplicitautograd_dispatch.h,sha256=u1fU6MDlxVnjGEaMWEud_5l8DC86P_8IfkqetI0loQM,798 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_native.h,sha256=KY_U0z1UDcFiCcSj55Ljc-ut847uosiI3EqHYK71mPA,727 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_ops.h,sha256=Pg9niWFkQvTctSH1tvIueMNxGhxCN0aLgKFe_cWEKMY,2484 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view.h,sha256=SfbXAvv8RIO_RSfo-x_9Gvj8lqUwheZmlhb2aQOzXgI,744 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_compositeexplicitautograd_dispatch.h,sha256=ShG32Y8O22F1mikUvW-HKUu4BQ2fvcltU6jT-TJl-K4,795 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_copy.h,sha256=RQGBqbazQ3WDBxOra8Gzz5XBif9EvQyq9tewiGIPFSQ,1364 
+torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_copy_compositeexplicitautograd_dispatch.h,sha256=0B3WYKdaoOO9F0gmsprXOafJu58zDJDtSlhQGZbPM-w,939 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9_LCvXLK5y8zBux6-buoGdOjW4SkXVTI0_j4CPZQLXE,826 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_copy_native.h,sha256=NRCGaF9VqWe-ytCmYU0jmqRuehyKW8I3c57TzjTgD9I,626 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_copy_ops.h,sha256=Ym_NgGDQ70xd3q6u0Ex6Kgw0GuhyvBmzrR-Xj-kuUvQ,1806 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_native.h,sha256=faXJkTR3nGFa8sWHrgvt7Sw6cor0Ny0EA3KB8B8PUjQ,507 +torch/include/ATen/ops/_test_autograd_multiple_dispatch_view_ops.h,sha256=9WwjKbjWzT7Hf7FoN26MPDSvH_xgWTC5kkcxzF5j-GU,1066 +torch/include/ATen/ops/_test_check_tensor.h,sha256=9v29DvEbzOF-HkCnPktMm_58i_m6AHltlVQvnfISqOU,662 +torch/include/ATen/ops/_test_check_tensor_compositeimplicitautograd_dispatch.h,sha256=E8I0_VWxQ1AAjGOdFIvKJx1V_NNhSOo1ur_jfI3J2_A,776 +torch/include/ATen/ops/_test_check_tensor_native.h,sha256=FZdKhy0cD9P4AcWD7Ri5HuAsPrXV8-PAiz6eKq4Jk8E,488 +torch/include/ATen/ops/_test_check_tensor_ops.h,sha256=q4EystZZPVsoP3OcHnDjVbAg2IWAG_Qyso153XgAYW8,1003 +torch/include/ATen/ops/_test_functorch_fallback.h,sha256=udGjBjmZnoablaAF6A66zzSgsC7H9cj7l6GljX0nAoM,1325 +torch/include/ATen/ops/_test_functorch_fallback_compositeexplicitautograd_dispatch.h,sha256=92mowvVjVXdV8L3vPlGYdJFOqcBgqNfVEeYWzjzlo-8,955 +torch/include/ATen/ops/_test_functorch_fallback_cpu_dispatch.h,sha256=KtazLBaKD5akwda0xUfVXVTjrraI7P0jqykNg1ySnG8,764 +torch/include/ATen/ops/_test_functorch_fallback_native.h,sha256=8GTojP0hZEmmKUnT_aIAR5zr9PZWh9WMDaacF9XbCyQ,642 +torch/include/ATen/ops/_test_functorch_fallback_ops.h,sha256=-47Trb3dIEmwG4yoOVN50NhHWgY7uDZtICfrr3o691U,1870 +torch/include/ATen/ops/_test_optional_filled_intlist.h,sha256=uZSe4wr_iYKdVL06mUUz6gYea6c3IbM7pwxyCdIWeWA,1429 
+torch/include/ATen/ops/_test_optional_filled_intlist_compositeexplicitautograd_dispatch.h,sha256=qr8tdDam2XUh6D4Mz-H7zK-qhweREUt5EnnfhJbTh1E,983 +torch/include/ATen/ops/_test_optional_filled_intlist_cpu_dispatch.h,sha256=273YyCcDdwyN39oHF22kHPOrhb5Z_M3hoex2VkXtBRA,778 +torch/include/ATen/ops/_test_optional_filled_intlist_native.h,sha256=1CC-1J13yvzQdSvBk0othPKQRR6T7Tc9nmA-wxPFXkM,663 +torch/include/ATen/ops/_test_optional_filled_intlist_ops.h,sha256=u-Ox2f1OqH707QxYai1N4s6vbzglpnoyjampO-Kw8HE,1956 +torch/include/ATen/ops/_test_optional_floatlist.h,sha256=uW-RA6QzqpwvFf0CmYtfVxWMJr_Sc-z4q9HmaaOXWyE,1424 +torch/include/ATen/ops/_test_optional_floatlist_compositeexplicitautograd_dispatch.h,sha256=kmBhJ59q09URze4QEx9_MbLiru6Ypy0LhW2epM8Aq4g,1001 +torch/include/ATen/ops/_test_optional_floatlist_cpu_dispatch.h,sha256=q4W_UftuacE2giDiICfAZKt6rsvXLcSVMRj9nVPAt6U,787 +torch/include/ATen/ops/_test_optional_floatlist_native.h,sha256=7YYpDoEFhYq9k1C9b4_cLy1S9Bdr8ZW3yOj97ndQF_I,688 +torch/include/ATen/ops/_test_optional_floatlist_ops.h,sha256=2l-BzUblcMTvBF3iA9Y12KEeRJbRptIJwWHuibcuzzw,2012 +torch/include/ATen/ops/_test_optional_intlist.h,sha256=jn9K-6p_a4DJcdlqOtbF72AbdH29JUMwttQtlGpfpVA,1356 +torch/include/ATen/ops/_test_optional_intlist_compositeexplicitautograd_dispatch.h,sha256=eVJi6ab4DkHA1hpRYMU20_i-t6JWl5x4Gwd_3Qlem5c,969 +torch/include/ATen/ops/_test_optional_intlist_cpu_dispatch.h,sha256=4fatPAnfN2rAQBlteXcNtAfobxwhvOhbAGKYnLA6mTk,771 +torch/include/ATen/ops/_test_optional_intlist_native.h,sha256=dJskxa7ELJnLxfjjBNJtYQDhdJ6myKFBGu7LoOn5L0c,656 +torch/include/ATen/ops/_test_optional_intlist_ops.h,sha256=ytTamYcqCS0sl-_j9NAVXp2-hMO7OBirf7XOwI4y7vM,1912 +torch/include/ATen/ops/_test_parallel_materialize.h,sha256=zw57oIXt_WFyURPHy9cTbIqjnEeOezH10lCNsPo-aWw,806 +torch/include/ATen/ops/_test_parallel_materialize_compositeexplicitautograd_dispatch.h,sha256=XE4Hk9a9vPYma_3WtJeaiWF7KjPC5_ZSYAl8ap08b3c,829 
+torch/include/ATen/ops/_test_parallel_materialize_native.h,sha256=aByqHFEr_ej7oorX9TRWbrUpkj1zSy0pS9ooZNg9k4Y,541 +torch/include/ATen/ops/_test_parallel_materialize_ops.h,sha256=Qkaxo6WLc2395IzmU0jN8jkhdij6AwugJ0bYMqt3gOQ,1161 +torch/include/ATen/ops/_test_serialization_subcmul.h,sha256=ysnc6XPZlheOKp06-Fz3irbvzGIe-9cRbLypis75Ngk,796 +torch/include/ATen/ops/_test_serialization_subcmul_compositeimplicitautograd_dispatch.h,sha256=Wp0aj5ZRbbyZWS8wtq5talYpwhixe-oonjXzH1etaos,839 +torch/include/ATen/ops/_test_serialization_subcmul_native.h,sha256=mXdkiCSVbgfNj9WjVKyzb7OSV5pZtM5-I34YKLhq8fc,551 +torch/include/ATen/ops/_test_serialization_subcmul_ops.h,sha256=EA4cQCu311LuRKCER1skRoUEq4uhU_PCNKz8Zw32cM8,1204 +torch/include/ATen/ops/_test_string_default.h,sha256=ZSBM8j6e-fyygQ7lYPZ3Dl6-e5Oagp_eL-xI9hUZLXE,765 +torch/include/ATen/ops/_test_string_default_compositeimplicitautograd_dispatch.h,sha256=ykuGwGkSaYvf8c3IFy7ShiYsFUpHg-0gU39GV9vurFc,835 +torch/include/ATen/ops/_test_string_default_native.h,sha256=H7PAeBn9l_ruo-rHRHh3gX-jZueNASft3IxfAYhQrQc,547 +torch/include/ATen/ops/_test_string_default_ops.h,sha256=-lyYiqQHGcce7KwkWznsxecxICjuGElprfrLbCZ96ow,1168 +torch/include/ATen/ops/_test_warn_in_autograd.h,sha256=-mRX0jL2ZI4PPtOXLN8OR1rvnWYZ3WfTC3v8CGH8Q0s,1164 +torch/include/ATen/ops/_test_warn_in_autograd_compositeexplicitautograd_dispatch.h,sha256=tTOb7ZwkeT68ED7U4iQ36i5JV8nc5oae7fZyjgpSvMY,969 +torch/include/ATen/ops/_test_warn_in_autograd_native.h,sha256=aEbLX58hGF0yNaUEWCub5zcGt_c61uqCjUPaeRc-Pkg,586 +torch/include/ATen/ops/_test_warn_in_autograd_ops.h,sha256=LXQsxBTpmqq49IOEQDP2LiId-2Vl-8R5AzCwcbSsDMs,1686 +torch/include/ATen/ops/_thnn_differentiable_gru_cell_backward.h,sha256=X9hnqBNSqrtvxTB9XIy0xl7912iqX-Z-2HgR2rdG-jE,1177 +torch/include/ATen/ops/_thnn_differentiable_gru_cell_backward_compositeimplicitautograd_dispatch.h,sha256=V8AkkPDHv9J3yVgQMNzPIBrbplBOsYWFX9Ru9SX67Xc,1042 
+torch/include/ATen/ops/_thnn_differentiable_gru_cell_backward_native.h,sha256=nq7LGsXDmtt8Iy8-XkH8R0GNPxTb40_SbFVo4BLV7i0,754 +torch/include/ATen/ops/_thnn_differentiable_gru_cell_backward_ops.h,sha256=seOUxRIc1Sj02OcgR_4PJO6l0qxucwvSj4qS0FPsYYA,1877 +torch/include/ATen/ops/_thnn_differentiable_lstm_cell_backward.h,sha256=Wiacd9koA3c7t8oxRT80Y6StDMLg8hVOZg-Kw0Ab1DY,1308 +torch/include/ATen/ops/_thnn_differentiable_lstm_cell_backward_compositeimplicitautograd_dispatch.h,sha256=NHeGssyYpSOd8iW5-vTxf8Nv717NFZ09r0wGcDJPFKo,1128 +torch/include/ATen/ops/_thnn_differentiable_lstm_cell_backward_native.h,sha256=GM7irVmgvyvsfYjcZt9gxMfZSKUJg2zM2YVUIyayy0g,840 +torch/include/ATen/ops/_thnn_differentiable_lstm_cell_backward_ops.h,sha256=TAYRn6WClcwgdlMs7E4ouIlftA8X4m95OQMKybz6weA,2153 +torch/include/ATen/ops/_thnn_fused_gru_cell.h,sha256=5uSfITWweN7X0VHs-LsjRJ-LHQAyrjwvXRtUP1Vj_VY,2263 +torch/include/ATen/ops/_thnn_fused_gru_cell_backward.h,sha256=cb1k1y0bPfB-BToJsaEMazHHwovtW4hBCeFxuNKuKso,2224 +torch/include/ATen/ops/_thnn_fused_gru_cell_backward_compositeexplicitautograd_dispatch.h,sha256=pliN6RJ8qCKuNQFrlK3ihS6popLP7Qas9Ir9Hxs7lsE,1295 +torch/include/ATen/ops/_thnn_fused_gru_cell_backward_cuda_dispatch.h,sha256=8HjHcxn6nL6fJrElSvtigF-RRAilTePjybsy8ll9k0U,851 +torch/include/ATen/ops/_thnn_fused_gru_cell_backward_native.h,sha256=ZlM6BijtPOX9GqwHpaNOs5Hi8ujQBEZW8e_9vGMgPOY,902 +torch/include/ATen/ops/_thnn_fused_gru_cell_backward_ops.h,sha256=khhs5hOygQqbaOKp5mkVGtjS3yaL_3O_nNWFM5luKS0,2779 +torch/include/ATen/ops/_thnn_fused_gru_cell_compositeexplicitautograd_dispatch.h,sha256=HohqvDNSWsfTLfReqnuRGwCf7J1eCc_X06t1g0AkO_A,1315 +torch/include/ATen/ops/_thnn_fused_gru_cell_cuda_dispatch.h,sha256=_hbLys3SLktulDkF_Y1qNuiqQm32y8OcgkepCTiECoQ,927 +torch/include/ATen/ops/_thnn_fused_gru_cell_native.h,sha256=LxKpLO-wfEc5cFqmHgoWrNuFwdsjXV3j_JuANi9lZKQ,985 +torch/include/ATen/ops/_thnn_fused_gru_cell_ops.h,sha256=FCZS5GOJ6wRwA3CLMB26ww0_cAo0-chN3MTglnLAj8o,2974 
+torch/include/ATen/ops/_thnn_fused_lstm_cell.h,sha256=sZlHAXTBCeuHL8vF75_G3kork05Bpeh3TFp6X0zh3Uc,2426 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward.h,sha256=pxMqYYF221r8LqjMDTc1UUbVjPFSCSw-tATCzQPilYU,1075 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_compositeimplicitautograd_dispatch.h,sha256=WsdQJEWMCssxvPgFD11M-cE2n-Mv8bsMhnu7eKco7oM,1002 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_impl.h,sha256=uj6rJLQDoxAiRxa1OOgQi9kJu9k_AmYgWK2WOHISRlM,2473 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_impl_compositeexplicitautograd_dispatch.h,sha256=1ty0WCM2UI952dD9jaZ25ICybHYY7nfN7WM6MtvPJSA,1395 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_impl_cuda_dispatch.h,sha256=FJrzXVS32VJxIuvmlChbWqkVaXwUP1gzMSIndjaVjCc,943 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_impl_native.h,sha256=3xccIgEXSlx5W-dsSP2SUbCFgwm23z0N_t-jcXyAnyM,1044 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_impl_ops.h,sha256=naoIPezD1EYTWc8UIQtibgC1xBuGfrJPZkINMkYVtOU,3193 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_native.h,sha256=JpbWkNTdJzin2_rgT_UvRGX19Vs5NyHYHyIchO4l7Qk,714 +torch/include/ATen/ops/_thnn_fused_lstm_cell_backward_ops.h,sha256=Yl4xdvXEfXg07h3pXHpXciRXNckj-00BttSp5FK1HGA,1755 +torch/include/ATen/ops/_thnn_fused_lstm_cell_compositeexplicitautograd_dispatch.h,sha256=b5aoBRWXPqoaJDDFPHc_-5zst2egppFQxfMflgA9Qrg,1381 +torch/include/ATen/ops/_thnn_fused_lstm_cell_cuda_dispatch.h,sha256=jNik6chSYEEGVfSemghHNfwAzcQGGSqTJRG_daSiwrM,939 +torch/include/ATen/ops/_thnn_fused_lstm_cell_native.h,sha256=g7euIgapGo1p4kwP8wJ7r5yGXC-uZf5_DD-s9lNo1aE,1030 +torch/include/ATen/ops/_thnn_fused_lstm_cell_ops.h,sha256=AlwzIhtsNbRU4I3WyGfjSfWP7_6YdR5v2z6V7izAR88,3141 +torch/include/ATen/ops/_to_copy.h,sha256=kzLou8xcpFroOv4NjweVndRQkHt89dQG7zTG1R2n45I,2416 +torch/include/ATen/ops/_to_copy_compositeexplicitautograd_dispatch.h,sha256=lPdzG7mnGboDpIM7vLSTU1mP_1YrEr3Yg1Pzr9WDDTM,1472 
+torch/include/ATen/ops/_to_copy_native.h,sha256=vYXG9-S4inX9mpz8k88QHVqrP-n68C1NFHS1wf_wCb0,1181 +torch/include/ATen/ops/_to_copy_ops.h,sha256=fgdRE6B7cIR5FaDvgvcI4ga2_vUHDrK8JWO8nOUItik,2570 +torch/include/ATen/ops/_to_cpu.h,sha256=P1PHm1ns67GBD5XSXEaIc6yS9FkdyhpT7qunVR3JywM,642 +torch/include/ATen/ops/_to_cpu_compositeimplicitautograd_dispatch.h,sha256=-EBNmOaU_6yFQfctFFWNuJN4cjKyq6MleA4NIZe5QCg,779 +torch/include/ATen/ops/_to_cpu_native.h,sha256=pG5Zexz8q4Fc6lyQ1MctwR8j3iJPW08wUL2PI2da44M,491 +torch/include/ATen/ops/_to_cpu_ops.h,sha256=D1D-mmaWnyxhouCFEO_-ltW1FYZ3xourlm2LS7Rl7bU,1016 +torch/include/ATen/ops/_to_dense.h,sha256=bo-f0Ug-_mKYXCd8mUVZFGRgwhxZGDxm7kwOItPqvwA,1204 +torch/include/ATen/ops/_to_dense_compositeexplicitautograd_dispatch.h,sha256=ZAT64IcmJmrbLFB-XloOPAfuHfI0UlQxgqD6dmaeYmY,1051 +torch/include/ATen/ops/_to_dense_native.h,sha256=61rJ9X5ID4vP3mkRu0YRps1xtdBteQaKsjawhJjGYT0,1089 +torch/include/ATen/ops/_to_dense_ops.h,sha256=cAjbQMvHpf_-v1B_7J1r98gCt6eRbdSRbvrOSeAWIUA,2112 +torch/include/ATen/ops/_to_sparse.h,sha256=_AlOKJdw5FibDSJQ3v0_U5YnDJoGqMpfPDdAy5O3dNg,1902 +torch/include/ATen/ops/_to_sparse_bsc.h,sha256=WQhkW_-uAxAMdDc_5M0HE7CuEUjH3UfidC5aZV_vcM0,1188 +torch/include/ATen/ops/_to_sparse_bsc_compositeexplicitautograd_dispatch.h,sha256=fam1VQzSLLCZB3J1pahOgLKtYJf1tE0GNK5DY0cUoh8,1024 +torch/include/ATen/ops/_to_sparse_bsc_cpu_dispatch.h,sha256=blP7DE3uY9tz6im7ZexiFS2BPK6ZU3lNKXTGFESjnxo,806 +torch/include/ATen/ops/_to_sparse_bsc_cuda_dispatch.h,sha256=fIKb3dsD0QpLSEnnjH1qhOaAdUjX7psYxggIMu1-tO0,808 +torch/include/ATen/ops/_to_sparse_bsc_native.h,sha256=4KBK2zVd-AYyQiwAyKFziz5GRl7ntrf31BLxtp7lJrU,1016 +torch/include/ATen/ops/_to_sparse_bsc_ops.h,sha256=HxOPOy1C7CMHoRc-AHFcXl3uFydUYE-eNmS19jBusPQ,2054 +torch/include/ATen/ops/_to_sparse_bsr.h,sha256=gBWX3R6X3y37Yk8pwkiisAMX2rEfUKhyf3C2obIyFpM,1188 +torch/include/ATen/ops/_to_sparse_bsr_compositeexplicitautograd_dispatch.h,sha256=3bHuExdABZIVZNGg0xT4kgMqGVezBIffU4fSrbNuEDw,1024 
+torch/include/ATen/ops/_to_sparse_bsr_cpu_dispatch.h,sha256=Le4t3heVElAg3yXv8QMedDHAUOGNotq9-OQJfgUMD8Y,806 +torch/include/ATen/ops/_to_sparse_bsr_cuda_dispatch.h,sha256=nKQmh6kkgl5lpidMrQaT0c3QiVgF_zEed81w-vkYRv8,808 +torch/include/ATen/ops/_to_sparse_bsr_native.h,sha256=ULb-RPfmp6TrPLD4MT9-_ntjAJPDZEfOkoc4UovImNI,1016 +torch/include/ATen/ops/_to_sparse_bsr_ops.h,sha256=In7KgcMzHUy_7nTMazI9oYWTGVx9ld60L4tEf10yA2I,2054 +torch/include/ATen/ops/_to_sparse_compositeexplicitautograd_dispatch.h,sha256=EPyrKRTez9xEKtEcCmMyggR7TO0vfbKfkyX0zYcABj4,1339 +torch/include/ATen/ops/_to_sparse_cpu_dispatch.h,sha256=SBcCDseQjd1QxOINig5bGipQmP24BzJC3HhQjvadL44,954 +torch/include/ATen/ops/_to_sparse_csc.h,sha256=hMf2Fukt7B5Fcq267XpxUm2kahPCug7E5BMcRoYGdns,1076 +torch/include/ATen/ops/_to_sparse_csc_compositeexplicitautograd_dispatch.h,sha256=SQbXYWbGMoIpg8PMyOI2IXOzAW66jGkoPvqTx30rdfg,970 +torch/include/ATen/ops/_to_sparse_csc_cpu_dispatch.h,sha256=IqRFFzFseCoJh2hFAzMpJJVBzxMp7k5lgoLSHqhcBQU,779 +torch/include/ATen/ops/_to_sparse_csc_cuda_dispatch.h,sha256=KLaRadeIl0aQ49EuiB2X2-WuDPo3vqtWje0tAQvkAuI,781 +torch/include/ATen/ops/_to_sparse_csc_native.h,sha256=yhzoNsarm1vrkSqIM4rnsC3FQkWo9a2JR5VJo0MSkZg,908 +torch/include/ATen/ops/_to_sparse_csc_ops.h,sha256=01GBwgNvhh2sYhmotGY1mjgQd8_F_2Pg-6Ex2xS67r0,1876 +torch/include/ATen/ops/_to_sparse_csr.h,sha256=czx9HRSSVtIWpeoDopD7I0CmR7sZgf8XHpgYin4A_Qw,1076 +torch/include/ATen/ops/_to_sparse_csr_compositeexplicitautograd_dispatch.h,sha256=7uATwNV0f5Ddc5lXsYienkBuHlUHsKaqArO8OiGbz40,970 +torch/include/ATen/ops/_to_sparse_csr_cpu_dispatch.h,sha256=9y7osDwlpsQAOgAqQ5noFhBFVmfPKf0VIrgJdfm91KM,779 +torch/include/ATen/ops/_to_sparse_csr_cuda_dispatch.h,sha256=ksNj7dh79crqynpIOsvXgcy832JZFDqrw-IaKmwFgSs,781 +torch/include/ATen/ops/_to_sparse_csr_native.h,sha256=Gd6d3_XWzO2I3B_h7UMdtw-uskfrFQ-N7JQP5ZUULcY,908 +torch/include/ATen/ops/_to_sparse_csr_ops.h,sha256=mDXBzMewnQQSpSLu8MQffdJpb5B1nxC8EMSpLtm97fY,1876 
+torch/include/ATen/ops/_to_sparse_cuda_dispatch.h,sha256=yIU_SizcV7c9oWajtExEsoGLzs-Ph2IlCtAMktuKCCk,956 +torch/include/ATen/ops/_to_sparse_native.h,sha256=1cl1V0zYiPXuLzEhc1TG8ozpEnhD_ZtMFNUvys_vgKs,1652 +torch/include/ATen/ops/_to_sparse_ops.h,sha256=mUkZMbtCuP67Y2RjF7AbtlM3vBPInZFQSThKZMHYt-0,3712 +torch/include/ATen/ops/_to_sparse_semi_structured.h,sha256=agGz9uVL0iZWBbY11oN13jtVNeUDjq-wOV0YGaHrZiQ,732 +torch/include/ATen/ops/_to_sparse_semi_structured_cuda_dispatch.h,sha256=fuBR9LuFkYTnI9RMXfM_oZXyAnYtbbZ0K1F8030-BA0,768 +torch/include/ATen/ops/_to_sparse_semi_structured_native.h,sha256=-wCaWb9t7ih6goIWqlnsmlkGRpYpoWwVizFTkRqQQq0,522 +torch/include/ATen/ops/_to_sparse_semi_structured_ops.h,sha256=cXZ7KKgUEnHo9Pxu6XY97AlnkOczyfJYMigcgP-gZZU,1115 +torch/include/ATen/ops/_transform_bias_rescale_qkv.h,sha256=rGLjXbiv_qK6IPcMs5dowwOw8h01RX7JNWb7qBIzOlk,1868 +torch/include/ATen/ops/_transform_bias_rescale_qkv_compositeexplicitautograd_dispatch.h,sha256=xST10llBXdLyPiW3bmxHbESKM2zndrNbf3uOM65BOHY,1161 +torch/include/ATen/ops/_transform_bias_rescale_qkv_cpu_dispatch.h,sha256=BcWO5Q2hcIw00yN3xIuT0gkkX-j-2q_k9l-eqTgJ5U0,824 +torch/include/ATen/ops/_transform_bias_rescale_qkv_cuda_dispatch.h,sha256=HlM9SZU5PEIeSefcEVD-QZATP5m5NxkpgC4lssLFbqQ,826 +torch/include/ATen/ops/_transform_bias_rescale_qkv_native.h,sha256=sEdanw4EAlrDpjclyPPkVM-2tHfhvYYY4cixzeHefpg,970 +torch/include/ATen/ops/_transform_bias_rescale_qkv_ops.h,sha256=rSokP-aJdCh5OAbxBi7ngfdYEVeVHAXv-wygcOQHXv4,2437 +torch/include/ATen/ops/_transformer_encoder_layer_fwd.h,sha256=Lo8L54FGDmRwS7JMswU3rhZEUM6o1BQaw0OToxowWjA,4610 +torch/include/ATen/ops/_transformer_encoder_layer_fwd_compositeexplicitautograd_dispatch.h,sha256=VNeaKUWLSxqN8VuxDFQETMbUGC7vle0bOWdcxDqRxok,2015 +torch/include/ATen/ops/_transformer_encoder_layer_fwd_cpu_dispatch.h,sha256=Ud2yGJcp15VV9IpwyRzxKNMt1_8nWvgIHF63Eh_ahao,1303 
+torch/include/ATen/ops/_transformer_encoder_layer_fwd_cuda_dispatch.h,sha256=nHbccHpB65qoAnJFr4fafUEe6r25uX5CJAnF4shhQkY,1305 +torch/include/ATen/ops/_transformer_encoder_layer_fwd_native.h,sha256=KKohB7vuIFJhmPR_mADpO__DWycyWQ37VyetBMSn3Sc,1705 +torch/include/ATen/ops/_transformer_encoder_layer_fwd_ops.h,sha256=zs0p7WNXv72i7v9XQVDxpjQ_yd9VM8Nnjoba4Q8NImk,5280 +torch/include/ATen/ops/_trilinear.h,sha256=V3iUUqcVVvlSPi20yUlrFmwvX8Zes90q7W7Xvre91D0,1987 +torch/include/ATen/ops/_trilinear_compositeexplicitautograd_dispatch.h,sha256=h4OFRkJpe-f2PPOHOBxmhEstKlJLY06dhn-fT7ij8Po,1203 +torch/include/ATen/ops/_trilinear_compositeexplicitautogradnonfunctional_dispatch.h,sha256=MofkLAAcO-AxDBQLVW4wrhHwI8xsnREFX1HpFHNcfRo,959 +torch/include/ATen/ops/_trilinear_native.h,sha256=2DI_DU5RCu8OfjN0oDGdhShe9x6g6XnoG5lmNDcBT3Y,890 +torch/include/ATen/ops/_trilinear_ops.h,sha256=8o23MDDPtpbQvZMLbnqaryJUDSjh7iwxWCx3-mEsql4,2694 +torch/include/ATen/ops/_triton_multi_head_attention.h,sha256=tTf3e8_Hx64KdgEFi_XCr9sPeIkXQmBrzhzQJRc3s0M,2691 +torch/include/ATen/ops/_triton_multi_head_attention_compositeexplicitautograd_dispatch.h,sha256=1HnNA-rZks7QiEOTrczhNBk44OlXpbwQL476uj77mmc,1418 +torch/include/ATen/ops/_triton_multi_head_attention_cuda_dispatch.h,sha256=DAl2gI5EpPYhnp7wwgwO0v6bLGgtTDcO2dSstYgAzQw,999 +torch/include/ATen/ops/_triton_multi_head_attention_native.h,sha256=khrXr5Lmhx__s4IFDyjyTNlTkAdRpb9QkYnBJYio12k,1104 +torch/include/ATen/ops/_triton_multi_head_attention_ops.h,sha256=4cAp8PChMaccpOo-oiGbR7JSB79bp8-RB_9_BYz0_vg,3378 +torch/include/ATen/ops/_triton_scaled_dot_attention.h,sha256=ag8wTmQzWanN_kMQLfKHFaVmZbEhuNFlT9p18DqF77U,1565 +torch/include/ATen/ops/_triton_scaled_dot_attention_compositeexplicitautograd_dispatch.h,sha256=vz8X1LBijz7MhuCMP1lqfWfP9LSDjC8HpJFjhh6N75g,1033 +torch/include/ATen/ops/_triton_scaled_dot_attention_cuda_dispatch.h,sha256=bDy9UkXTBmbkpbGFjHO1J9vUjhfvFQ2RyEK06rj7PoM,807 
+torch/include/ATen/ops/_triton_scaled_dot_attention_native.h,sha256=KB0j40GD3RzFH4QeM_vCdPhGpcwM24KO86jiqHUCGYg,719 +torch/include/ATen/ops/_triton_scaled_dot_attention_ops.h,sha256=oO6bObXAXfERTE_do0_gvNO7KKQdk8ESGCLalx5zlAQ,2130 +torch/include/ATen/ops/_unique.h,sha256=kNDdgBGoCTemlCAU6k2Sx-6ha_6GfJrO-gDSM0l3RVc,1552 +torch/include/ATen/ops/_unique2.h,sha256=UVx0BP12YLH3eabF8TYi_HSGSeUSoCRqmnY8-wpGK4U,1910 +torch/include/ATen/ops/_unique2_compositeexplicitautograd_dispatch.h,sha256=RFxgYNv-dkWh-NMPvBuc9zOZ_Y2pkzqj3_7a1NdL638,1154 +torch/include/ATen/ops/_unique2_cpu_dispatch.h,sha256=SBLWdFoYgOvUSkjW-f9d-4MP5yfZJImBiL-3dJ5cIjs,829 +torch/include/ATen/ops/_unique2_cuda_dispatch.h,sha256=JPmSRM4Dy00a2hdVIPLjkhtMopQqZNePESg8t2dlW1c,831 +torch/include/ATen/ops/_unique2_native.h,sha256=-iYnfzSMjj0YHHzbn4Zku5fp-AHoRoXY4-VLCtWgAHI,970 +torch/include/ATen/ops/_unique2_ops.h,sha256=cpOGx2tY1qhyiOWsj4DChsk_aoI-sWpy8pMEOLgRIr0,2409 +torch/include/ATen/ops/_unique_compositeexplicitautograd_dispatch.h,sha256=WHmU251GvT2zvtZbiWnp3rSjh4X-KuOivcGoUw8JFgA,1042 +torch/include/ATen/ops/_unique_cpu_dispatch.h,sha256=hp3VmTSL8ofYTOcxEeO58D6QPP7_e_qYYhjW19vrrRg,791 +torch/include/ATen/ops/_unique_cuda_dispatch.h,sha256=Lj3bVAgjF7sNcWF8Daz2Q28ETSXVajJO8nGC81mSCoA,793 +torch/include/ATen/ops/_unique_native.h,sha256=KH1X0pYxC-cnVg4OWoEHZlDFEbGhdV6drs1KdpTW63g,841 +torch/include/ATen/ops/_unique_ops.h,sha256=Srpb1gf5HRuHZJEtE9Q9E41n8g39WOwkD4ZEZGDdums,2098 +torch/include/ATen/ops/_unpack_dual.h,sha256=jebqM_89LgLgCwGKbDyfsTtJzBvyMtGkN3Gp0ikMlUI,727 +torch/include/ATen/ops/_unpack_dual_compositeimplicitautograd_dispatch.h,sha256=sIKK7Hdi3-F01RcfasGV7xgAC6vbkdoWehxWmbuMeB4,810 +torch/include/ATen/ops/_unpack_dual_native.h,sha256=E3aQzkZa5BRcVxU81Ui_Xfp1I2CG47RXddLDXQz4xZw,522 +torch/include/ATen/ops/_unpack_dual_ops.h,sha256=euObiPIQLXb-haqujwP4Aok7iTjgfXj0M6g-3yaR8Y0,1141 +torch/include/ATen/ops/_unsafe_index.h,sha256=nbzimOhxsy-dyrrUEAF1NtRp_qjj9HJG1GkpJNITK_Y,740 
+torch/include/ATen/ops/_unsafe_index_compositeexplicitautograd_dispatch.h,sha256=3NsCnxhKNw1hpG8xuQEPi6n5289oxPMqnWGfpl5f5Z4,827 +torch/include/ATen/ops/_unsafe_index_native.h,sha256=DPvOhrc2AI1WIPh46zTaqc8OKama5u8eAwJpBdbWjJ0,539 +torch/include/ATen/ops/_unsafe_index_ops.h,sha256=pDkdCfKhe6GTH7AN_WL7-ebMH1P34Nj9WiWrwVYkqGA,1187 +torch/include/ATen/ops/_unsafe_index_put.h,sha256=8kM5pOCTW4dEJ2SMvEMnApn0ZbUdo34DyuL5DaaUZqM,850 +torch/include/ATen/ops/_unsafe_index_put_compositeexplicitautograd_dispatch.h,sha256=H_-4iw21a2Saa8HaMsslOG9Du_TN492cOGHd_ByuvXA,881 +torch/include/ATen/ops/_unsafe_index_put_native.h,sha256=aQTdhdlkFDAbOqebSu7r5OLHvMifT6OXW8clmRoMvao,593 +torch/include/ATen/ops/_unsafe_index_put_ops.h,sha256=-evZoGvlCRzso1ZWd1q4mMkz6Pr5EJ954WFrLRC6-Lg,1331 +torch/include/ATen/ops/_unsafe_masked_index.h,sha256=V7gg596FpFOELl4qOecsAGc3jqnvNTGUAugOq_9kzXM,842 +torch/include/ATen/ops/_unsafe_masked_index_compositeexplicitautograd_dispatch.h,sha256=NpCYdjOWFe5gqhW-2NUIGH4dGhlBgGS1Q61YMbL85S4,884 +torch/include/ATen/ops/_unsafe_masked_index_native.h,sha256=qbiwi3jX10FuDGyH_sfG5duWfKwWsigHU4D7L8jkBbs,596 +torch/include/ATen/ops/_unsafe_masked_index_ops.h,sha256=8JdQnSNagA8bHTgdP5UX3pgACjZD4DHdvN1VsKdiBTo,1354 +torch/include/ATen/ops/_unsafe_masked_index_put_accumulate.h,sha256=9wHctMdLFzG0Jvr_fZCFfxesnKY69pOGBlUCboQoUFA,908 +torch/include/ATen/ops/_unsafe_masked_index_put_accumulate_compositeexplicitautograd_dispatch.h,sha256=iKG9W5zLeBBaUvp7e2vQZTCKSSeIGJuzV_ZJI779c4k,901 +torch/include/ATen/ops/_unsafe_masked_index_put_accumulate_native.h,sha256=xn393XUbsWOLDzes4dxo7bG2irG-Tebgr_Bzey5QBYQ,613 +torch/include/ATen/ops/_unsafe_masked_index_put_accumulate_ops.h,sha256=yecbHHIOZ4h4oUpoJdBIXQmn892wbYU4YgyFZFOcGZw,1405 +torch/include/ATen/ops/_unsafe_view.h,sha256=K1UYebCpcTg4SWsDn4yZwQ59VHn8hxDDVqwIr5_c3k4,3684 +torch/include/ATen/ops/_unsafe_view_compositeexplicitautograd_dispatch.h,sha256=__cRFBu4oiCt8v1aOz1Pu38u8ejj2H5DRi3vr3YWmJA,1333 
+torch/include/ATen/ops/_unsafe_view_native.h,sha256=4Z7_zPyuL-_qM6JIt5xCCNuSC0xGMhCLrCIzdnJAzmY,621 +torch/include/ATen/ops/_unsafe_view_ops.h,sha256=103Oj-96TFdcGVQ267hDG5rY-bJ6jko32SHMBfubUiI,1802 +torch/include/ATen/ops/_upsample_bicubic2d_aa.h,sha256=xZXJDpKAk9wKtvspMN5jBUwjXwMBuOPm9hAFXYO_8Cc,8119 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward.h,sha256=6VDkNHv1rPBHEg-_h94Xo25T8w2kdOq_iwhzFYf4Tj8,7815 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=mjgBYTXriMTbvYdmqJ6WlSxm4TQIVQIppE1boDM5kyg,1273 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_cpu_dispatch.h,sha256=RUWLSKGJm7OREtOy7xMf3rEdmeNXKxcdPyXc4Toq3kA,2343 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_cuda_dispatch.h,sha256=1W3iZtie5QdmeW-eX_f7AUMNs_oLaMfS89kkBNTdYlA,2345 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_meta.h,sha256=KEIpH_Fo748zbftpVwMRQoGxVfLkkr4FxUsWNF6VlTc,764 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_meta_dispatch.h,sha256=tvlrACoIP6obBxmAVoneB5sKJs0eDfW3ihpjT1iT8Uw,2345 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_native.h,sha256=-P9R8bMkn3kmxDnbYl6m2tI2EIyCFKlHd6rfAIaa6Uo,1213 +torch/include/ATen/ops/_upsample_bicubic2d_aa_backward_ops.h,sha256=nUy9gNhf7t2GsL9GHJbCR7MS6UyJi0eUsSK_GWLwVcM,2850 +torch/include/ATen/ops/_upsample_bicubic2d_aa_compositeexplicitautogradnonfunctional_dispatch.h,sha256=h1rRskuYn3rT9kK6jEaeOoqIy4YQSzed41wHVIvc3iQ,1181 +torch/include/ATen/ops/_upsample_bicubic2d_aa_compositeimplicitautograd_dispatch.h,sha256=f6s6fJvLezEgR2WVAIJiBig-NpTUKHs3DYzukG7kwNY,1082 +torch/include/ATen/ops/_upsample_bicubic2d_aa_cpu_dispatch.h,sha256=e6A31185Om3JsWahpGgBRY5N3DxdtZTefCRjBFwzZMw,2039 +torch/include/ATen/ops/_upsample_bicubic2d_aa_cuda_dispatch.h,sha256=u0uBdlTL3HmZbzl7WVBit_-YIv_IKZqSPzB5STz_Lx4,2041 +torch/include/ATen/ops/_upsample_bicubic2d_aa_meta.h,sha256=ehU26YvmRrf53OnGJ3VBExMtaihyXQoMy2goVOO_EzQ,714 
+torch/include/ATen/ops/_upsample_bicubic2d_aa_meta_dispatch.h,sha256=Z41q_q9vvKtQpWc9cMvmkqmbEAO0SxeTdgnVxkBmhCc,2041 +torch/include/ATen/ops/_upsample_bicubic2d_aa_native.h,sha256=bL06iwoAH16xOQoMor51p4KSgXwHxAwITa8jh8c3qpM,1253 +torch/include/ATen/ops/_upsample_bicubic2d_aa_ops.h,sha256=ReRaRX6A8BZjH12VW8P3E79LzlGEIL3J1ADKl33dsm0,3459 +torch/include/ATen/ops/_upsample_bilinear2d_aa.h,sha256=dg5og1Xa3uDP0aOiZizvd8UWgl2keU4JRw-eiZhIVMA,8160 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward.h,sha256=GAS4tMAatUn5vTP0WFkyS8bTaIrpJtr90YfJBMXnqDo,7846 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=VvlCjCtOc9zr6vocH8Lc6_QOdBimCs6al80nKW7oLzQ,1275 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_cpu_dispatch.h,sha256=D8__U9vuSC0bq7uE6bNEB0xLDbcLvbxOKZww8tiSXKc,2349 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_cuda_dispatch.h,sha256=U5Z8rQ3Mnap8BSzNRxVL4G5zvk4RnYyoI0y2vS0udXE,2351 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_meta.h,sha256=xY4AxWP7ebESKNl0Bap_ULEkOmMyLfW7cmVvE8nGjOc,765 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_meta_dispatch.h,sha256=3PIcj7xnJI-i5Nl_UmM3sZ7spQ3IZBvGXCMOlGtdlSk,2351 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_native.h,sha256=h2wEPtzpmx2GOaK6CDpfKgKlDu5ag6nob5woYYq6aNg,1218 +torch/include/ATen/ops/_upsample_bilinear2d_aa_backward_ops.h,sha256=Ph5gVmr9InRudwLO9UM71G9Nig2XQesjMPhFmVm8tws,2856 +torch/include/ATen/ops/_upsample_bilinear2d_aa_compositeexplicitautogradnonfunctional_dispatch.h,sha256=asBqyvdfFzEBKc9WmbI2QntBjrFU-kCY-zrmUQME4pQ,1183 +torch/include/ATen/ops/_upsample_bilinear2d_aa_compositeimplicitautograd_dispatch.h,sha256=4P5v_o4DuJGpdcmlb2aMFbUMpYNdgUovkuGKKbzn7i4,1084 +torch/include/ATen/ops/_upsample_bilinear2d_aa_cpu_dispatch.h,sha256=RgYJi2FBZt2k6MupmIYLTTl6JZAQliOcNBaKfziWTLs,2045 
+torch/include/ATen/ops/_upsample_bilinear2d_aa_cuda_dispatch.h,sha256=7yYqMDLzbdAcZpgdTxCSSB5ISk1di83qcVQ5u5KJjgU,2047 +torch/include/ATen/ops/_upsample_bilinear2d_aa_meta.h,sha256=1GlYy75xUcadjfKPyZUqjWNPvSpfBPNs1LG0qsWeToU,715 +torch/include/ATen/ops/_upsample_bilinear2d_aa_meta_dispatch.h,sha256=SMvhEig57M3ZdBFxC9qFdWlJjdYObepiZdNK3e0kCUY,2047 +torch/include/ATen/ops/_upsample_bilinear2d_aa_native.h,sha256=dCqiRGS3iLCOeHAxVLvjiADU8nIYr3UQgIaOzC1siYo,1259 +torch/include/ATen/ops/_upsample_bilinear2d_aa_ops.h,sha256=oso7q792rMTXl_XnUQ1jO989_nR9yppN-jlpXpbW3w4,3468 +torch/include/ATen/ops/_upsample_nearest_exact1d.h,sha256=6nM_9HK2oi0M2-xowcWKVlxC0w5h-KiznSqyfFJbT-o,6682 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward.h,sha256=bokD-TzSj90ZD2RtWRXtELENhyOkG9ldmFf72KqItSk,6528 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=jgloO5KLAbQ1O31ILM640r8ltLQuTptNgrnhV3xqaXg,1137 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_cpu_dispatch.h,sha256=rJQoTh0S7jFSPcqkrRM6eIb-yx8TBfCQnvgbjkGw8PA,1965 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_cuda_dispatch.h,sha256=1LZMkisIrY05B8Gt0d7HqcYlTsZ6VgsYSS6icKWFFCw,1967 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_meta.h,sha256=mlXeCuEOtZj7KGMjucjgG1lz-abPgEST4KYvNSrC3eg,711 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_meta_dispatch.h,sha256=1dWZe1EKuWSC4Tc5Bz-lSvdUSAxtroaOBwX_oDRi5aA,1967 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_native.h,sha256=_N44JcuOXQjnXmlHVl3eZMgGXWJ_x5qkcjnN6qWBUKg,1116 +torch/include/ATen/ops/_upsample_nearest_exact1d_backward_ops.h,sha256=ry-7mnFS7tmwP_aXYvWGH0eWlHHBmAgOVHChlILLILM,2494 +torch/include/ATen/ops/_upsample_nearest_exact1d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xpcQPkojWbxNXHjRlLbPqigQ-IIcCv2AFBK6w4gHgPI,1045 
+torch/include/ATen/ops/_upsample_nearest_exact1d_compositeimplicitautograd_dispatch.h,sha256=UuEgMAWviJpiHB3bfIKqf7z2VOz-Z3g6PRBl2ECJEdc,1048 +torch/include/ATen/ops/_upsample_nearest_exact1d_cpu_dispatch.h,sha256=-pB_Pd9ttETOe29P8DNK6hJHHq3iB6mkYhYOaqlfOtU,1661 +torch/include/ATen/ops/_upsample_nearest_exact1d_cuda_dispatch.h,sha256=Q7L67s9JzWxu6LRKENc7Dz3-DV-0CBbxIlRHXHl9934,1663 +torch/include/ATen/ops/_upsample_nearest_exact1d_meta.h,sha256=334cxCj4qkz_UKg0g3Sgstb4sCYgHIPiDq6hd2FQSQA,661 +torch/include/ATen/ops/_upsample_nearest_exact1d_meta_dispatch.h,sha256=BcTTzGmNwrIzyx29HCLic8lkmIy-Z1B7i6kz-WNuwT0,1663 +torch/include/ATen/ops/_upsample_nearest_exact1d_native.h,sha256=n9aP0zwcRJiEgLRnFNQyqgQfeKNmpI-B4yTVmuvVsrw,1139 +torch/include/ATen/ops/_upsample_nearest_exact1d_ops.h,sha256=tFGaJ-dO8DEz3ZlI-EPlLhravnv8YZF0SUNeyQN78_s,3046 +torch/include/ATen/ops/_upsample_nearest_exact2d.h,sha256=ZDPxySXp_YWvGRrFuV5DiUmkWM1zkWlF2lsiPu3X-dE,7522 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward.h,sha256=iett4QCe3eLObGbf2tDvpl7a9ofz4peFkZshlaDZsho,7368 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Figkb7pZLrYcquDC-TP65R60aJpEjm84HKxFiYwEmpU,1239 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_cpu_dispatch.h,sha256=Eo-NB-pZoHPmTg8teueJnQmMIoRVeF-ZkccJfhdgPlU,2241 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_cuda_dispatch.h,sha256=7C20N4N5wzhkqA9ie-7hYFeHerF8B7XE-id_KfXrArU,2243 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_meta.h,sha256=n9Lexa0BmrwG34sfW7ONddLj4J_-k71RTIC2rAdjwuo,747 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_meta_dispatch.h,sha256=TKWTX0th05SvUrEUwC4cwWarI5pQTQvhETZFEFrT0uA,2243 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_native.h,sha256=2E6VJ7gktkoI2t-DxzBfLMvsWiJDrrUP6sdfIfUPgFw,1188 +torch/include/ATen/ops/_upsample_nearest_exact2d_backward_ops.h,sha256=MFecL2RysqcUy5Kwav2_ih0lxXHhIp3msLNZzQpf3iM,2736 
+torch/include/ATen/ops/_upsample_nearest_exact2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=mTG9GRUS9omcfZ1YV4MgQhFO4Hv4rdVwnuOD4d_1nW4,1147 +torch/include/ATen/ops/_upsample_nearest_exact2d_compositeimplicitautograd_dispatch.h,sha256=sEBmQ8ADwPxrwADA0wXKCE6o-iai-kazJ3gI5zJotmc,1048 +torch/include/ATen/ops/_upsample_nearest_exact2d_cpu_dispatch.h,sha256=MAu0fcjWe3o3_Jo5M0cRF7-DvDeyrby_GGS7vOFx4N0,1937 +torch/include/ATen/ops/_upsample_nearest_exact2d_cuda_dispatch.h,sha256=o0GMGmNHq5CuL480E3qUO8v2NVIFpS_lEC4gVc5zusg,1939 +torch/include/ATen/ops/_upsample_nearest_exact2d_meta.h,sha256=JM6YEXBAIeBrC1v-BXz2JtA5zJKocQso-jFeaXFIS0c,697 +torch/include/ATen/ops/_upsample_nearest_exact2d_meta_dispatch.h,sha256=YYUbR8HnEBU8bQD1sA1HWiL8f49FKkKyM1tWOfb30hY,1939 +torch/include/ATen/ops/_upsample_nearest_exact2d_native.h,sha256=0vcLx6d7lrOgsjSRsw_v7pqeks9v2j3osWUNrkL-I88,1425 +torch/include/ATen/ops/_upsample_nearest_exact2d_ops.h,sha256=IGfEYDdpX3qt5gZh-_pWZV4a-UzsmZwLPR2FtuWJesI,3288 +torch/include/ATen/ops/_upsample_nearest_exact3d.h,sha256=1TzQnIagPXvUXKwSVDMcHX3GRFwIgfnt6w1MGc6Vkm8,8302 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward.h,sha256=p0vVFrOK84daN4MOvw9cSjEN3iKXxRgczx7TMiDOeD4,8148 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Ml5CfjG1ygJ1baFYZELuxmEJzXq8v7ZmmLtxs0kTmB0,1337 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_cpu_dispatch.h,sha256=fjgZj9B4_PTV8ApdclaZIFbPxVt9U-O-cIzWSG4OyK4,2505 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_cuda_dispatch.h,sha256=kXZjYa2ModKGKFsx9dy3FIcnRvuPetqMgu3DDhBIXiQ,2507 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_meta.h,sha256=gXNdXTesQYlYYg0BabWuXG3p1SbCoNlutUJ5jlAJuUI,781 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_meta_dispatch.h,sha256=aq4B053yyEtxQd0hmGxYOYnHqwEvWIpxuEtmOrAj_wU,2507 
+torch/include/ATen/ops/_upsample_nearest_exact3d_backward_native.h,sha256=VzrR0rlSIFNzQHr4CbK4AccOlUzcte0u6dT2Hkf6Wr0,1256 +torch/include/ATen/ops/_upsample_nearest_exact3d_backward_ops.h,sha256=692LqsgKOYNcFm_mgQH3tFm_s0KsewPuq1DnyW1XO50,2966 +torch/include/ATen/ops/_upsample_nearest_exact3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=qzxWUtSqF6RJmqiWaoQma7bhMN3aYe1sqUcnsxWjYxQ,1245 +torch/include/ATen/ops/_upsample_nearest_exact3d_compositeimplicitautograd_dispatch.h,sha256=X12SmZXn6tt_dSoqrbwq4RJ2DQuJO6wD9t7coPAyjpE,1048 +torch/include/ATen/ops/_upsample_nearest_exact3d_cpu_dispatch.h,sha256=QUyoYaEKdbBd3cyuqriPu-nhdjlgTxtBgWa9Rj6A-pM,2201 +torch/include/ATen/ops/_upsample_nearest_exact3d_cuda_dispatch.h,sha256=zDnu5af7LmyPItr9urmA3i89LoDkS_R3DhNENvc0Ags,2203 +torch/include/ATen/ops/_upsample_nearest_exact3d_meta.h,sha256=mkQpA_2D_IWHtSttPF9olErVIx5ES3uC2_0ap2vSSNs,731 +torch/include/ATen/ops/_upsample_nearest_exact3d_meta_dispatch.h,sha256=JwueezvAjnGeahTKJ3YNE_0J2MJjUHquLzgAZ1ldJ-M,2203 +torch/include/ATen/ops/_upsample_nearest_exact3d_native.h,sha256=azyktqFJbeN8JN8bytvQ2KN1PJK-Lkqdts3OzOb_lKs,1542 +torch/include/ATen/ops/_upsample_nearest_exact3d_ops.h,sha256=ocjI7Dd3ISRFN7IfrZazuxHFhcoc5q-LSGJXpN0IOas,3518 +torch/include/ATen/ops/_use_cudnn_ctc_loss.h,sha256=XsiV-LQ2U81fnKM1V8eCVxIWjeb4Hk0_PDU0wTFw8GY,1322 +torch/include/ATen/ops/_use_cudnn_ctc_loss_cuda_dispatch.h,sha256=_JdPWp-bKk5rX5Kl_Rcyfc_raJGUKB1uoELuKQt6LTw,1018 +torch/include/ATen/ops/_use_cudnn_ctc_loss_native.h,sha256=R07eImaK4YDd9WskhcdhavGBKx1g4wxfG7zeMUWst88,779 +torch/include/ATen/ops/_use_cudnn_ctc_loss_ops.h,sha256=h9EsRG2eToyiRF_v91bG36w1H01ARfWOCa2mlIWWkF8,2298 +torch/include/ATen/ops/_use_cudnn_rnn_flatten_weight.h,sha256=xdtIbCozNlmySE7OXBEGcxy_dAXE3pFMbDw3ykbu6xM,660 +torch/include/ATen/ops/_use_cudnn_rnn_flatten_weight_compositeimplicitautograd_dispatch.h,sha256=i6BU07fqmo_ZVFMdOr5cuvGL-g5jXTnHCC4xqyoCG6A,758 
+torch/include/ATen/ops/_use_cudnn_rnn_flatten_weight_native.h,sha256=VHgmDbRVs30B7NsxJ6mfG1crFC6AplXoaqVK4WZwBbo,470 +torch/include/ATen/ops/_use_cudnn_rnn_flatten_weight_ops.h,sha256=8vsONtUEyv1-OyR_bSEalS_2YLlANwbobrcUn_JXuFk,939 +torch/include/ATen/ops/_validate_compressed_sparse_indices.h,sha256=m9ubzx-vQUDxz70aW_JY0b_3KRYzNY5cO09bVJVcsYQ,930 +torch/include/ATen/ops/_validate_compressed_sparse_indices_cpu_dispatch.h,sha256=JlPzyXxgay2-czVFdBPfVlK1Y6MhjCdKlDjFaF4hmew,837 +torch/include/ATen/ops/_validate_compressed_sparse_indices_cuda_dispatch.h,sha256=XzfESsww_gubv4HgOO0Wbv7bb8D79GjWDfn8VMUdI1w,839 +torch/include/ATen/ops/_validate_compressed_sparse_indices_native.h,sha256=iM3PS0euMOTJILTxnVufBZ0M0GCjg5TcZb2s30Pox2w,773 +torch/include/ATen/ops/_validate_compressed_sparse_indices_ops.h,sha256=X2xiIy-OlWJL0JCTFoBnqdk36YFWCw38Yd-Ylz6qpNk,1343 +torch/include/ATen/ops/_validate_sparse_bsc_tensor_args.h,sha256=3bsnVUMqa6bNbDI07dXjvuX3n2ufwZhAcVBes7thrio,887 +torch/include/ATen/ops/_validate_sparse_bsc_tensor_args_compositeimplicitautograd_dispatch.h,sha256=rl7t3mamj1oDgb1XdfwfKqimPIEY3i12sF8x2xNkCqY,873 +torch/include/ATen/ops/_validate_sparse_bsc_tensor_args_native.h,sha256=2ParPf7OYYk1llVXJhb2eIWKXPetDR6qy2cQ6wo5sas,585 +torch/include/ATen/ops/_validate_sparse_bsc_tensor_args_ops.h,sha256=RRjSc5zatqnMG74atyzUG9wIlcewIO7lU2b0gfSpXSQ,1313 +torch/include/ATen/ops/_validate_sparse_bsr_tensor_args.h,sha256=8Lx3a2CxyH5LX95Vx3KtdqnuQbbSRBa0qqBInDYHAZ4,887 +torch/include/ATen/ops/_validate_sparse_bsr_tensor_args_compositeimplicitautograd_dispatch.h,sha256=4DPwWFqCNHh2d4QhbvkI0uRTb2vnT0vG6qWS5Bn0O3U,873 +torch/include/ATen/ops/_validate_sparse_bsr_tensor_args_native.h,sha256=zgzS88UGaXbLTgiahayCjyibtFa-ikMiio9Eh9yp5KU,585 +torch/include/ATen/ops/_validate_sparse_bsr_tensor_args_ops.h,sha256=iaCqOb-Gs3FYtgzsch8lblm7CgNM58s1tfyjo95E8Q4,1313 +torch/include/ATen/ops/_validate_sparse_compressed_tensor_args.h,sha256=KCgWUuCzPJaz1R6otbhLV1zBH6NKL3qySeKc4zBrQ1Y,981 
+torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_compositeimplicitautograd_dispatch.h,sha256=SFeuUQe70FZIJWGkTW-YXMu24dsH8sBn7pNEjQbJ-CU,907 +torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_native.h,sha256=2WkW9kvrSwRNXP_I-8lagSFs-DesFilHXfztjXjfXQk,619 +torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_ops.h,sha256=JyirtCwe5nT-P3ZTKIMD9Ggg1D58IR8OEUjchsNpaa8,1423 +torch/include/ATen/ops/_validate_sparse_coo_tensor_args.h,sha256=WgbLQ245JsY2NfNZrhj2Ym03OWGgRZm0pwLsA3BqpkE,897 +torch/include/ATen/ops/_validate_sparse_coo_tensor_args_compositeimplicitautograd_dispatch.h,sha256=ySWzlzAJ4ghA2igNjhjRMLni7xRb3lz-9zZ329voy2Y,887 +torch/include/ATen/ops/_validate_sparse_coo_tensor_args_native.h,sha256=ON54WXyrj3HcplgQ4RMqy6W9F_5BypMaLOMznCks3QY,599 +torch/include/ATen/ops/_validate_sparse_coo_tensor_args_ops.h,sha256=StQnfIt7IqX7dBSHe__-QnLfPFRP94zK4y-Z9xzcmCY,1314 +torch/include/ATen/ops/_validate_sparse_csc_tensor_args.h,sha256=47w5-UyTYDV8HJu1K4iVmgONAjZRjzAZXOPFOTI76vk,887 +torch/include/ATen/ops/_validate_sparse_csc_tensor_args_compositeimplicitautograd_dispatch.h,sha256=jEPx5Wzd0rft4gt_rXby_TaUWQrr4xXeQSEDSi8R4-8,873 +torch/include/ATen/ops/_validate_sparse_csc_tensor_args_native.h,sha256=AtNWz-f-inoCz8dlOKEn6yaQyLzroTtIi4anyoeqkMI,585 +torch/include/ATen/ops/_validate_sparse_csc_tensor_args_ops.h,sha256=Dr47CC7ja4z3PQvmQNaD-yjC5rNmx9nSwAVLxifa4ds,1313 +torch/include/ATen/ops/_validate_sparse_csr_tensor_args.h,sha256=LVqA0I2NLcRpfQ6PeDEI_IK_vLyj6_TdNS12hAU7mhs,887 +torch/include/ATen/ops/_validate_sparse_csr_tensor_args_compositeimplicitautograd_dispatch.h,sha256=97tjm0S9CMuHTyZOxoWvxIWSts1fGu-oZxXsZIaAqXE,873 +torch/include/ATen/ops/_validate_sparse_csr_tensor_args_native.h,sha256=fdenCeHPkAfKT-6L5ivkQF5wBqJzJayrFOX3HDZvRy0,585 +torch/include/ATen/ops/_validate_sparse_csr_tensor_args_ops.h,sha256=3tfxgKKVkKM6rwnVwhOoLf31a9dk0DRPuOI3vuuqYhg,1313 
+torch/include/ATen/ops/_values.h,sha256=oVaIqf95zk4kQ9ae6MoMzaL-ewojrxQGPS8hsEGXKN4,481 +torch/include/ATen/ops/_values_copy.h,sha256=-LTsszfHrdGX3zOeKIi2XlYMPa_CPlGCwhLoNnZPKNc,1064 +torch/include/ATen/ops/_values_copy_compositeexplicitautograd_dispatch.h,sha256=GUoRJIEBtqQUJUch6ZeSi4AL4qcdBTcunlETGm79B7Y,879 +torch/include/ATen/ops/_values_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Kl9X6GR53jLoaeVz6TmqnybLLozZ6eoMceGsJy5ECfs,796 +torch/include/ATen/ops/_values_copy_native.h,sha256=PCHNB5fHnbJFXx9wWPAdncT3jCSJ3Olj46rM7brSBV8,566 +torch/include/ATen/ops/_values_copy_ops.h,sha256=dTG-JF5s82k-LrtQLMx1WNZef3VrZGa_sBbvTSAnk48,1626 +torch/include/ATen/ops/_values_native.h,sha256=cRczV12q-vFaOjymmJYEgS0qdHYESRLfx_HIwTP_ll8,484 +torch/include/ATen/ops/_values_ops.h,sha256=G61xxYj8kWYYyC5vYoXstjvXxD412RQJZoed40hoh50,976 +torch/include/ATen/ops/_version.h,sha256=AC0exTk9vk6U9ByIrzoBh02pH4Tquzy-T7VE6djVyOM,482 +torch/include/ATen/ops/_version_compositeimplicitautograd_dispatch.h,sha256=VQe1SgTRZUcc1Q89pAXI0sPrXrCmizxErnitfKmkEiw,763 +torch/include/ATen/ops/_version_native.h,sha256=VvsYSog7FaJO9ro7wOWy7r4xACEnbRYLLBQ_nBNicXQ,475 +torch/include/ATen/ops/_version_ops.h,sha256=eHq5xXvANEty1z3dnXxzF6FQgZoy32mJcc5XipXsjuQ,961 +torch/include/ATen/ops/_weight_int4pack_mm.h,sha256=i8pvXZrtWDvsrO9AbBy-bYuiy9SucNy1Vh6KBbMYDHM,832 +torch/include/ATen/ops/_weight_int4pack_mm_cpu_dispatch.h,sha256=f-RtE70jrbwLPSAzoH4jUtlCB2XIpJDDb7GwbTml23o,813 +torch/include/ATen/ops/_weight_int4pack_mm_cuda_dispatch.h,sha256=K9-5RIQv8I6f8j5D2IiPa1ppKIyJw6nbESXREfI4HDg,815 +torch/include/ATen/ops/_weight_int4pack_mm_native.h,sha256=k4k6boqLTPu5uvJ-QAYe_8c2ukMznLEq6PTyeUUVlJU,725 +torch/include/ATen/ops/_weight_int4pack_mm_ops.h,sha256=je8h_EGRAW7fExjjYH1FYpaLWlAP09fC6M_EizWzryk,1267 +torch/include/ATen/ops/_weight_int8pack_mm.h,sha256=GVy-xgFVHARH8ub2SPiCbN5TurZeeIYnTOivdz5ZQM0,760 
+torch/include/ATen/ops/_weight_int8pack_mm_cpu_dispatch.h,sha256=IIlfdUaGwPaqLLqivBiaP3DMLwu-zb-XbnKuVls06qk,785 +torch/include/ATen/ops/_weight_int8pack_mm_native.h,sha256=WTJ_H2z8Yl34_nNpmIMUwaq5Wg-B3zyx1kw9Vr53Cdc,545 +torch/include/ATen/ops/_weight_int8pack_mm_ops.h,sha256=prGNJYMQjvxbMn9reOljwDvnBpG0xJRICt4xQdJ_CUA,1178 +torch/include/ATen/ops/_weight_norm.h,sha256=V-56D58deQzpKWdhuzePJpwprVb1qJVppQTYHN1_hLg,695 +torch/include/ATen/ops/_weight_norm_compositeimplicitautograd_dispatch.h,sha256=72xdbSxcp0pSanEpxTjc7JParWoyAh7wxoJDIhhDgA0,804 +torch/include/ATen/ops/_weight_norm_differentiable_backward.h,sha256=UJDfvadRbJdSB62Syh5M4AVPziq3GTZ1cVYWB1Sh6dg,973 +torch/include/ATen/ops/_weight_norm_differentiable_backward_compositeimplicitautograd_dispatch.h,sha256=6FfonTNV7IpjA113GFW1ZGh3359dnWKChhuQSCHiOo0,922 +torch/include/ATen/ops/_weight_norm_differentiable_backward_native.h,sha256=BYXRRkC8XmUV79FuIpnsd_8kcgyMyF9Zhki7w5dndeA,634 +torch/include/ATen/ops/_weight_norm_differentiable_backward_ops.h,sha256=ZuQANowhjFiymWs-Q07monxclLzImMgWVppJBShbsyY,1480 +torch/include/ATen/ops/_weight_norm_interface.h,sha256=uon2bkuEZGLewfH7DRYfCgYTNpqH4zT7wQ72hSwkVwA,1540 +torch/include/ATen/ops/_weight_norm_interface_backward.h,sha256=-RTsgi_4GdzpoT6e2h5qU5IfzQCp2E-jP1O1N-TYLRA,2073 +torch/include/ATen/ops/_weight_norm_interface_backward_compositeexplicitautograd_dispatch.h,sha256=KeuaJ5RYGGtOqiKBbBgAbdBRDmglt9vvzA-z4LOEIy0,1217 +torch/include/ATen/ops/_weight_norm_interface_backward_cpu_dispatch.h,sha256=Zt3kPCHxlkUOEBxVnVrB763dq-1XfgHOK298LrdueyE,873 +torch/include/ATen/ops/_weight_norm_interface_backward_cuda_dispatch.h,sha256=PAJU5hlv45Hf54SNcN2Z1RZem_7u0gczcNYAWs4tBsc,875 +torch/include/ATen/ops/_weight_norm_interface_backward_native.h,sha256=ezTCIFCy3P7B8RPiQ45cVbbBUZa5l9hGF-AwBabLWwg,1076 +torch/include/ATen/ops/_weight_norm_interface_backward_ops.h,sha256=0eP0Tob3IVclSXDry6-G7uowjmnnVOoPspxuOPt5G08,2668 
+torch/include/ATen/ops/_weight_norm_interface_compositeexplicitautograd_dispatch.h,sha256=9MFh2LBtWygir10PGloql-X_3tFJJCd727a62LCMG5o,1059 +torch/include/ATen/ops/_weight_norm_interface_cpu_dispatch.h,sha256=IjGW79hTAoGDV6hN7OUo1RYQSgdjeBYDSzhdeuccVa0,795 +torch/include/ATen/ops/_weight_norm_interface_cuda_dispatch.h,sha256=eyK0rAmzHhKbBk020hL59btp6wAppzXkZpKTQgcrKwE,797 +torch/include/ATen/ops/_weight_norm_interface_native.h,sha256=CHJ9TezP9Y6JmcxoVOGlBgpmhAReG38n-4pLhbp14RQ,840 +torch/include/ATen/ops/_weight_norm_interface_ops.h,sha256=4HPJhcMjEmddnNzd6m4jfxvfiQGSNGGbkvAF-wUZFw8,2160 +torch/include/ATen/ops/_weight_norm_native.h,sha256=uMagnlVSuc1U3uybCpxABGQLWpuz9BoVn13YJoxDaTA,516 +torch/include/ATen/ops/_weight_norm_ops.h,sha256=lvFNc55xUUnEx2AhFpS9z4ehlSvtshmXPLZHgDJjf7U,1096 +torch/include/ATen/ops/_wrapped_linear_prepack.h,sha256=P5U93UAtURGA03TFEHBgxElBuST4Pm0bfi4pTA7qgLY,883 +torch/include/ATen/ops/_wrapped_linear_prepack_compositeimplicitautograd_dispatch.h,sha256=5vrvhqpddG2hILaeqUPI7hN8mO1S5b8guIgHqN_GTss,879 +torch/include/ATen/ops/_wrapped_linear_prepack_native.h,sha256=f4YGXiHKSR5TPpS4OWG8Laa9eQ-nWleAJoMi0xlkXpE,591 +torch/include/ATen/ops/_wrapped_linear_prepack_ops.h,sha256=hupoZGVYFd2KVRFqwYDFSPIKzduLzWVZKck6d-H6gac,1336 +torch/include/ATen/ops/_wrapped_quantized_linear_prepacked.h,sha256=LlpbY3HTCAwe7QSEFwkC9pWc9Z-BbMBB8etx5z-MUvE,1151 +torch/include/ATen/ops/_wrapped_quantized_linear_prepacked_compositeimplicitautograd_dispatch.h,sha256=qLa8p8mkFN-lUH3MPRz6iqq2LC19fgOhK2WFCV1UKM8,989 +torch/include/ATen/ops/_wrapped_quantized_linear_prepacked_native.h,sha256=5pfXMh-H53AWN1aoI67Brz0hUug47LrGN_5n3OV90Ic,701 +torch/include/ATen/ops/_wrapped_quantized_linear_prepacked_ops.h,sha256=JvY5gTxI6T22rgkgzQEGgM8QmM7PBq9zeZFl5nCN8oE,1687 +torch/include/ATen/ops/abs.h,sha256=ORnPlnT-zPJ2jt6vgmJ_GK-wlxOl8Cd91H-iYojVt2Y,1107 +torch/include/ATen/ops/abs_compositeexplicitautograd_dispatch.h,sha256=Sd14UZRfkTCXhNMw0kzSi9_XNtp_kwSBd-kAPnpUWrY,809 
+torch/include/ATen/ops/abs_cpu_dispatch.h,sha256=guyZY2PA6mb3ZW8ApEw8_dWj_yPTUfHqFD_ZjjHBv6k,817 +torch/include/ATen/ops/abs_cuda_dispatch.h,sha256=BH8wfKMo0aJ9_ZeRiRbjXCGIaqjc0dxQLHjJZtpvY_k,819 +torch/include/ATen/ops/abs_native.h,sha256=OK0rGfo2raMc60D3rln3nvpLqUnivApOCHmQVmuMSpg,1123 +torch/include/ATen/ops/abs_ops.h,sha256=5_loAyBNGiXWAcrBV324Z2ivRkd_QxRQqIJxxxPAraY,2095 +torch/include/ATen/ops/absolute.h,sha256=Ca05LMz35EoWEA7Z3GTgeAk4yYBcctgRBHvmI5SYCjU,1024 +torch/include/ATen/ops/absolute_compositeimplicitautograd_dispatch.h,sha256=aCllNnGdo-W3Mi8_NeGVbjB8PQch7PJaQDUuuWTz_mg,980 +torch/include/ATen/ops/absolute_native.h,sha256=-js-9F-8QhvbWG4ctj2yguw85xYb-xTPEXvty0LMXjQ,611 +torch/include/ATen/ops/absolute_ops.h,sha256=mSag_6aFmyUM125g8NNbN0EkhXFLTs5GbGvQbKJdvDg,2140 +torch/include/ATen/ops/acos.h,sha256=2KoNgQn9qlYpd0lBOvyyOmTnexvpWwMX38xWX34psNo,1120 +torch/include/ATen/ops/acos_compositeexplicitautogradnonfunctional_dispatch.h,sha256=7m2UUrN6V0OifYWhq8Y4JdyeU27SaP4jsE7yeFNDkno,837 +torch/include/ATen/ops/acos_cpu_dispatch.h,sha256=_mWbzBse4usazcAkWTyaYcigoywudsk0Of_dlUUMyLs,920 +torch/include/ATen/ops/acos_cuda_dispatch.h,sha256=nHWrZz-0VZnbDGWKYT1YoO0l8UJXhjLl_bd_QxTFC6o,922 +torch/include/ATen/ops/acos_meta.h,sha256=zV4E9zaeOXzSmM7n0Q5UMJCAGCIlRVCn1hEtqBMl18U,573 +torch/include/ATen/ops/acos_meta_dispatch.h,sha256=emksaFT8ie8u9RZaUXkWI1PmR1bLCQhFbM1xnO49M48,922 +torch/include/ATen/ops/acos_native.h,sha256=EsgIgrGX9gn6xHeLl-u4-vlR1UMbCO5gbM1SqZ0feoc,590 +torch/include/ATen/ops/acos_ops.h,sha256=GZKP3FqYRG0qyWkaYZMLYNI4MEtNr4hi9BJNH82CyOs,2104 +torch/include/ATen/ops/acosh.h,sha256=x9-o-Yhjp5FdScoj7fan8fo9_PydpWp1BHJVH5Nf3ZE,1133 +torch/include/ATen/ops/acosh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=lvTlS6z_YYckApSKjIHMwcGS_4rHdhYtbyohz0Wlw6o,839 +torch/include/ATen/ops/acosh_cpu_dispatch.h,sha256=tSsT1qwMLbVIY3CHXWHQ4mi3yULlk6jglSXeOdIMzME,924 
+torch/include/ATen/ops/acosh_cuda_dispatch.h,sha256=-xX9J6cRkppVrvGeEfSGMNPMQDEisgTvt4-eht5YVfM,926 +torch/include/ATen/ops/acosh_meta.h,sha256=Xq1lpe_MaoJgvZrzyEx4EcKfOZ0ewiNEBmhj5BJwtZU,574 +torch/include/ATen/ops/acosh_meta_dispatch.h,sha256=xHl13gBpTEhvg3VCexx1NtNv9vfED9X07Tk57EBaYQI,926 +torch/include/ATen/ops/acosh_native.h,sha256=QxiJ-zMzbW-2UM0WEHZP4hzgvM8Y6be3uFDEyZLisp0,593 +torch/include/ATen/ops/acosh_ops.h,sha256=kjjNzLZhzN04HhYVAYQVMmkkWKTgvZr3-TFIbVLaIRY,2113 +torch/include/ATen/ops/adaptive_avg_pool1d.h,sha256=3qGsBgrwVJgcOFyrQiTJY8Kd1IAy0L7f_kgG7q5ixHY,728 +torch/include/ATen/ops/adaptive_avg_pool1d_compositeimplicitautograd_dispatch.h,sha256=doSzMvk_5-_s1cYEsx_hdPOIlJnNA7bM-7ilOjVir4M,806 +torch/include/ATen/ops/adaptive_avg_pool1d_native.h,sha256=s_x7gHYK_dKh1PvemZtkDnNov4-QFhJjnmTbhsmJZoM,518 +torch/include/ATen/ops/adaptive_avg_pool1d_ops.h,sha256=z-ZTpOpDzKrPuk3cQqZhVNtuddywbhCsFC3exq1YLiY,1101 +torch/include/ATen/ops/adaptive_avg_pool2d.h,sha256=lktnva2qA_enBzp0U4kqeR59bjiQxOb0noVTlhik27M,4117 +torch/include/ATen/ops/adaptive_avg_pool2d_compositeimplicitautograd_dispatch.h,sha256=t58FI59zAhX2cTuIzvks0DctrAqrGLo1xp3oPn2oHAo,913 +torch/include/ATen/ops/adaptive_avg_pool2d_cpu_dispatch.h,sha256=TgCv13yCD1tH1Vq7ThKkaZdUxNJu5Rwe2byz8WiwuQg,1170 +torch/include/ATen/ops/adaptive_avg_pool2d_cuda_dispatch.h,sha256=wmhJw_rDfYi6KQPTe9931Gl83GKVUl30uRpAYbvyikM,1172 +torch/include/ATen/ops/adaptive_avg_pool2d_native.h,sha256=nh2FFzDPh-3euadEZT8bSMZ8giDBd9gMKrgg7-HItg8,910 +torch/include/ATen/ops/adaptive_avg_pool2d_ops.h,sha256=pcGba5OIrKKbh3kI1H2-lEvVwLI_UUuGGRbR1eOjoog,1888 +torch/include/ATen/ops/adaptive_avg_pool3d.h,sha256=s9KHPSLVb-s8p1UO63JltMmJheyXvmIaRbRPvNBp11Y,4117 +torch/include/ATen/ops/adaptive_avg_pool3d_backward.h,sha256=wV1Xnv4HlV7zHbQsUdQfeMo5jWqEvkL3eguBVrRrhyY,1223 +torch/include/ATen/ops/adaptive_avg_pool3d_backward_cpu_dispatch.h,sha256=v6wC2GCnAzBHhY85cfPPRnbOCOto6mPhp_VFvACFfwE,945 
+torch/include/ATen/ops/adaptive_avg_pool3d_backward_cuda_dispatch.h,sha256=bFu-IufA6K9L-EadptOXvHGFDEbn7IevfhsZbXs-HQU,947 +torch/include/ATen/ops/adaptive_avg_pool3d_backward_native.h,sha256=er_kSHYtWj6Me1Bp4bctwXEnT-tNkgNzFuJfzHEwiiM,709 +torch/include/ATen/ops/adaptive_avg_pool3d_backward_ops.h,sha256=b02yK8wkja6Xn81kPD8_uzwL0AuiWIxFdKaontjOU9c,1269 +torch/include/ATen/ops/adaptive_avg_pool3d_compositeimplicitautograd_dispatch.h,sha256=aK7I9UhQVql8xucJeUbx0XkuiwbGgAkAJtTPpT20xc4,913 +torch/include/ATen/ops/adaptive_avg_pool3d_cpu_dispatch.h,sha256=Oyft_GQQdDU8Sqq-4DhG7Gw8LiRQdcJ1IhY5jpDS38k,1170 +torch/include/ATen/ops/adaptive_avg_pool3d_cuda_dispatch.h,sha256=Jgk69BHO0qGFGxciujaApOHckujk_uuf5w5_VFjGzGk,1172 +torch/include/ATen/ops/adaptive_avg_pool3d_native.h,sha256=KJQVJqq-riKj2MMldj9UYjKEiRKVPUZ-8vmzP5ZOO8I,912 +torch/include/ATen/ops/adaptive_avg_pool3d_ops.h,sha256=M3psuKatc3eK6G7KVfPhe7NfIK4JW_bWdcHTifr9A9Q,1888 +torch/include/ATen/ops/adaptive_max_pool1d.h,sha256=JWs3dYFZnOqPt4yl6Ra8GHbdr8jStqL8f5ZnPbfOo3I,763 +torch/include/ATen/ops/adaptive_max_pool1d_compositeimplicitautograd_dispatch.h,sha256=te6hl7u04jOMEM6Bv0pOyQ6fnJh6Nyh8iOIH_BpU5Rg,831 +torch/include/ATen/ops/adaptive_max_pool1d_native.h,sha256=8VjSa5KCBKxtNd0AsZtzMiqaieIoA0GM0meX4mfhMnE,543 +torch/include/ATen/ops/adaptive_max_pool1d_ops.h,sha256=5Uto2fSXm9Uo6VxapRHSEYrFIftA48Oaz3nsnrhXR8E,1186 +torch/include/ATen/ops/adaptive_max_pool2d.h,sha256=H-rKdBfvZiJrMOfpuXhwOnpUUYA_SLVifBE0FenhqC0,1539 +torch/include/ATen/ops/adaptive_max_pool2d_backward.h,sha256=tkv3ud1tQfQDrTZPzhYbowKx4OaNFYVaGSDhpVs2ZFY,1648 +torch/include/ATen/ops/adaptive_max_pool2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Vcb_6ELEGK2EcZofQpiLp4hyAwqio5Pz2wUS6zOfofE,872 +torch/include/ATen/ops/adaptive_max_pool2d_backward_cpu_dispatch.h,sha256=Y92LqbI_PYF_A_GHOSML0rm7ZhoXlOBk9ozXUT_qg88,1137 
+torch/include/ATen/ops/adaptive_max_pool2d_backward_cuda_dispatch.h,sha256=QSGHqMXSuCoFT4nX8HvDpOpkkrCIdazlvatyzS5gcog,1139 +torch/include/ATen/ops/adaptive_max_pool2d_backward_meta.h,sha256=9P8PIhRJLl4ExdjZJupyTSZgRwBJjFxnRODbyn66Qno,657 +torch/include/ATen/ops/adaptive_max_pool2d_backward_meta_dispatch.h,sha256=16jjySoVenD0G_MpOlRxESreNPcV25gUZpzEXeU566U,1139 +torch/include/ATen/ops/adaptive_max_pool2d_backward_native.h,sha256=fD23fMBRq2Wia0lVaKBUEiSCm9Dx_bpvObq7Ai7CEIg,990 +torch/include/ATen/ops/adaptive_max_pool2d_backward_ops.h,sha256=2PWrGxGVOiurUBSdZF0aA3uGOyeVE7YdyLSlXAqeARQ,2156 +torch/include/ATen/ops/adaptive_max_pool2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=aTrCt6bg9EtQzFEIUGk94x14eDa7xcdTcRci9zhBGcs,857 +torch/include/ATen/ops/adaptive_max_pool2d_cpu_dispatch.h,sha256=3rjzlZNn-GDG5yPtmg3ukZuhuh-4mQ0lD9lGKK7F9Ao,1126 +torch/include/ATen/ops/adaptive_max_pool2d_cuda_dispatch.h,sha256=Pe3PvthDJrktjTKO6Xn0pfbQk7IfHyLP7qsUYgJv8kY,1128 +torch/include/ATen/ops/adaptive_max_pool2d_meta.h,sha256=M7EpqEg33BbTny38tI4uhNE8-7_Bxsy4hmdgjLXxi9s,617 +torch/include/ATen/ops/adaptive_max_pool2d_meta_dispatch.h,sha256=ojHCji9QFmLNqeB4XyiNUTv6h2IB54lyusJ2LxdjOJI,1128 +torch/include/ATen/ops/adaptive_max_pool2d_native.h,sha256=0s5dHvdyutBGEHJ0ouiXa-IQ1j91I-FotRvYTbsB06s,925 +torch/include/ATen/ops/adaptive_max_pool2d_ops.h,sha256=5okGZgclefyRYK06uuYgVVQM3pDQngSUhtKxppWhE3M,2116 +torch/include/ATen/ops/adaptive_max_pool3d.h,sha256=ZJL8RaTAfGRE-_hK7p6xVPCiXAhI_QyjwEMwLomalbQ,1539 +torch/include/ATen/ops/adaptive_max_pool3d_backward.h,sha256=AgSRB_WQPXseVbfMnoMateCVMZhgGpmgjzdKgC37egQ,1648 +torch/include/ATen/ops/adaptive_max_pool3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=eNU_1o8hj1AWYfKIUIFDFYSwZ7sv6zXgVTnUsAdjMbY,872 +torch/include/ATen/ops/adaptive_max_pool3d_backward_cpu_dispatch.h,sha256=EmjSFgOSv3ieAJv_OjabIyVNFqdwHKxjAUzf0doAWWo,1137 
+torch/include/ATen/ops/adaptive_max_pool3d_backward_cuda_dispatch.h,sha256=S3-VDdYDIrjMImEjeWftxuEVppzYwoFX85UPPNj1cf8,1139 +torch/include/ATen/ops/adaptive_max_pool3d_backward_meta.h,sha256=CpiTahResWu1FEOOUbL37vkAg_X0FxiU0CHBFCcHOJY,657 +torch/include/ATen/ops/adaptive_max_pool3d_backward_meta_dispatch.h,sha256=c2UMye4JyuLsqMW4LU-tlaqJ6edEcXKOnsKUUWMCPyM,1139 +torch/include/ATen/ops/adaptive_max_pool3d_backward_native.h,sha256=99Fz2KJgJVkgjl0MGxVfbpuqOnXXGnlUyCJyAN1D3xQ,990 +torch/include/ATen/ops/adaptive_max_pool3d_backward_ops.h,sha256=bg9mjTPj_4bDTDL6b1o2qnhq5o3GbjroOaiJwH5bq00,2156 +torch/include/ATen/ops/adaptive_max_pool3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=lfPoQeG9XlF2KFsoXnSts9bueIh1KRiml78ivhBVVHY,857 +torch/include/ATen/ops/adaptive_max_pool3d_cpu_dispatch.h,sha256=Z6Z2a6x6qbbDFeMJOOAWiEG7yW11lu92JM7Kj2hv8uw,1126 +torch/include/ATen/ops/adaptive_max_pool3d_cuda_dispatch.h,sha256=9uMJCaI-YgJByAnN-lPBe5ZifV3rVgCN41QsLv1ILCE,1128 +torch/include/ATen/ops/adaptive_max_pool3d_meta.h,sha256=o3rue4Ty6Gc46kE9-StXmsMbIAG3kNH9RUMms2zVQTs,617 +torch/include/ATen/ops/adaptive_max_pool3d_meta_dispatch.h,sha256=aWkrLvG6luVX0kuPCRDyNaT-LGV3LZdCMtRE6xG_jlU,1128 +torch/include/ATen/ops/adaptive_max_pool3d_native.h,sha256=8omndOUlUGiSt8QpqLt6RZMzJZa0N1GTa7I8Rkc7Tso,925 +torch/include/ATen/ops/adaptive_max_pool3d_ops.h,sha256=eDrdZn2oBuHFtY1fHcBSJGq1aEMzE-eYe3f80keSXZ4,2116 +torch/include/ATen/ops/add.h,sha256=CA2LV7UMceMf99MmBukPbosYZQKtvepla-P3swzxmrU,2115 +torch/include/ATen/ops/add_compositeexplicitautograd_dispatch.h,sha256=j0HNl6Gh438d0W_bXbYoOQRHdwqomzXlfil9BYvvW3w,1174 +torch/include/ATen/ops/add_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xTZVQnxCa_zJUKytiGdSGOUDu7csGECwJVY26eQTkP8,943 +torch/include/ATen/ops/add_cpu_dispatch.h,sha256=1WX_RIqo8xb2cnpUm-ShjUWP7y6QOlVmU5OAfQBLQCo,1130 +torch/include/ATen/ops/add_cuda_dispatch.h,sha256=CCTkCi5g8XV-DJTx8SXCaiER0lDZfZjmcq-sfaCaJ-w,1132 
+torch/include/ATen/ops/add_meta.h,sha256=cWR-xNi7aonaJH0X0x-ANS62KmG43QVPsUOdylxCEaI,631 +torch/include/ATen/ops/add_meta_dispatch.h,sha256=niarZ4Y-q3sdm4bTnG7umOl9AACTzkOZ5qp9JcRXkiI,1132 +torch/include/ATen/ops/add_native.h,sha256=fgUdSiN0tdi1WG18a2vhAJ6_GC51ZEcFcrEDYui11FA,2938 +torch/include/ATen/ops/add_ops.h,sha256=41jOg6s039Cb8hH1mXAb6bxbWqPNl8Imk8ZaaGlu7MI,4907 +torch/include/ATen/ops/addbmm.h,sha256=TSyW8qPMbZ2FVfxufHQWsoeazyZTwcL1i5qhCSBNqaw,1600 +torch/include/ATen/ops/addbmm_cpu_dispatch.h,sha256=9K63lzHWiGQKkohJm6rdZV-s56Ji7lSTExuQwyztFPY,1360 +torch/include/ATen/ops/addbmm_cuda_dispatch.h,sha256=fl_i4wrG3FQLzCM9GGRx0FLBfAl_oTBeaIGmLkoURQA,1362 +torch/include/ATen/ops/addbmm_meta_dispatch.h,sha256=u6BqOgk1T64Kf8Y3Qfy1RdHkdU028zTEV14g2WMLtsM,828 +torch/include/ATen/ops/addbmm_native.h,sha256=94J8RBl2UEt8U5DG9cSPKW80DFawOvAtKFKtbpu_zw8,928 +torch/include/ATen/ops/addbmm_ops.h,sha256=6RT3_qGKVZfYJdzbhH5yTQDd4M3n4GPr5S_W_msYop0,3181 +torch/include/ATen/ops/addcdiv.h,sha256=iZGm_xHGgAU4Q8dza0EunI7CmMTBhaau5QX0HRCFMbc,1486 +torch/include/ATen/ops/addcdiv_compositeexplicitautogradnonfunctional_dispatch.h,sha256=O2hmGLuG1uMX3QbWpkkKUxG1bTQVcsdR-WGXc6w1lYE,1011 +torch/include/ATen/ops/addcdiv_cpu_dispatch.h,sha256=UaOpX_jcOji4dy8OyAaG2jeBPeI7uiAFWRK7XXoESE8,1266 +torch/include/ATen/ops/addcdiv_cuda_dispatch.h,sha256=RmmcfK8nLzBaKunJk7MRunpuI9gNerwY9LMWPkEavkk,1268 +torch/include/ATen/ops/addcdiv_meta.h,sha256=BPMm_F7qGwVyKSqBWdUxVznpRJQchgjV8PYYo7y6WQA,658 +torch/include/ATen/ops/addcdiv_meta_dispatch.h,sha256=nQI-eU_kFhkVcFFcIiJKlQwyBBtEtofhCUuwCrRspfM,1268 +torch/include/ATen/ops/addcdiv_native.h,sha256=PYhAHk7-rls0blvlyaVuqmaDLJGzcW2UmqDY2TfPokI,681 +torch/include/ATen/ops/addcdiv_ops.h,sha256=bqUz629Mmxa7r-Y1Vq6iSaWMiSUiu4v_pWput2bedks,2953 +torch/include/ATen/ops/addcmul.h,sha256=Ud1pRVW93-zNF-7yKfC5nkZwep0Up7owiH5x9kiBMRI,1486 
+torch/include/ATen/ops/addcmul_compositeexplicitautogradnonfunctional_dispatch.h,sha256=iT2tdmTUBkdu2qGNWVuVXkiHs-MbXEM8cJYx91DNX3Y,1011 +torch/include/ATen/ops/addcmul_cpu_dispatch.h,sha256=TrQXPeEDv-tDOenvoO63lTWo70r7tq12JnITApzhDHw,1266 +torch/include/ATen/ops/addcmul_cuda_dispatch.h,sha256=KyvIz560P8S3-Keno6f-PLeAFGSofTSWhZ_cvfOGj3I,1268 +torch/include/ATen/ops/addcmul_meta.h,sha256=z9CPyshZQwSWEi6xlA1o6Oo1LPkuES_QvhqVXdjnWLw,658 +torch/include/ATen/ops/addcmul_meta_dispatch.h,sha256=BZKfPpgkZ094ykWvhuxXt1Ek3LgyZeN-2ZffHuSiR6U,1268 +torch/include/ATen/ops/addcmul_native.h,sha256=v9ozfiEoTKJmPaslcUkPVIGMAG6fDPEicmbpdPMnfko,681 +torch/include/ATen/ops/addcmul_ops.h,sha256=2F_xsljhXJlKtY_3ay7iTBzOWkpgXzHoczEbhSMPeMs,2953 +torch/include/ATen/ops/addmm.h,sha256=YbVvjPv-9TOlzMF7KLttoXnN1c9FAtYwOlV78Jx0cng,1554 +torch/include/ATen/ops/addmm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=jqOudEZYHHcOXTWtPbeFfXnW6ObyVxS1l1KueE_7JPo,1049 +torch/include/ATen/ops/addmm_cpu_dispatch.h,sha256=hiyK_jE3TrYjxzoOuu675SWApWms-hKL-knXeGmOlKo,1340 +torch/include/ATen/ops/addmm_cuda_dispatch.h,sha256=FH4ZTIr2jx6zatXsROZDxxSgxVhWrKgECuD0bBbHU6c,1342 +torch/include/ATen/ops/addmm_meta.h,sha256=3ZaYgiqR3z8nctSgL-dC6Z3dG3qzJ6dovsVsFS68b4o,675 +torch/include/ATen/ops/addmm_meta_dispatch.h,sha256=fFBtcWp4A_kC2smXp662y7hLAQShxAuS0kCET8rNK8g,1342 +torch/include/ATen/ops/addmm_native.h,sha256=hnvt8h5lnu-Qp1gC5iLR5Lqw88ciwpw7yGrWgLOov_k,2617 +torch/include/ATen/ops/addmm_ops.h,sha256=2V4Wqbr9Wvt3GfLCeLEG7Fhm_2iTNXZ329UAmECK3to,3136 +torch/include/ATen/ops/addmv.h,sha256=7CFSvKP86qWK8pzt-j_9BCuuLNzVlVa2UyJk1Oyy-dI,1859 +torch/include/ATen/ops/addmv_compositeexplicitautogradnonfunctional_dispatch.h,sha256=fcvwwlmcROsF7RMPFKZrAkt22mT4-tLpx7367_thNDw,1045 +torch/include/ATen/ops/addmv_cpu_dispatch.h,sha256=f8GvqB9C5_E_8LJJtnhS2DmfZZsmaP3EA7zoGCVrlg4,1332 +torch/include/ATen/ops/addmv_cuda_dispatch.h,sha256=hXvCv9CLCih6tx03U4lhu6U0Q7s7JmSpoDeGwG5R5co,1334 
+torch/include/ATen/ops/addmv_meta.h,sha256=tU-fWe9mfMbqqBeV65PLXMcJ3WJG1sDuph_HgLwzH8M,673 +torch/include/ATen/ops/addmv_meta_dispatch.h,sha256=jzzplhFZukrmnODgPZ1HN30Xixnu3aYzw9ZtStN-FgM,1334 +torch/include/ATen/ops/addmv_native.h,sha256=LHj5wLVGFKe3alwquDsg4nHO9TZRSKqssy2Y9EYCRS8,1332 +torch/include/ATen/ops/addmv_ops.h,sha256=Gclb1xcdEXiLSGSBkeudC68BvE9ZPNTOLBsaQii9o8s,3118 +torch/include/ATen/ops/addr.h,sha256=7mUdHDpjPIFTBC7DF5lYSJDWDM0cd-nAjKzfkeXNVLE,1544 +torch/include/ATen/ops/addr_compositeexplicitautograd_dispatch.h,sha256=bOc7NyFek0_nkLCbJp0hBRLou_qHwoFK2PrMA6IGMjI,1380 +torch/include/ATen/ops/addr_cpu_dispatch.h,sha256=jYQdOB0bS-rn2re-pqcnomYmtsNbHKDmREn4YQDkkj4,1182 +torch/include/ATen/ops/addr_cuda_dispatch.h,sha256=1OJL2mngSnw4kZoIgvmeiXg8qVmXK0F3-Z1zr8qlRW0,1184 +torch/include/ATen/ops/addr_native.h,sha256=7EWY1bOXX5kRQtQsmwebwLNA2O2uKKbCw8Zhj_w7PSk,1254 +torch/include/ATen/ops/addr_ops.h,sha256=onvG12Ch5Px3w3KkmHCFyYg59l4ETPGf5g-9RZduXiU,3127 +torch/include/ATen/ops/adjoint.h,sha256=b4wuCwku3l-B6FB8xC5ew0nhXTKFlNBFdDkAU3Yq6m4,624 +torch/include/ATen/ops/adjoint_compositeimplicitautograd_dispatch.h,sha256=BesD70OxuwD2C4d9wA1ZThjrS_jgl-zwz2NcDz_mi3Q,765 +torch/include/ATen/ops/adjoint_native.h,sha256=aTCG1dA7VAV-HkzDwc40oe-VKg17rm7T20Ee4ifODGU,477 +torch/include/ATen/ops/adjoint_ops.h,sha256=G0gGnVej_a3yCd-il4iwnPZhIQiBvS1dlv33LsNYNpM,976 +torch/include/ATen/ops/affine_grid_generator.h,sha256=LJBIAiqed5S_Vy40ZCzSApg5_1pmYDYUK5Wjzt4BMbk,4533 +torch/include/ATen/ops/affine_grid_generator_backward.h,sha256=8ahFeMQ2hRKyhlCZZT3VFp5zlIUpfs-7fsr0GFpd1ao,1804 +torch/include/ATen/ops/affine_grid_generator_backward_compositeimplicitautograd_dispatch.h,sha256=1YaTzIDDslmB_kRpQNu_XINLKr5hJbULjycu0w-JK4Q,961 +torch/include/ATen/ops/affine_grid_generator_backward_native.h,sha256=4bYUiIIpiG73M_jvIncTcgLh4plKtLKpJ_JYHrTZEdo,542 +torch/include/ATen/ops/affine_grid_generator_backward_ops.h,sha256=Rcp22vnB0FSOEhwZSyei01rPjlbb6UUicb_44qIAgpk,1193 
+torch/include/ATen/ops/affine_grid_generator_compositeexplicitautograd_dispatch.h,sha256=YSde4i--4aHYeOubjnopPAqprHHj_-BBFgMM0Jn2m48,1513 +torch/include/ATen/ops/affine_grid_generator_native.h,sha256=8vdvvWorzFVmnkUyX0pRIi-kal7l329wjtwXl8GLfeg,681 +torch/include/ATen/ops/affine_grid_generator_ops.h,sha256=6k0fHqvTJaAmH513lUMcbqbdy3Hny09BZZedP0Q1QcU,1994 +torch/include/ATen/ops/alias.h,sha256=2rSFsoJplobfYsG4gmPFpDvCJg_vq5mCcr4oPCMQYgw,616 +torch/include/ATen/ops/alias_compositeexplicitautograd_dispatch.h,sha256=6VotKbMM5R0GKEdKbiGXEYg61aUVwBkp8UCnJpub_h0,763 +torch/include/ATen/ops/alias_copy.h,sha256=7KqUfUs3BW4uhCrU9tfawppjvsU_k9Js95V3FscZd58,1044 +torch/include/ATen/ops/alias_copy_compositeexplicitautograd_dispatch.h,sha256=5WlJxs_AfbbyZgIoQOn5WluDQFgHf-hth5uNZAKiUpk,875 +torch/include/ATen/ops/alias_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5igB1i3qMZZwWLGxwYjkaXdz9ocfcYrbOyoil58SKWQ,794 +torch/include/ATen/ops/alias_copy_native.h,sha256=QQ3pu9APKWVyq7YSW5wY_HuFBYbLdPb_z5MVe9nnkfc,562 +torch/include/ATen/ops/alias_copy_ops.h,sha256=_SDLRmwZkNxnaElSTbSXlnvt9epYSCHnlyKjBNok0EY,1614 +torch/include/ATen/ops/alias_native.h,sha256=dnS4OjTrbbgMDtUuqUerKu41PsuU6fYAADG8B2g9_Ko,535 +torch/include/ATen/ops/alias_ops.h,sha256=UdmTY1sWnrUkUI6XtE_M8p3je5vY_js5UOX_k3NESwM,970 +torch/include/ATen/ops/align_as.h,sha256=CE8UvRE_poQoI9qRnyKjvfsjdpHIkHrnBESBuebHvsk,482 +torch/include/ATen/ops/align_as_compositeimplicitautograd_dispatch.h,sha256=nb-cMhkOmO8r21o10vcHCUTjuffjRoyvRLWGU21kApA,792 +torch/include/ATen/ops/align_as_native.h,sha256=gy1YVg2CKI1VOWaz-3A_fgCTjbvpyF470dlRd7TMi_0,504 +torch/include/ATen/ops/align_as_ops.h,sha256=4iaBOETSXa0al1vsFhwJ2yMf_YniZFr0wiE4AXHMqwM,1059 +torch/include/ATen/ops/align_tensors.h,sha256=dcYSNoOf1PR6mdeam9l1MU-IA2d6TgJp58OdXb6V2rY,666 +torch/include/ATen/ops/align_tensors_compositeimplicitautograd_dispatch.h,sha256=rU4xlJF6Io3cOrjdgnyjT0rdhqC3YJsWrxQF0FxX6sc,785 
+torch/include/ATen/ops/align_tensors_native.h,sha256=lQeNZGXIaajRfP_hgG8S9PiDwc6guf8AH8Q0_Vd8iAE,497 +torch/include/ATen/ops/align_tensors_ops.h,sha256=uJC6UJeoZDJXzNh6WeOq8_TuXP7MTYZGZpOnWrjgZHk,1034 +torch/include/ATen/ops/align_to.h,sha256=GjMc7B-MPCc7ftRjQsZAL1B_Z56Ffsy8etPUc8S6vZQ,482 +torch/include/ATen/ops/align_to_compositeimplicitautograd_dispatch.h,sha256=mB3Eed8SeV_HwN3Lo0_01loyRCJ5u70IzWGRL4uzlzs,890 +torch/include/ATen/ops/align_to_native.h,sha256=nF6ZBvbH-wUMMco3QjcQu7wvBhKzlYb2ZR0HYy2Ujfk,602 +torch/include/ATen/ops/align_to_ops.h,sha256=F81q28IjjCeqx2d5tac3lSucPfArMprvzi79cBwA6uY,1793 +torch/include/ATen/ops/all.h,sha256=I63MkY-o3PBtFoghUjgfbGACKfZt0JZL8cYqI0HdlCY,3332 +torch/include/ATen/ops/all_compositeexplicitautograd_dispatch.h,sha256=-_BR4CX8sA0M6UOMb_ZQtlBnDvtyDVf-eCOnhlXQ5PE,1053 +torch/include/ATen/ops/all_compositeexplicitautogradnonfunctional_dispatch.h,sha256=O8k4zyELAzs9wZSRkvhyxBeMpgOHW_LBnWeSwdgLU8s,971 +torch/include/ATen/ops/all_compositeimplicitautograd_dispatch.h,sha256=kQxvnzw9ZOK7Dl_a4eGZ14boarkNjAn83f9NDPoP4DM,1017 +torch/include/ATen/ops/all_cpu_dispatch.h,sha256=VEvzY92jUf1eH3De9dEu2qeBygt5u-zwB2e-UmhQTGw,1506 +torch/include/ATen/ops/all_cuda_dispatch.h,sha256=hLSwkUrnjWwnVGoZTeAK1x6ihYdCBadIcVHptJe5B4E,1508 +torch/include/ATen/ops/all_meta.h,sha256=6UqOZg17mERLgu9A1U6gySUH3dqyI1TyE1MqSV-yhiQ,881 +torch/include/ATen/ops/all_meta_dispatch.h,sha256=gI7mXiscbMIjlHsW_c7wyYBNEzTjF0ipFOmn14rw7_k,1508 +torch/include/ATen/ops/all_native.h,sha256=JdKAqD1WO-UoQGb5bSOdgKP_6uSnNunRdxr14GwB8Zg,1495 +torch/include/ATen/ops/all_ops.h,sha256=pCvxj32lpHnOGrwAffa95HsjcI2Mb3pR7hFH-ApYUUk,5817 +torch/include/ATen/ops/allclose.h,sha256=vmHxAY0g09HWnV9VqG-rSn7TnFFRMGZK2ygBlx_KMuM,802 +torch/include/ATen/ops/allclose_compositeexplicitautograd_dispatch.h,sha256=qPU-HKtBOEw9esjjAgBPDcIbEW-M0vvEYcmaMjpiDBw,846 +torch/include/ATen/ops/allclose_native.h,sha256=jZpfcqkQfVXvyp7BA6Hu0lZue75KU5mLcvk0QFPkedY,558 
+torch/include/ATen/ops/allclose_ops.h,sha256=c2Z_4uavSZhxT3h2hDzitlQiz4xukkyf4anrhd_G4UQ,1203 +torch/include/ATen/ops/alpha_dropout.h,sha256=HS951ZZma4ADRlxpd-J1jAkxeZnBUfmP5viyV_dI-90,914 +torch/include/ATen/ops/alpha_dropout_compositeimplicitautograd_dispatch.h,sha256=1rAd24UqhI4E6zicZY2T-xcYYa89_UNOgVLns3eX35o,874 +torch/include/ATen/ops/alpha_dropout_native.h,sha256=a6ZEnqLXSJsuaKaKuWZM8jRRA2OSz32MeZIGUFm2XSY,586 +torch/include/ATen/ops/alpha_dropout_ops.h,sha256=qUIiVs70WkXA_gimoLech1U1F0wk29hq4bT4bpXhG9g,1702 +torch/include/ATen/ops/amax.h,sha256=trF20bxekE13TvKGVDKOgZ9BuTsK-v01atjLCYEJ6uY,1254 +torch/include/ATen/ops/amax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dhhdZ0veuV8ZJgob87RF2br8KyyKQ7_hHv3hKNw2nrI,832 +torch/include/ATen/ops/amax_cpu_dispatch.h,sha256=bTSDLN-9YaOQ19tOGvZfTcG5Ft-Uv_z31m3I2CJ13Uo,994 +torch/include/ATen/ops/amax_cuda_dispatch.h,sha256=7l9HqnUM1ITMjbqhv00bDf9RbVRIhqQaUUo3pujE_RA,996 +torch/include/ATen/ops/amax_meta.h,sha256=e1vB3ZRSxmKeVmUfZWgygu3b1hlRz1VAjjdXY1FUaXQ,608 +torch/include/ATen/ops/amax_meta_dispatch.h,sha256=N0iRa_0MrwaSYe7RERp08xNHexkJTaGAqNPFeVZp_-4,996 +torch/include/ATen/ops/amax_native.h,sha256=9pCQH2_HrZr27i9NW4Bo9orfvoZdycjlZKeOEhLfab0,625 +torch/include/ATen/ops/amax_ops.h,sha256=iAJVRECY9vFxm8NmI_uuwiApd3O2R58O-Po7jjAYX7w,1834 +torch/include/ATen/ops/amin.h,sha256=MpVjWh61JAPt3is7Xc1F6UkmwRd32veXlG5aW6j2GYc,1254 +torch/include/ATen/ops/amin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=n4l4av7to9aq1T46rMJIiK7x5QQ-ZdveuYq2Kt_1OKo,832 +torch/include/ATen/ops/amin_cpu_dispatch.h,sha256=ItocUOwWj82YOdae2NPjYLpsYE5CrmDxMWeJIFeG0Qk,994 +torch/include/ATen/ops/amin_cuda_dispatch.h,sha256=06N22SMiD6AgUOUtQVnUBq2Vu53hnzYeSRmWwIYLJ60,996 +torch/include/ATen/ops/amin_meta.h,sha256=sUEZGLIhBC1A5Kd6YfJ2Fdq1iiSTdvR1QXEkf7eSHwA,608 +torch/include/ATen/ops/amin_meta_dispatch.h,sha256=pygboi_K_Wt6BTxgR3EQukeYGMEIuBfHQ5sMz3_ByA4,996 
+torch/include/ATen/ops/amin_native.h,sha256=0_544uxUfSmEbcsmYRKzufc5-5r1SwbrbhwGtWxFd1Y,625 +torch/include/ATen/ops/amin_ops.h,sha256=bMiKY_toulTQ37lsjcxrgN8PhkIoNtWerXNDt7wfZ1k,1834 +torch/include/ATen/ops/aminmax.h,sha256=3A2w1OW6QrXBuE5oruweHkT31VDVITdoqoV09fzGOc4,1557 +torch/include/ATen/ops/aminmax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=NVXGvDIkulwXzFbb3Epl5SX961thsbxfc7TyAiS8-lE,881 +torch/include/ATen/ops/aminmax_cpu_dispatch.h,sha256=Pvb1_DiJdKrgTc-zz2egUhYcuDlqIiTaxrNmDPzOxi0,1169 +torch/include/ATen/ops/aminmax_cuda_dispatch.h,sha256=QhP5UfR2npqMa0WcSLbOunCtQPD0izyq8vosz5Ei6Gs,1171 +torch/include/ATen/ops/aminmax_meta.h,sha256=uwq5Z9NEP4ORcdlUBKMfKt7YMDo9qtm9BqjruB4tXRo,620 +torch/include/ATen/ops/aminmax_meta_dispatch.h,sha256=5hpxiZKAmQL_bUAWo1cCKcfTyHqbKIlBiF7x9aC2RW8,1171 +torch/include/ATen/ops/aminmax_native.h,sha256=IuBkUk-WvzQ0TW77_kClhNDijyXeKJzRtbJ8KCT07DA,667 +torch/include/ATen/ops/aminmax_ops.h,sha256=taff828fCmZG_Qq4pzPJgywmor81OKcLCysYa4I4cJ4,2171 +torch/include/ATen/ops/and.h,sha256=u7y9jipTO_BfWsDy_eA6cYIez9UFLZxN5U4KmvfFv4A,874 +torch/include/ATen/ops/and_compositeimplicitautograd_dispatch.h,sha256=ctXesQUpQ1hfL4Dn-q5t3U_t6qS_dBXGQnLmEqE1P08,1028 +torch/include/ATen/ops/and_native.h,sha256=tcYJSC82GFLuzOHrJF_YezsoGMmFmjiWce4bZm4a54k,740 +torch/include/ATen/ops/and_ops.h,sha256=5E07x4OAK77LKxNiGzck0e9QPTWR0vHIOSHqQfpy8hQ,3000 +torch/include/ATen/ops/angle.h,sha256=qer6buR4phSgtiyuulctOYZHUlFLot_oPuc-5mWe1RU,994 +torch/include/ATen/ops/angle_cpu_dispatch.h,sha256=WUvzdsQUiT6_2piUZ-GMqNj5TKPj1xR1ZXn3RAwELXE,874 +torch/include/ATen/ops/angle_cuda_dispatch.h,sha256=8GLy2DqUmgoypN1MhJlOpa6fTk_Ei-PCt1iFXpeJStk,876 +torch/include/ATen/ops/angle_native.h,sha256=7s93AzmCEsKqan7Po9tuCsOIA7PYPGm4fpXtlR9mk2A,704 +torch/include/ATen/ops/angle_ops.h,sha256=ngOD77_TJI1PPxDcgRc3WW_ln3faBNwF9G8TjTlHoVY,1584 +torch/include/ATen/ops/any.h,sha256=EhEuPNbk2USUnUM92Say7WHXe4f7q8ytsOH2Dw5o-Ig,3332 
+torch/include/ATen/ops/any_compositeexplicitautograd_dispatch.h,sha256=-Mfm-PFlLBwYShF2nkvgq3WrBzQpoTnGwGeDDKL8F5Y,1053 +torch/include/ATen/ops/any_compositeexplicitautogradnonfunctional_dispatch.h,sha256=SLNTt6owPxxWO7vdqEhXTG2XoFQZemNjKywixgesqcM,971 +torch/include/ATen/ops/any_compositeimplicitautograd_dispatch.h,sha256=w4q59ZxOZzeeaUeo56MtK3xxkRBreQxI0QSzYDqt0Qk,1017 +torch/include/ATen/ops/any_cpu_dispatch.h,sha256=0wPYeWS0J_m9Z2SvOHBwzi2-WvZlHNVyE_RqhkXO2GU,1506 +torch/include/ATen/ops/any_cuda_dispatch.h,sha256=YTBNllhYHcauscodcajqUciY3ZgkF2XtKuzfZWkiIfg,1508 +torch/include/ATen/ops/any_meta.h,sha256=0ax5s26Ea2TQeJqxnExf_SvIOATkVeyC15jcTRx-qqY,881 +torch/include/ATen/ops/any_meta_dispatch.h,sha256=34-luNWSVXBKVgddZf7vWwUbimKA1TJ-qAGG_7MrY4s,1508 +torch/include/ATen/ops/any_native.h,sha256=0DFqjxZM5t6KnEpquTRx_9I7hPcrJgUDSMs_9S-XuYk,1456 +torch/include/ATen/ops/any_ops.h,sha256=_7rGnuK_p2AO_HPd88hVwzlLvgW89ABVOap-NtlmMis,5817 +torch/include/ATen/ops/arange.h,sha256=mxEMj75vYi-wUshAnyo7_LjW2PklwHioVrCss4ZkrzI,4148 +torch/include/ATen/ops/arange_compositeexplicitautograd_dispatch.h,sha256=dCpSR-uMw9VWbtjyvKRpX5U93H4LrjMg1NOheP1OpHY,1862 +torch/include/ATen/ops/arange_cpu_dispatch.h,sha256=C9Ypq3u7F4qWbk789bHniQ9ymtIaKZcD3VuFsOBZtTg,923 +torch/include/ATen/ops/arange_cuda_dispatch.h,sha256=-qDx-i9dGxN7Hjb4agj2kiiqymNjlQk0Txo82GRrWA8,925 +torch/include/ATen/ops/arange_meta_dispatch.h,sha256=y9edn1kvM-Vqm1UQnotmCTG4u7QKl6O6C-g16zWW0i0,925 +torch/include/ATen/ops/arange_native.h,sha256=fEiBOr_VdQsewlP-_JmIc0PuIDbMQNDYR_6LtVCQ3Cs,1469 +torch/include/ATen/ops/arange_ops.h,sha256=esukPwSzIMAKZnf8euUDqOKxa2rEon1RkwC_k-ogd-o,5247 +torch/include/ATen/ops/arccos.h,sha256=OB8lD5ydXEN2rVFJ1z04BjDa5NNJ7E8MuGamimGfdIc,1146 +torch/include/ATen/ops/arccos_compositeimplicitautograd_dispatch.h,sha256=jw4rphQLHHWCbSigf7ZPoPWNUpOuOgheIke_dAHWvDU,972 +torch/include/ATen/ops/arccos_native.h,sha256=hxOUoKb1tGMk-KvqWd1Epey3zZwq9NClJgIwAjxOvyE,605 
+torch/include/ATen/ops/arccos_ops.h,sha256=XzT7b2OmYlKiQzgijFX0O-UzI34yQ4AlkotHAsgO514,2122 +torch/include/ATen/ops/arccosh.h,sha256=HCSV3EjFD2Tt6J6v6WM5yvvLqyOMiVeYESTWoXZ0Ajg,1159 +torch/include/ATen/ops/arccosh_compositeimplicitautograd_dispatch.h,sha256=M69I6MFjaHKVp03uZT2S_pDD6_hQ4jV8BJY8OU_1kBA,976 +torch/include/ATen/ops/arccosh_native.h,sha256=HfskqPY4VFS8-ONk_9VfURlve_0MXhbeEw1may9Cw7g,608 +torch/include/ATen/ops/arccosh_ops.h,sha256=LXGf18YmN3spOeU3-AcJ5NCFJt0uD9uvfKeCELgHtoA,2131 +torch/include/ATen/ops/arcsin.h,sha256=Q7Sbr0JdDHmLgnYPudQ4VdPMND8jKhKe4zzyP_it3XU,1146 +torch/include/ATen/ops/arcsin_compositeimplicitautograd_dispatch.h,sha256=76NDHrGaKw6hf5SOCh_z0Qp9UTlYFqLEkKJ2FIg4TbM,972 +torch/include/ATen/ops/arcsin_native.h,sha256=r5OHvQmeb0F8zNakT4iBdFpvf2jDU2NCKsdQ-KDIK5M,605 +torch/include/ATen/ops/arcsin_ops.h,sha256=Y-yors0QJcbJIGFLa08-PK1QMO24kHpQeidg7EhQldU,2122 +torch/include/ATen/ops/arcsinh.h,sha256=KfM1N6YeenZ6jXvGjU7kZXyi2uVzJZ8XPL5WhJfbL2E,1159 +torch/include/ATen/ops/arcsinh_compositeimplicitautograd_dispatch.h,sha256=-u5rHNy3oW5PM8Chink7cK_FotvnDM7X7fiRO1zEpSY,976 +torch/include/ATen/ops/arcsinh_native.h,sha256=D8A_dzhIC-IoIhSgZbQx2tWDstpUzi8b8i83t048foI,608 +torch/include/ATen/ops/arcsinh_ops.h,sha256=AwAHIDoEgRtbWybqgsbNDR57iEonzht6qtu4ZAKxYZw,2131 +torch/include/ATen/ops/arctan.h,sha256=fE3rOeB6tXYk7aM_6d_zQaET5S8mhYkxmQp8GJkwBv0,1146 +torch/include/ATen/ops/arctan2.h,sha256=IPdqsVIsL6p2i7sAPrmpf_7kLn6dOY-Cinn_uo-S8HE,1155 +torch/include/ATen/ops/arctan2_compositeimplicitautograd_dispatch.h,sha256=Mj6_sZPVIQkHQPq0CLPgTvxVtWiTGUtPKxQI0nZEAIY,1080 +torch/include/ATen/ops/arctan2_native.h,sha256=kIIkHKYosJ5ckvro65s_sq5PxB5F6kq2Qiro7xVaNSc,686 +torch/include/ATen/ops/arctan2_ops.h,sha256=ab0oc92yfahperoSk0YZlqrluPhpDBgFgs9XnkOF0IY,2389 +torch/include/ATen/ops/arctan_compositeimplicitautograd_dispatch.h,sha256=a_ZC-HFEXxXTPp-PtIXhh8Oeb0Sxsw633Prlmc9Ido4,972 
+torch/include/ATen/ops/arctan_native.h,sha256=JIJfYkHPhul1cc3Y8n6AFrCItwGsDlQ1dY4TP6MbZtE,605 +torch/include/ATen/ops/arctan_ops.h,sha256=n1F9speHGRI513G5VnKIphSzzJHuGPnM1c3x7MeftYo,2122 +torch/include/ATen/ops/arctanh.h,sha256=SQFKm81CEyO4IcAZGK7MQ4t7qggalXMuA31lbGr5wMs,1159 +torch/include/ATen/ops/arctanh_compositeimplicitautograd_dispatch.h,sha256=NHmbbBPpnIzLN7JRZNM1BjOwMHtjZOeAwTx3x8cy0gk,976 +torch/include/ATen/ops/arctanh_native.h,sha256=CN3DxSqbfmJ_LCDkY7Z_YZlXr6A0Q0HZFMWLFwMoE4c,608 +torch/include/ATen/ops/arctanh_ops.h,sha256=Br9XaLb3raJg1PQAnV5meZ3ftkflbXAsF6ZtDNfOiMs,2131 +torch/include/ATen/ops/argmax.h,sha256=eMR34o7kY7_tix6SZzAhNX1ek-1gx_628Qf2ae2mb8k,1325 +torch/include/ATen/ops/argmax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=IPLcxZnKqFbzFrli8nIYL4ylblLo7y3iHsMMoDvizf0,855 +torch/include/ATen/ops/argmax_cpu_dispatch.h,sha256=U0G4VI9t24oJ55ZBfuJ_t0LlaB5LAKyc4o1RJvqOsJ4,1051 +torch/include/ATen/ops/argmax_cuda_dispatch.h,sha256=ld08DRvrgyxHCH3lPD33JORAOhJgaTSnXiw45CQBcLs,1053 +torch/include/ATen/ops/argmax_meta.h,sha256=hewWyj-dxAtlC77KWrAgTSBDUQlmve2ifvYVQuZghnc,619 +torch/include/ATen/ops/argmax_meta_dispatch.h,sha256=Y0EREve1-4F505lD16_ewfzCAAzEu_mpgB3-Om5SRo0,1053 +torch/include/ATen/ops/argmax_native.h,sha256=tog8hu--ioIQzOLljPDPNVHSWI1FH0aF6gMW2pruN9w,640 +torch/include/ATen/ops/argmax_ops.h,sha256=5Nkqo_U0mNKuUTk8BVQgJ4G78yBDCgSF3oNiiZURVng,1900 +torch/include/ATen/ops/argmin.h,sha256=P5wA3WK6OFjc-ZeGNhS3HEa8-QZDbGgUv6jxveQQops,1325 +torch/include/ATen/ops/argmin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=qxvIc4PO_FVE4Yd3I48iA62LNZZvLhIBgQpxa5-JUQA,855 +torch/include/ATen/ops/argmin_cpu_dispatch.h,sha256=NQpc_zxW6qGKo1Lbw_Zf-vRZgsywVi-s_c9xlIg4FUM,1051 +torch/include/ATen/ops/argmin_cuda_dispatch.h,sha256=r9YrJ2oKF8ZL9ctALaTza5rxRxS_q2qoQ9qylKmiJhs,1053 +torch/include/ATen/ops/argmin_meta.h,sha256=UfBTfkQnC4hXLF3jdApOpU8CEi1ryxG_tjCi0zmqkN8,619 
+torch/include/ATen/ops/argmin_meta_dispatch.h,sha256=KvJ8Lt_0OYSN6ofbqpENfaz86wTHNVfGYwzCTmNnzGg,1053 +torch/include/ATen/ops/argmin_native.h,sha256=m4pfaE1DZj-dF-PLkyyS1kFq6ug0sVZu3fDzvhP2Z18,640 +torch/include/ATen/ops/argmin_ops.h,sha256=IksjHGFxXkr_r44R_M9ZbTM70ukXLfQnV1VpZ4kHIuQ,1900 +torch/include/ATen/ops/argsort.h,sha256=dzYnvfmm4AwPg1KXQhEngXAhOesK2G_9QdEYoISC-_o,1901 +torch/include/ATen/ops/argsort_compositeimplicitautograd_dispatch.h,sha256=01AU0zh6Jk4icz3GnKn9aHR7vLU30akb8s93Cq_ZUjQ,1260 +torch/include/ATen/ops/argsort_native.h,sha256=jhARQH5BFjtiidDTajz02WMUp1VNy1g8M42S_ka9RIo,840 +torch/include/ATen/ops/argsort_ops.h,sha256=JwD_A8RQXMovGoIU4omQORAwtbcVULL11m8xmgquQvM,3278 +torch/include/ATen/ops/argwhere.h,sha256=VrnoTpmAr6YVi2ls1iyPhyd2mUslScyATf46h5hPM8w,622 +torch/include/ATen/ops/argwhere_compositeimplicitautograd_dispatch.h,sha256=oEmh9Sazbi83mlQrioLOkXjuaGywuSxfAar2MLfTUkg,766 +torch/include/ATen/ops/argwhere_native.h,sha256=g-PPOmW9_h28xP3-C5cLalvphpDP8t54hVzjDNDBKFM,478 +torch/include/ATen/ops/argwhere_ops.h,sha256=FiKHmo4Q5NhVVCxfYYmT6-AJVCZaelmqOms9kE5Rm8Y,973 +torch/include/ATen/ops/as_strided.h,sha256=r9FjwClvfuMJe1zJc7nZ1qx5YlTxwOYQQHodmDwDbqQ,3875 +torch/include/ATen/ops/as_strided_compositeexplicitautogradnonfunctional_dispatch.h,sha256=HPd3b1oBOl8XHOox4HGpcw5xXFrTrlRA9uHgMwo_06E,1093 +torch/include/ATen/ops/as_strided_copy.h,sha256=9Fb9pZNOLmciNiAQVTyL7ETvIjD3SMPNkTSPSzUGtik,5937 +torch/include/ATen/ops/as_strided_copy_compositeexplicitautograd_dispatch.h,sha256=UxWcI1GGzAQzst5B1Oa2BLy6Tr9NJgcYaqRJNzFmScs,1476 +torch/include/ATen/ops/as_strided_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=phK0t5P3lVOaI12M1rc7eCH1ZwP558sT4qbau2TdZ-c,1085 +torch/include/ATen/ops/as_strided_copy_native.h,sha256=Fxa40pSGkNv8FB5iB3IWkkkQ7U4XxbsX3kNwzsiP5xM,799 +torch/include/ATen/ops/as_strided_copy_ops.h,sha256=zDAM4dTtB5PlgkvqGBb5k6VgIMjXLLhb-9WqJaJ6zQA,2306 
+torch/include/ATen/ops/as_strided_cpu_dispatch.h,sha256=1WC_g8Xv100fiG8lWOFWwlcnkNEZDxPKYDipTHQQCsM,1005 +torch/include/ATen/ops/as_strided_cuda_dispatch.h,sha256=XgcapudRoxD8W4b6AuifAaLs4mwU81QLzmemEWSvtgQ,1007 +torch/include/ATen/ops/as_strided_meta_dispatch.h,sha256=I53Fha1vmdOAZR3DaihknkzBpLQC0wDi2unBqsGcmOU,1007 +torch/include/ATen/ops/as_strided_native.h,sha256=GqADkT4XFW5V0Ma6Sd8ktKqa1GVfUB0Tuf2lPXE1Kdc,1148 +torch/include/ATen/ops/as_strided_ops.h,sha256=1Vs0WNVyvnBocs6I_w938rZs6EVyJ1Z5Bfsi2YPWjlA,2227 +torch/include/ATen/ops/as_strided_scatter.h,sha256=hiLA0K1SOU8HGRyH4XH52F4U5OlPcSBJ9iYdPzts4dA,6450 +torch/include/ATen/ops/as_strided_scatter_compositeexplicitautograd_dispatch.h,sha256=Vp7iL-jdE3ePiphziJzrAVSR6Eg2vQ2gWGtgEfYRLb4,1584 +torch/include/ATen/ops/as_strided_scatter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=-rIFcMBQllSXfR-O2MP4Qp4Eyyi5w8UcArcEo6QqtKU,1139 +torch/include/ATen/ops/as_strided_scatter_native.h,sha256=sKfR2Ddepd5hxf6Y6jIWF_jb_qtZhSZoXOxy_E_7kfw,853 +torch/include/ATen/ops/as_strided_scatter_ops.h,sha256=cvH3wLR2ZpxdzMWvZlThTTzMgWp4ZSjWy1AP88ogkDM,2484 +torch/include/ATen/ops/asin.h,sha256=BI7kds7DoFzN6T01UNiyM3CBfaOToVJzkw-Eq7CEVgY,1120 +torch/include/ATen/ops/asin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rQHAdGl_lnlZVEffdFCs2AaXlBmpzAb5wzfcIbds5I8,837 +torch/include/ATen/ops/asin_cpu_dispatch.h,sha256=OuHL9pTRifagYOu7g6iUfFQMJreoJodOWdFbgPVcJbk,920 +torch/include/ATen/ops/asin_cuda_dispatch.h,sha256=qVhBQh2CBSfrBUdf9zFKS-KGDXK26Sl1QIcnh1609z0,922 +torch/include/ATen/ops/asin_meta.h,sha256=aPcqY_UxSVFWO06Qs-W4VKU0fHlgp6SYKCKxcDOaciY,573 +torch/include/ATen/ops/asin_meta_dispatch.h,sha256=TBZ94Bnz43jcXA7HeDKlOcQsRZMPkHMCIFfL3j3dNRo,922 +torch/include/ATen/ops/asin_native.h,sha256=UNQ4jVdM7mmlTSH7K6yq1lTOIc9i-Gp1eZYSdGZZmAc,998 +torch/include/ATen/ops/asin_ops.h,sha256=0Jtnf_pn6Q7BfJ-IwT64m7F17ciYPVL_MS07Kj73HRQ,2104 +torch/include/ATen/ops/asinh.h,sha256=PakQgli-wq4_n9YkzAkuYpRIIkHHp8B7Xor2bXY9gu4,1133 
+torch/include/ATen/ops/asinh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XWUBJVXWCOR1SGb3SMCsbBtGfphnS7peYo66N9DqMNQ,839 +torch/include/ATen/ops/asinh_cpu_dispatch.h,sha256=vaPhg0Yu4RhDrwXY9uMyvJW6cXCSsEN-Axudj9Z5Gx0,924 +torch/include/ATen/ops/asinh_cuda_dispatch.h,sha256=HhALrALFbQq2j0EFNZin2Gy0ofneP_RMuHZn2uWTjQ0,926 +torch/include/ATen/ops/asinh_meta.h,sha256=8T2fwi1r1DOvxtuF9q_PIjQGBubMsPHQQ-Iz12qiRsU,574 +torch/include/ATen/ops/asinh_meta_dispatch.h,sha256=ruiNpAsP9VM-FNYJnMUtSNrT6I5Sif_czWTS0GpSMLA,926 +torch/include/ATen/ops/asinh_native.h,sha256=qegUFDPDrx7HV5s9IF2YyswB-XdmhjDE3-nAuGCxKdM,1007 +torch/include/ATen/ops/asinh_ops.h,sha256=1TGQXVsHDV_WAJCq4TsRH79uDgdbIR3dJlLBBcRCAr4,2113 +torch/include/ATen/ops/atan.h,sha256=m2b40AM4-obN_DUrkf8n8LbfDZpHaJin10-lNPvzaio,1120 +torch/include/ATen/ops/atan2.h,sha256=mNqdQ38vHFLz80lF1DUlgxDe79_ybyordkrwQnt4eeQ,1135 +torch/include/ATen/ops/atan2_compositeexplicitautogradnonfunctional_dispatch.h,sha256=YqjnXLlGksLr4zjEjpY71TGCLDd_j8c6wekt-xoRi8g,891 +torch/include/ATen/ops/atan2_cpu_dispatch.h,sha256=zMYn6YJFoYVrRH1gECfH-7d8fQIy0pnPRyom2Fx6CT0,1028 +torch/include/ATen/ops/atan2_cuda_dispatch.h,sha256=xTWeq4ixt7FJW7kqKHfBxGm2B30LWHz3Me9ibvIuuv4,1030 +torch/include/ATen/ops/atan2_meta.h,sha256=pTHj9D2RD3kVoPbtrAyWerzdGW-9XOH9LY3dfyiIbCo,600 +torch/include/ATen/ops/atan2_meta_dispatch.h,sha256=0FeW_H7IsRfktHWg5g928-SR-M3tSnUhxwRBTwDVQRs,1030 +torch/include/ATen/ops/atan2_native.h,sha256=T2inn-cofqEZlA7ON21w5e05KWGiSYY2A-NCs4T-2HU,619 +torch/include/ATen/ops/atan2_ops.h,sha256=oPgNhC4iDShrIVTxeQsF3Ite2xfq7X8uZ5_VEk56hnU,2371 +torch/include/ATen/ops/atan_compositeexplicitautogradnonfunctional_dispatch.h,sha256=gOueKxq4BYkz195quAbAgPb9RC-j40NJoguAcxD7Qv8,837 +torch/include/ATen/ops/atan_cpu_dispatch.h,sha256=RsD0iE3lk2CMWuWAqHSt9ms0l_vLIWDmG3ZacMPf5Qg,920 +torch/include/ATen/ops/atan_cuda_dispatch.h,sha256=-0BMCkdLIhuV69cmGYNL6geP-n2cgn8CBR2IruVHQ5A,922 
+torch/include/ATen/ops/atan_meta.h,sha256=atuYkxScStk9mvsW2P2ePNROIZlhav5ZqsOAJp6eW98,573 +torch/include/ATen/ops/atan_meta_dispatch.h,sha256=e3sGwmIE1cTJ2t9XlEKXzh7X_unwMwc44R9bXUn5lNM,922 +torch/include/ATen/ops/atan_native.h,sha256=M6GPqGJLQncjOcaHeLnYU06p0j1ZdtVbw3gAeU88Z7Y,998 +torch/include/ATen/ops/atan_ops.h,sha256=oy_zB32bOoengD4QcXNcksPDGm38xwhduenSGkJqet0,2104 +torch/include/ATen/ops/atanh.h,sha256=Zght7u5br9jr8Jyk56rxDc-2giVEqBkKBLIDuFijjfU,1133 +torch/include/ATen/ops/atanh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Oo-i9SdkZbDJgWn5INR4ery80-el6-uZBItpmm_GU5I,839 +torch/include/ATen/ops/atanh_cpu_dispatch.h,sha256=DkPSF_j83bb_UQOtoNwRdKH8v0Z3a-IxFY2ryYlYS4o,924 +torch/include/ATen/ops/atanh_cuda_dispatch.h,sha256=0pjqsnPEi1m2644blhUV3ATJD-Z5Ok2lrLTzlCodzF4,926 +torch/include/ATen/ops/atanh_meta.h,sha256=g-AEShUjUtDRZDj5QCh9R901FSUCCd5Bn6qKg4sUJHE,574 +torch/include/ATen/ops/atanh_meta_dispatch.h,sha256=ie0GfrrAcdXNSxSKl9isvfgjqa9ezsWqoaA4wZkp670,926 +torch/include/ATen/ops/atanh_native.h,sha256=M1WdCLrM8pyuxgmbdJ8W5K4MtIeyS3EZuT3AYW2e7SE,1007 +torch/include/ATen/ops/atanh_ops.h,sha256=_7X3D0VSyw_jU08ktUwx5SfzNqN6zyNC0zYEr41X8z4,2113 +torch/include/ATen/ops/atleast_1d.h,sha256=Dh6I7LIIeX6L-Rl9jgUcXD6SrXSCgQGlQYfg4kyhHQE,819 +torch/include/ATen/ops/atleast_1d_compositeimplicitautograd_dispatch.h,sha256=sabjmZ2Q4zW5Ki65GKr0Q5Jx9i5cug-_a4u6EFccaZo,840 +torch/include/ATen/ops/atleast_1d_native.h,sha256=1Ezi_cLA0OhRsyKxfu8QU7yowH7GSBul1iD3oVDs7nc,552 +torch/include/ATen/ops/atleast_1d_ops.h,sha256=58ALb0rasaRybi3xUXE3mIJgG7UBbT3JbUe9nu3ntbc,1596 +torch/include/ATen/ops/atleast_2d.h,sha256=63kk2L5DOWMKkodFT8b0FGsQLqtx5BexrYDhGRdW_sU,819 +torch/include/ATen/ops/atleast_2d_compositeimplicitautograd_dispatch.h,sha256=Es7bwCTh6K1Hrc1ObtFm19xqK0SEAkdGyBhAeZgdWk8,840 +torch/include/ATen/ops/atleast_2d_native.h,sha256=RsWErgz09xCnUFOOhm_Cx5Pxoq1ddYoWhe1uO5MRKIo,552 
+torch/include/ATen/ops/atleast_2d_ops.h,sha256=yv6WRKaUiFmV2Yk2JFgTrnyFqDS1LQcs-5xehaGKYuA,1596 +torch/include/ATen/ops/atleast_3d.h,sha256=yGtmUVRIcV3HoyRQ2uPCWRjL0h8hKmI4VT-B-tlE_04,819 +torch/include/ATen/ops/atleast_3d_compositeimplicitautograd_dispatch.h,sha256=SRXx--ZpMZn0FFTDEadUZ8qWG492up2Wf4YY4duHnYg,840 +torch/include/ATen/ops/atleast_3d_native.h,sha256=S8B52EfVSjXz-2Mrv2Nvkf4JA3v0iMDmEYB4n_b49bQ,552 +torch/include/ATen/ops/atleast_3d_ops.h,sha256=9XG5HyLn-0rsZMA2fb7l8S4koTXjcK5rZSDApH_vjDQ,1596 +torch/include/ATen/ops/avg_pool1d.h,sha256=ZJqUj5bMx1I3regKAL9LtLQNXj_wBIXvoDpTh-OibOg,931 +torch/include/ATen/ops/avg_pool1d_compositeimplicitautograd_dispatch.h,sha256=NGtYaWPA-VLongy8-xut4L3di2ee0PApSxIjMyZp2wM,902 +torch/include/ATen/ops/avg_pool1d_native.h,sha256=g23MfhYJeKg2KO9b2f2bNqRlQAgoUXM_LG9bX-_AysY,614 +torch/include/ATen/ops/avg_pool1d_ops.h,sha256=m3VLF_jB5PbRHzrnwPJba_z7eMc855AWB15oEYkhXnM,1385 +torch/include/ATen/ops/avg_pool2d.h,sha256=25kUyQDyqkKpF-xbyyoZiVd8NKWdJzDZ2qjJ1hXK83o,2228 +torch/include/ATen/ops/avg_pool2d_backward.h,sha256=kYIkTgcXiTNhhd1LmY1rMsHlEDktZj4yShKidMvvYUk,2458 +torch/include/ATen/ops/avg_pool2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=-Z2q9NgVi6AGN-6ykCFgRRd9R_yW15VKrJrJNVzgRoQ,996 +torch/include/ATen/ops/avg_pool2d_backward_cpu_dispatch.h,sha256=srMr4tpU8ar8bunkx_JaP-V6BoAZ0X_6VXxGLA_c58s,1509 +torch/include/ATen/ops/avg_pool2d_backward_cuda_dispatch.h,sha256=53jzO9nw0TPsiBxaESCn-jUccPGcaQpjioBYR0pbNsA,1511 +torch/include/ATen/ops/avg_pool2d_backward_meta.h,sha256=BOYcagpAoJa3vGOFEdiLJAQKKGJgEojjJ5MdRk_Lfds,781 +torch/include/ATen/ops/avg_pool2d_backward_meta_dispatch.h,sha256=gvz_7H-6aEa_D9wcfbDX_5OgaFi_rOqd83KwUeJLUeU,1511 +torch/include/ATen/ops/avg_pool2d_backward_native.h,sha256=MdqeKZaC7ne_zu3LsUiDOLuWfQYnKlCRc_CveXGxOK0,1776 +torch/include/ATen/ops/avg_pool2d_backward_ops.h,sha256=NjnduYNt_joZk-MR7AScYiLQKvolk4Nuq_wDFii2iMo,2968 
+torch/include/ATen/ops/avg_pool2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2BPsKlvsfeuTQcSj9bjCEiete-Vdwgz2Aw7LG7QvV90,986 +torch/include/ATen/ops/avg_pool2d_cpu_dispatch.h,sha256=473279q5SyVN7Ee5b8LyMDpCeSFhFm4c_LWzbPB0bHY,1434 +torch/include/ATen/ops/avg_pool2d_cuda_dispatch.h,sha256=7oPqxayY1wmj1aZV4ZJVKLjDs0VqaMPFM2hLzm7Beoc,1436 +torch/include/ATen/ops/avg_pool2d_meta.h,sha256=HVyHXsLnak6IHP_-NSCfTmixW75JDld8F3oXWUgOd0Q,3562 +torch/include/ATen/ops/avg_pool2d_meta_dispatch.h,sha256=A6WxkH5qwNf3pKQ3xAj4ByFFLyj1gfw0DCWWre6UNPg,1436 +torch/include/ATen/ops/avg_pool2d_native.h,sha256=ZvUxafeqmxny3uyekY8-yjzyL_5mvmmjeoAEThc9blA,1855 +torch/include/ATen/ops/avg_pool2d_ops.h,sha256=DMCpD7Dwy4uA6RQbjGAMm5nD7mKOHzu0JZW_1iUR_4M,2706 +torch/include/ATen/ops/avg_pool3d.h,sha256=zk_YmH0XG8wsQDT9rPEATwxA6Y_ZtVHA9-0rs2tNhgs,2228 +torch/include/ATen/ops/avg_pool3d_backward.h,sha256=CGmTmgFUIo4hV2WAWEPUtODC6IwF9VjfzScz63DB5jY,2458 +torch/include/ATen/ops/avg_pool3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9821DfL0iyGKYszwkcd5DC97aeLtFKokjoKCNj86fM8,996 +torch/include/ATen/ops/avg_pool3d_backward_cpu_dispatch.h,sha256=AJG0cXUkJWlA9QA0fRP63Z6L4OQABbNjPukS-BXb0Y8,1509 +torch/include/ATen/ops/avg_pool3d_backward_cuda_dispatch.h,sha256=GVMcK9Z1OeM5MeLzLneaFMo8aVMzdcNZGPyEgfCsqEk,1511 +torch/include/ATen/ops/avg_pool3d_backward_meta.h,sha256=HYqW1Cr8nnU04jr2qGwkh7HdxO-WLNh7cJLGqpulOq4,781 +torch/include/ATen/ops/avg_pool3d_backward_meta_dispatch.h,sha256=7j1SkEMKz16zCXui-aYywIj8IjTLbh1xp4snTdZeqdc,1511 +torch/include/ATen/ops/avg_pool3d_backward_native.h,sha256=eqPmZBPlIQ-lSGMtrlNRXrj71GzThb3-QI22Aw4Fpfo,1776 +torch/include/ATen/ops/avg_pool3d_backward_ops.h,sha256=kUC3Jh1wKbmVB44YzooQVu4YEbORXa9BytjeHYQA0O4,2968 +torch/include/ATen/ops/avg_pool3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=aIJ27Nzi1zmgSNSf4Ocba8tUhdLazS-mF38o5qvCnPo,986 
+torch/include/ATen/ops/avg_pool3d_cpu_dispatch.h,sha256=HYyEhPyWgkx107ibAYlP6-a3ymQYdK4jxrpeujL9YTU,1434 +torch/include/ATen/ops/avg_pool3d_cuda_dispatch.h,sha256=ETMQHjmsJK8AUOfEbaGIxciVeAnCOCnGtX3QTuPJQqw,1436 +torch/include/ATen/ops/avg_pool3d_meta.h,sha256=qXWgnJgboSGmpkuBbXU_55X2SpDy0oNmmHT-Ks5kmKA,740 +torch/include/ATen/ops/avg_pool3d_meta_dispatch.h,sha256=ikgqq3uW618qZsWqV-GNh6SGUfBRiT52ETDbOn6T5lE,1436 +torch/include/ATen/ops/avg_pool3d_native.h,sha256=mKQFsMilpG-RpMriwhfJk9zoeEisbk7zR8nhVael2rE,1859 +torch/include/ATen/ops/avg_pool3d_ops.h,sha256=XpASooI-d8t5ho2vFpxQJLyENP_08auKFo_yCOLidvw,2706 +torch/include/ATen/ops/baddbmm.h,sha256=1xS-yvUpas7YTyns1AhH3wo3wPiOimr3gv4gvNF0KOk,1610 +torch/include/ATen/ops/baddbmm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=8Ofyo5U9fpQA2tb4KRJai0j1Sk0D6ilq5kJ0WnWFptI,1061 +torch/include/ATen/ops/baddbmm_cpu_dispatch.h,sha256=9bF7yR6FY-nWl9qxwu6Az1oPbYpqql3ULwyMmpoSsDM,1364 +torch/include/ATen/ops/baddbmm_cuda_dispatch.h,sha256=JOgoWdK9GwbRal6wS-omqqnR9Z05CihOLekwAnC5wi4,1366 +torch/include/ATen/ops/baddbmm_meta.h,sha256=AsbZjBOawkbFAd0jfjLJsCsv4Qn9JusxT0zD5Wy-mc0,681 +torch/include/ATen/ops/baddbmm_meta_dispatch.h,sha256=ssHIzyYP0Iywmv5IUACn6bnK5wNfBZUIZuYBCquGTLU,1366 +torch/include/ATen/ops/baddbmm_native.h,sha256=Q_a1is0kUMdbvUSNshlpiu5UugKEBUTxGu7iyhr86P8,1161 +torch/include/ATen/ops/baddbmm_ops.h,sha256=p5wuL6dyjIRTd7JhOX4J9PDLyj4oV0PodhUo4Vr71tM,3190 +torch/include/ATen/ops/bartlett_window.h,sha256=3fq-o9VQc2ve_9VZV5MQWhtBdtLGcuK1NGRfoWk12eM,3394 +torch/include/ATen/ops/bartlett_window_compositeexplicitautograd_dispatch.h,sha256=49zFEVy7zDDg7G1qI59iXdHn00stIdE_TA6hZmaIpGM,1706 +torch/include/ATen/ops/bartlett_window_native.h,sha256=5xvas8BKnVMOEzp8OZA-QOOUHQYUSQweF1dcshAZQ4A,1067 +torch/include/ATen/ops/bartlett_window_ops.h,sha256=1tLmQ1pmDGXTN3OJIMM6JNRokDBP8tKB-G7xNf9lmbI,3965 +torch/include/ATen/ops/batch_norm.h,sha256=RI3_tmSe_uDUtY8VGNL4-a1soh6dnz37WgZVDZ51998,1098 
+torch/include/ATen/ops/batch_norm_backward.h,sha256=rdQ0HqF8EpDxkbz8hbR1V4JOghs3knhOKstLgB320WQ,1332 +torch/include/ATen/ops/batch_norm_backward_cpu_dispatch.h,sha256=WsmxSSG8yLLZni5A3nbVBLJ3o6Csk2OLZdKVg4K7v_s,1105 +torch/include/ATen/ops/batch_norm_backward_cuda_dispatch.h,sha256=7ez8hB7YTE0MfV3r2ggBzjnThYoHmaqhPgFXqDe2pOM,1107 +torch/include/ATen/ops/batch_norm_backward_elemt.h,sha256=XX9UCKSTVL_EDsfkJCte29Yt1QbuOuGonOeweV9Hq20,2334 +torch/include/ATen/ops/batch_norm_backward_elemt_compositeexplicitautograd_dispatch.h,sha256=L_xRh45pKLZhA-GjC6SgQQDCaVWUsx_vzCw1NPi4Gag,1325 +torch/include/ATen/ops/batch_norm_backward_elemt_cuda_dispatch.h,sha256=DPT9v9x0rCpESlKzBisCIncQfoT-Eq6SV3FNfj9vPc4,951 +torch/include/ATen/ops/batch_norm_backward_elemt_native.h,sha256=iLrfug9Vbqmg9-bnIK18O_oY-03b9-Kp79PV_pGesIw,1017 +torch/include/ATen/ops/batch_norm_backward_elemt_ops.h,sha256=lwwKU_nUB5qleX6F6xVUJ7qtVofeMqIbWUjhYyGLM74,3078 +torch/include/ATen/ops/batch_norm_backward_native.h,sha256=gqkn3vInRy-GHDpzqfUoBjnupQjiZvvnN2ullkJcffc,1770 +torch/include/ATen/ops/batch_norm_backward_ops.h,sha256=yyMeyLyICFyiNUlPN8zxQEHxo4cjQU0VBwY_bgWsPlQ,2222 +torch/include/ATen/ops/batch_norm_backward_reduce.h,sha256=Aydk6XMVrg7TxiE_Uqa0tMCZ2VYA5eVeDH7KR576x6Q,2713 +torch/include/ATen/ops/batch_norm_backward_reduce_compositeexplicitautograd_dispatch.h,sha256=_bW_4vY1LhlP5rSiWDoYSjGpE10w2qVg3HY9bAxy77o,1465 +torch/include/ATen/ops/batch_norm_backward_reduce_cuda_dispatch.h,sha256=JhyX2W3swXOYPA53gZQs1k8Yx9evOvzrinP3VBGTQTw,957 +torch/include/ATen/ops/batch_norm_backward_reduce_native.h,sha256=EGfZxuCf2bc8uNutYkVF3DJq1SUqk5-IzcembyM4Mxo,1093 +torch/include/ATen/ops/batch_norm_backward_reduce_ops.h,sha256=3mWAQ5TryCoZ6WRhb5GUuxRqhTk3LVYPqJjztMcsMbk,3394 +torch/include/ATen/ops/batch_norm_compositeimplicitautograd_dispatch.h,sha256=kRgAkD30i6r6aAG-EJFKcUQzMPEwa0S9zlVFS0OtOqM,1018 +torch/include/ATen/ops/batch_norm_elemt.h,sha256=Kzrun-LrxQ5heg-VNd4FA2ceQdY7c-tV6aS1jY6i3yU,1869 
+torch/include/ATen/ops/batch_norm_elemt_cuda_dispatch.h,sha256=xWfe9fmG92oZkP5YzDbNXTUIid3gjKRHQx23QrE6O5Q,1362 +torch/include/ATen/ops/batch_norm_elemt_native.h,sha256=TrBC_4Mc0wgaNSuLeyWKQ3RNAcA9mmDjIY9e9sfAXoQ,886 +torch/include/ATen/ops/batch_norm_elemt_ops.h,sha256=v87sXQ7pA18jZLBKbCPmET8SpYXUkASmXdUbCUoA8Gw,2638 +torch/include/ATen/ops/batch_norm_gather_stats.h,sha256=bgFDBaaCrBM5woSVkoM5NSTeidVnwGoMT9PFz3T2uGk,2491 +torch/include/ATen/ops/batch_norm_gather_stats_compositeexplicitautograd_dispatch.h,sha256=OXPEiLwwUgvd1clCY7uAR-YFEWfkPrkg7WKHLhK3vL8,1387 +torch/include/ATen/ops/batch_norm_gather_stats_cuda_dispatch.h,sha256=6N5vQeao7VpqFngSDUxWCew2xWmObLi3gyQLeMWf3Qs,960 +torch/include/ATen/ops/batch_norm_gather_stats_native.h,sha256=8_snx08X64Gy9RbpvGUbbN7kJdEDJb1Pdfz-MzEcBDs,1057 +torch/include/ATen/ops/batch_norm_gather_stats_ops.h,sha256=LBb7QX193pmL_Y4URuQC2NzdefKpN9SrSVKl9Hrxnos,3226 +torch/include/ATen/ops/batch_norm_gather_stats_with_counts.h,sha256=UsRl5puFZsMnUhuIJt-uc2dh1eiOc7T_08C99gBIW6U,2662 +torch/include/ATen/ops/batch_norm_gather_stats_with_counts_compositeexplicitautograd_dispatch.h,sha256=Fs464tXO4xaBOkDWGxGzl5w_6v1yZ2iyD8EONzqXmcE,1435 +torch/include/ATen/ops/batch_norm_gather_stats_with_counts_cuda_dispatch.h,sha256=XIMliA_MdBm-HNjybKdlbbCc44T6N7JpyQHD9rlLRb4,984 +torch/include/ATen/ops/batch_norm_gather_stats_with_counts_native.h,sha256=AQCfyTOSZISnXr5SnqEVOyeYAJ9BOMg2tMQMKDCJLdk,1105 +torch/include/ATen/ops/batch_norm_gather_stats_with_counts_ops.h,sha256=PFxV8rYuDqOAsykdsnb0qIg0xRuiF0q_sBhCA0yck_M,3376 +torch/include/ATen/ops/batch_norm_native.h,sha256=H49sdjOBVi3HtJ-lKpARlp8fTLmrsWgTOssg30-Ao_Y,730 +torch/include/ATen/ops/batch_norm_ops.h,sha256=TkcDBzFnl_4WEU3oQnZypizBpmdKsMpr-OpxpdtWvns,1791 +torch/include/ATen/ops/batch_norm_stats.h,sha256=e0OYMTNGkk61ntTdKRTglMvq1ugL-r5Qf0s3tKXlqlc,1404 +torch/include/ATen/ops/batch_norm_stats_compositeexplicitautograd_dispatch.h,sha256=ZAPOV0qBmd-GZ87oVVTWdPecx6jR9YeA_z1gDsbwtTo,1007 
+torch/include/ATen/ops/batch_norm_stats_cuda_dispatch.h,sha256=HOpegR8AfirwAstSrTCTvVDqyNrGWWNDY54bLXZOhYQ,770 +torch/include/ATen/ops/batch_norm_stats_native.h,sha256=HUmFlXHbYLZA7fmZrjOA0crz9OFbp3oI2MkgfA1tVVU,677 +torch/include/ATen/ops/batch_norm_stats_ops.h,sha256=freO-jdCYv4k0MRr3Z1QDyUDFQ-Y4_68lg-RG1JyvJQ,1994 +torch/include/ATen/ops/batch_norm_update_stats.h,sha256=KFdOIlnV24f-o8Nh2Cotm2cqftcbHXSrhxYWWY4Bems,2026 +torch/include/ATen/ops/batch_norm_update_stats_compositeexplicitautograd_dispatch.h,sha256=TJVOKHnbsn1r1p2Y1TeXd0wNxJYYvVxuI7nr3bSG9Vg,1229 +torch/include/ATen/ops/batch_norm_update_stats_cpu_dispatch.h,sha256=IxSC7ZcJMUz-j6Y1kWigZGFvjGVLsJ9rDy-P2q4HI0A,879 +torch/include/ATen/ops/batch_norm_update_stats_cuda_dispatch.h,sha256=YBfHVDxsD8q6qubi7KW7ho_CfELFydIsdOePN9eyW08,881 +torch/include/ATen/ops/batch_norm_update_stats_native.h,sha256=IPj-BWV7tBezarviES1v9SqyTn_BPHQyAXnpIbsPSZc,1116 +torch/include/ATen/ops/batch_norm_update_stats_ops.h,sha256=aoUD0aL2E2ucYNgg1am4rtiHq3qR5lUXcqzzPSqOzc0,2696 +torch/include/ATen/ops/bernoulli.h,sha256=8Ix8riWhak-Teos3NBeDTadEu71fIRnUoK2UMEUGH4E,3210 +torch/include/ATen/ops/bernoulli_compositeexplicitautograd_dispatch.h,sha256=j0NvIHxYeV4KjWOQMbvLc5a7z0p6VU4OL_JPEtnKVdM,1552 +torch/include/ATen/ops/bernoulli_compositeexplicitautogradnonfunctional_dispatch.h,sha256=VXTfBIDDWfhGDAnlPib8v-mSfUSsYuMZGV8ugOXwi9I,860 +torch/include/ATen/ops/bernoulli_cpu_dispatch.h,sha256=GjPMEcU9PNF6ouR0HdWqiDOSWniObN4VWSjIhTrVQ0o,1186 +torch/include/ATen/ops/bernoulli_cuda_dispatch.h,sha256=Qly2J3dg1T2OE1DzSrzJGhEMKv5scwVL_vzh-6mM-tY,1188 +torch/include/ATen/ops/bernoulli_meta_dispatch.h,sha256=xH3DQ9eyiBbUmuBJOOnskDhi8vl-aUqLqqsS3CIuN18,926 +torch/include/ATen/ops/bernoulli_native.h,sha256=PnjfJmH1f06tYf1yUob8XqCKHD5oWRftcBV2YO4oG74,1468 +torch/include/ATen/ops/bernoulli_ops.h,sha256=g_ugB95-IeUQELeZQeYGL2SCR9RFxGxgM5lUWabXqww,6639 +torch/include/ATen/ops/bilinear.h,sha256=3bOlJ1AjLmdcZWmB3cwEony7I8qNaxTboUcIBZSpDCM,798 
+torch/include/ATen/ops/bilinear_compositeimplicitautograd_dispatch.h,sha256=HxUNiGaH-14s8_XWwZTnYKgfWbO_YVvJ9cQQuB4UM50,867 +torch/include/ATen/ops/bilinear_native.h,sha256=pEqHwcxrTbrEl_m4_TpfqPlnWg-Q6DGrOSgaZ5Gg34k,579 +torch/include/ATen/ops/bilinear_ops.h,sha256=OZucn9NXyEcfNxuUnCuG41de8O2no1Qf0o1uyz9eltQ,1297 +torch/include/ATen/ops/binary_cross_entropy.h,sha256=pombuia86runVfcXFNXu2bhRV8DLATy3X_EtsfMCo6o,1709 +torch/include/ATen/ops/binary_cross_entropy_backward.h,sha256=e1WXoyJlCjLeBZ_EYUdGki2YWWBP6kGQYsNYFqZl4WQ,2064 +torch/include/ATen/ops/binary_cross_entropy_backward_cpu_dispatch.h,sha256=uVfJnBuINHvmcXds_-zxZbVAjKO-CO-OuwK5vquDDfc,1372 +torch/include/ATen/ops/binary_cross_entropy_backward_cuda_dispatch.h,sha256=Sl_UHuyC0NLXdOph14CyotpfmcEgIfc9oFQDwxwA8a8,1374 +torch/include/ATen/ops/binary_cross_entropy_backward_native.h,sha256=DJnFZaJSVjcNXcHdYMMRWvf850arAfGcNb4MO6SuZY0,1344 +torch/include/ATen/ops/binary_cross_entropy_backward_ops.h,sha256=S8lZ7FQYzrCNmrorhh7ErpdieFpsUlBV0mV9H9JScWY,2582 +torch/include/ATen/ops/binary_cross_entropy_cpu_dispatch.h,sha256=B9U_i7dGjF8l4QpK-wUQyV_1Vbx7qOKZhfk5Pi-a6o8,1235 +torch/include/ATen/ops/binary_cross_entropy_cuda_dispatch.h,sha256=rc-f3TUSgE7qm7ZWzRREQemX47jrpg5gs3FfS8LlNFM,1237 +torch/include/ATen/ops/binary_cross_entropy_native.h,sha256=ZUTF_aCZixJdOtRx0JN-cpQ1wGJFHHxb9aDkVZzZjOs,1166 +torch/include/ATen/ops/binary_cross_entropy_ops.h,sha256=4y0E90xnhgL91L-Q963D3vCPGaHfZeGxMyOeEuGzajs,2278 +torch/include/ATen/ops/binary_cross_entropy_with_logits.h,sha256=QmO0vNgXUQtM8h6YCRmSCARg0qJVFZ9g0MS9QCBdB_o,2090 +torch/include/ATen/ops/binary_cross_entropy_with_logits_compositeexplicitautograd_dispatch.h,sha256=Vd_g6jTY4rye-gFq_e8T7V7zRuTcVVnIkCWuIsQbvK8,1465 +torch/include/ATen/ops/binary_cross_entropy_with_logits_native.h,sha256=ZVhhi4JAo7K8eurSHotzld3xTaZHSfnLVgUrlxczEXs,908 +torch/include/ATen/ops/binary_cross_entropy_with_logits_ops.h,sha256=0F-2dr0is5Y0VmUpkH0VIpVZR0gIX6HsLpUGJRdkVbw,2666 
+torch/include/ATen/ops/bincount.h,sha256=RT2RX15-cGHWzAds0JlFNNUa6hadx7pyVd_A48u3HRY,1403 +torch/include/ATen/ops/bincount_compositeexplicitautograd_dispatch.h,sha256=sRXI4NoAVpCSbRpSMvChhdDwt1SwZvIMPCHAreQV-wI,1004 +torch/include/ATen/ops/bincount_cpu_dispatch.h,sha256=Z-NpYZzxqGtI7bANxf-pbD5W5_gmTE9ntjZCnRV1ek0,791 +torch/include/ATen/ops/bincount_cuda_dispatch.h,sha256=tCF-foKIdajo_rH3Ip-b07xgKeZe6RYQrJTP5IB72BY,793 +torch/include/ATen/ops/bincount_native.h,sha256=FPhGQlS7h_dS_XCn2Go1uf6PUIaArRsYOAULLPo8wPY,827 +torch/include/ATen/ops/bincount_ops.h,sha256=6x-7vjBRGncdS8OV-izVVCRHaLNSfdrXPM9XA1lG9zw,2028 +torch/include/ATen/ops/binomial.h,sha256=Cm_gPX-Puwbeqbb5xu4dGqrCYqDCnKdQTQVJ0QvsLOc,1435 +torch/include/ATen/ops/binomial_compositeexplicitautograd_dispatch.h,sha256=5yLUwmjkLjrord49nWVRCQJf6HpPWQm5t5xZI1TBX_o,1022 +torch/include/ATen/ops/binomial_cpu_dispatch.h,sha256=7PmfKrnPNNjNH71M0dNn16L_rjjH-vigjRwLUEr-LCY,805 +torch/include/ATen/ops/binomial_cuda_dispatch.h,sha256=6ZLvp5CuFYedrJBWhkyh1IS0dTQC839u-h5Y9SINDMY,807 +torch/include/ATen/ops/binomial_native.h,sha256=9W5GhvOJNHsCKTvcZ-lo40MZZYyZmaxm0awIa5JlEiY,863 +torch/include/ATen/ops/binomial_ops.h,sha256=yQiQt1qdWC9VIaVkFWlHm2AS95G4abEVpyQ0dixjug0,2060 +torch/include/ATen/ops/bitwise_and.h,sha256=DpjX3mZl1TWJoXqGxFORdot0rgLutC_nbQ6Kmrov6cE,2785 +torch/include/ATen/ops/bitwise_and_compositeexplicitautograd_dispatch.h,sha256=8ZaI1H4eg5D_cXL24Bvi7jGQsDcbD5HbSUGog8qBGGU,1400 +torch/include/ATen/ops/bitwise_and_compositeexplicitautogradnonfunctional_dispatch.h,sha256=MJzsMWYVZHkb_5VyR6QjOujDYNDjFMvxA5bGR1eFi1I,903 +torch/include/ATen/ops/bitwise_and_cpu_dispatch.h,sha256=EKMSRJXrsv5kBpkeX3vZpLL7nOd_PhpHorM0coVr9QU,1052 +torch/include/ATen/ops/bitwise_and_cuda_dispatch.h,sha256=kj_kV0ws2q8v7fkOcnn5rNuai5j45Zxc0yiA634Ed4s,1054 +torch/include/ATen/ops/bitwise_and_meta.h,sha256=lD4ajreuE5GiIcAmb4kr6UEUAXRedtR6qqrFaKKp1TE,613 
+torch/include/ATen/ops/bitwise_and_meta_dispatch.h,sha256=Wao3v2z8ZnVqJdDL2fM5AioaxK2PQz-U8U3gxwC-6OA,1054 +torch/include/ATen/ops/bitwise_and_native.h,sha256=pDIRg2pdk4-UTDI_Q4_uR6sDb6A5rsfnkE68kWpVoec,1128 +torch/include/ATen/ops/bitwise_and_ops.h,sha256=BmmlioF68yYciK1hFJGuMqVccI77tcO2lOLtuEWFlyk,5979 +torch/include/ATen/ops/bitwise_left_shift.h,sha256=PLThhCRuG8BOshyiQhRJ4Ym9q2_1I2osELnMzIic9zw,3023 +torch/include/ATen/ops/bitwise_left_shift_compositeexplicitautograd_dispatch.h,sha256=AdNDWeu-6klGNM-OwdXdXixTJpsmOEpx-KERmCnp-64,1449 +torch/include/ATen/ops/bitwise_left_shift_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xznL0Pf7UHLXibz6MhFFPfZTgxTqrueSt7SRfKgcAjE,917 +torch/include/ATen/ops/bitwise_left_shift_cpu_dispatch.h,sha256=Td4Rxgw-afqnp_BWY9OaUuaUFJ27Z-kSI-zTPvyHYNo,1080 +torch/include/ATen/ops/bitwise_left_shift_cuda_dispatch.h,sha256=-h5zYJVAPvIZN37TVh_ldkAjsqoQpKYEvjOLLLAjYj8,1082 +torch/include/ATen/ops/bitwise_left_shift_meta.h,sha256=TeV5-qpMXxRgPu_lMRkFuZqNXFuze7qOYZ_W1COMaX8,620 +torch/include/ATen/ops/bitwise_left_shift_meta_dispatch.h,sha256=eb5xFg84HRrRHn1ubU7lzu2AtnmSndxOlo3lFpOeRTw,1082 +torch/include/ATen/ops/bitwise_left_shift_native.h,sha256=omh-vXxtxkafCdkaRCPUCBDwT0eNyQLoNw1oBaTuckQ,1184 +torch/include/ATen/ops/bitwise_left_shift_ops.h,sha256=hkzyTGjLVduBjtavm6_17ZMRoq71nMcEtME7swaCIvw,6210 +torch/include/ATen/ops/bitwise_not.h,sha256=U1YioNZgydUvJtk0auFBDHnT6bnviDtAN1-eN5ZJPq4,1054 +torch/include/ATen/ops/bitwise_not_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dfVbUdqzflc6_VEoePRAtcu8wYq47L8kN_ceko_WtsI,851 +torch/include/ATen/ops/bitwise_not_cpu_dispatch.h,sha256=X1haEgPEEWAhmDS8eI1eFu26hgsW81IEdCd88L9DVKU,948 +torch/include/ATen/ops/bitwise_not_cuda_dispatch.h,sha256=2OSMuXUSuYRNZrIMTmCcISa-jw6xGvwRo9C7IC0652w,950 +torch/include/ATen/ops/bitwise_not_meta.h,sha256=_2qzIVw3SrZeRW-1IL3UWm9a1b9pfThe5D7g6wKWvIo,580 
+torch/include/ATen/ops/bitwise_not_meta_dispatch.h,sha256=zmQxz6rREY57rwObV5jcWvwixD3sVMKyW3VFUDSaN8Y,950 +torch/include/ATen/ops/bitwise_not_native.h,sha256=9cxhR79Hz3arWRD2dmYg5yEC3RJmkugvKmZEMzNeUeA,611 +torch/include/ATen/ops/bitwise_not_ops.h,sha256=ytleaNcIwLgLWLGrRSLMIEgW-CDzZ-I31gct5MGdtp8,2167 +torch/include/ATen/ops/bitwise_or.h,sha256=yJo6ETHGli6_xuQ062zbxC6sETHk20YlF8TbY1Dw-Yw,2757 +torch/include/ATen/ops/bitwise_or_compositeexplicitautograd_dispatch.h,sha256=JKEgJ0whAcAMbO8NtAt46GUbI-UyQiXVAqHDOIFyxz8,1393 +torch/include/ATen/ops/bitwise_or_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Z1JKmNLgp7K5WmPpXI-oJKMrFrTGHLIogCIZH8m5Al4,901 +torch/include/ATen/ops/bitwise_or_cpu_dispatch.h,sha256=aNXdW5P3YMnd-YnGKwlcz6_jzyRxfVdno8JHOF0B6mk,1048 +torch/include/ATen/ops/bitwise_or_cuda_dispatch.h,sha256=QqkgltCyXXVfabHm7AUK7I6MCvynqL0jsgQ-ruX-nck,1050 +torch/include/ATen/ops/bitwise_or_meta.h,sha256=PTXIcv5V-u65VFMZ2t107ECyPrD_GntxDtRZuABylX0,612 +torch/include/ATen/ops/bitwise_or_meta_dispatch.h,sha256=n4UN3njmnUIPZrAvOcob-GKThUtUpouN20gaPZvMunQ,1050 +torch/include/ATen/ops/bitwise_or_native.h,sha256=RyCVn3f2Au_wN-z5B2JPiJS5kYLO61qzphRCvMwoI2s,1120 +torch/include/ATen/ops/bitwise_or_ops.h,sha256=6HUyKGYr602tD2WkTZI0OKJ03nmjUBoGb_d30PIkp2E,5955 +torch/include/ATen/ops/bitwise_right_shift.h,sha256=pTBdTTwqJrhMzj92L_kgUnMIPJ2xpe1kyRfXL_d-NoQ,3051 +torch/include/ATen/ops/bitwise_right_shift_compositeexplicitautograd_dispatch.h,sha256=Oeyu_Q4mUOeLpeJ6hLvmvW021M-XSTNwbOXTAhQrsgQ,1456 +torch/include/ATen/ops/bitwise_right_shift_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ZLB1Q67deHOZ5bUJYfyETrdQdj21ElCllB4XTdTXkaQ,919 +torch/include/ATen/ops/bitwise_right_shift_cpu_dispatch.h,sha256=98hmqzwBwCeybollq59L1pkfYnPsYJLDEsmVD9ZNPN8,1084 +torch/include/ATen/ops/bitwise_right_shift_cuda_dispatch.h,sha256=3OCzn7HkXoXiXzX8CjCC10oOf4yYqeR74vVFSOHrTJ8,1086 
+torch/include/ATen/ops/bitwise_right_shift_meta.h,sha256=afymOVNf1p0FDE9F4sedpOgr5h4JfTMzzLrj9bhQUhY,621 +torch/include/ATen/ops/bitwise_right_shift_meta_dispatch.h,sha256=SMycmub9tn68MQc92iVR0w1BGeldVvAj1wh9V2IuzLY,1086 +torch/include/ATen/ops/bitwise_right_shift_native.h,sha256=tvhmyE4JgqaYlG7fCun64TqHcdafA49VC70arNWC5SU,1192 +torch/include/ATen/ops/bitwise_right_shift_ops.h,sha256=edmQcHytzGX40reKv5SCX4DDmZzKsAHrprPFXqCbQVg,6234 +torch/include/ATen/ops/bitwise_xor.h,sha256=wg0h19_5OhL1IBAUxPhVpOWZ9okTaTP2rzLUBUMRKoA,2785 +torch/include/ATen/ops/bitwise_xor_compositeexplicitautograd_dispatch.h,sha256=cXHAagz2ZgByCJMCtRiQDqol_MQNDNTnzRifpF6ijfk,1400 +torch/include/ATen/ops/bitwise_xor_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rMofNG8-fc5ojqPyq3YW7EeHRF6qcjAHg-xyO-Z86hw,903 +torch/include/ATen/ops/bitwise_xor_cpu_dispatch.h,sha256=k_hDQhYCc6IQJKGHHZ7W8Z8_Isz7y28HJrNgXjFxLbw,1052 +torch/include/ATen/ops/bitwise_xor_cuda_dispatch.h,sha256=SZV093s2DccxtNOVrKT3ekL-Vz4iiXogTrUIZaCvN-c,1054 +torch/include/ATen/ops/bitwise_xor_meta.h,sha256=lOVWW_oe27kqj02BJlDL3ZOVRrKOIWWqiV6EOWCHpEQ,613 +torch/include/ATen/ops/bitwise_xor_meta_dispatch.h,sha256=7-USEQJJdk4qFc-jZEQAoisApIlk7O9s7L6Db5Tau8Y,1054 +torch/include/ATen/ops/bitwise_xor_native.h,sha256=nCC95mKv_02xynDHSVdOGm0Lpw7nHObhKmrrxdObWC4,1128 +torch/include/ATen/ops/bitwise_xor_ops.h,sha256=d4n-37PghtUXE5VbG9OKgCpbkwQyPVv_zLX-tGp7DHc,5979 +torch/include/ATen/ops/blackman_window.h,sha256=y7I1b9jbeA4a6pJAE3m6_gAIi6h1JQc6rx94OkT50y4,3394 +torch/include/ATen/ops/blackman_window_compositeexplicitautograd_dispatch.h,sha256=oSGfCTqu-iwfvrQYV1zFcmcF53-IFUg_qc_YBqk8B5I,1706 +torch/include/ATen/ops/blackman_window_native.h,sha256=cqw41bzuc36WepI_MfOVonAMkzjKnbYFJ31ga-ylsz8,1067 +torch/include/ATen/ops/blackman_window_ops.h,sha256=AahMdtFHcZon5_RnRZ0hgU6_dF69CHf7pHNp34Dr3tU,3965 +torch/include/ATen/ops/block_diag.h,sha256=-nxmQhlHcK3eEa8_cNO7jjhEVV4eoxhjoUNRZ1w2Ljg,1065 
+torch/include/ATen/ops/block_diag_compositeexplicitautograd_dispatch.h,sha256=-LBKvYSKIsSqmw7cYB_Io13cOf82V2S1pV9EZik8Us4,930 +torch/include/ATen/ops/block_diag_native.h,sha256=0aGcg-VNfgZiI6A7LWRM_Z94gAtxXQaIVzsV4zlUmn8,560 +torch/include/ATen/ops/block_diag_ops.h,sha256=l4JLjof1d39O-oUEMySJRmvBSj3TJtQr036qz3U-JIY,1612 +torch/include/ATen/ops/bmm.h,sha256=oiYDjZX3KlKg6pweX75iHE1C_7PBI7teJP1PsdywFyM,1106 +torch/include/ATen/ops/bmm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=lkPn0Te0SbAOYDsmtNCHddxe9ROOx6MJVuXdKj-PxP8,812 +torch/include/ATen/ops/bmm_cpu_dispatch.h,sha256=Tk5v-4kJbuVK7VkvuF8DGjJ7Fx6GncR276JJqANcUdw,943 +torch/include/ATen/ops/bmm_cuda_dispatch.h,sha256=VkOtk3avLtOl0jva4BcubsGfuzJdaNYmYekAUPTPEY8,945 +torch/include/ATen/ops/bmm_meta.h,sha256=IoaeB2pqkgRVzAVmJgqi-dZ9ua2oNgtVeFo9Us7dnrg,597 +torch/include/ATen/ops/bmm_meta_dispatch.h,sha256=a7XP4cS94lJOIKLhwdGa54UN5gcH5FhLZ7LeLfVGnLU,945 +torch/include/ATen/ops/bmm_native.h,sha256=JW1iOIJZQp3NnDqweJTeMhEddi5aZfdMuJdXINdSMgg,1466 +torch/include/ATen/ops/bmm_ops.h,sha256=saJsW5oNDUmOQUSi9xEWoDdyP0O16MiJEq4WIcdxfRg,1738 +torch/include/ATen/ops/broadcast_tensors.h,sha256=vVI7G_aodFTuRPs0hOcLNgIP32Z4KgfTsNdNxvo8wNo,682 +torch/include/ATen/ops/broadcast_tensors_compositeimplicitautograd_dispatch.h,sha256=2EVZOKuLXaOqBtct3I5TPA5NsexwS7BsyNJYEX3Xmq8,789 +torch/include/ATen/ops/broadcast_tensors_native.h,sha256=RDybWRSb0QIKPExw9BoSZlBfAVRKEdCrWkAm4-Y5_fs,501 +torch/include/ATen/ops/broadcast_tensors_ops.h,sha256=ehTAWOiBPdDKtWik0HirwiKJxlgk014HnCsSiqTlKks,1046 +torch/include/ATen/ops/broadcast_to.h,sha256=DOsFZWHpuK6gSyd114WT42KfZ4dQmUc8q7GjN-XbD4o,1438 +torch/include/ATen/ops/broadcast_to_compositeimplicitautograd_dispatch.h,sha256=rDQbKsedKW7I22IjBMmS2vBlpXzpyNCnuZDMYBETdIc,885 +torch/include/ATen/ops/broadcast_to_native.h,sha256=ZUOLleFoLu9w_GAo_sOdBIL_TgXzw-j7j7RWvHCfdq0,515 +torch/include/ATen/ops/broadcast_to_ops.h,sha256=DsQhBtOk1s23x2qFzyKiFtKCcN57o3zzj49Gvil6_Mk,1079 
+torch/include/ATen/ops/bucketize.h,sha256=4np0Cq1vnhlMToI9JE1SRvxyc55VQMzN-c3QhQHquWk,2612 +torch/include/ATen/ops/bucketize_compositeexplicitautograd_dispatch.h,sha256=shWFqfYbLNpHzLp0RpXHq2GZmSdGEaRVWFZUCzhDh4w,1003 +torch/include/ATen/ops/bucketize_cpu_dispatch.h,sha256=EQG2QlwM8JXWINglJO6oNeBStDj7td5AilcSdw6JVmU,1215 +torch/include/ATen/ops/bucketize_cuda_dispatch.h,sha256=F2wgBJDfZGEJpJDliBI-LipB8cHdVYOIdnKwz5ThySs,1217 +torch/include/ATen/ops/bucketize_native.h,sha256=wZkag-808d-YPQ5mWT9A_oQnC9uMikh9PPA6fomorHA,1388 +torch/include/ATen/ops/bucketize_ops.h,sha256=FOYgQ7sprI6Ca_xHOX4G6jkts8Bb4tchvtzS0Cvh_eM,3706 +torch/include/ATen/ops/can_cast.h,sha256=Lp5f6_r8Lz4lwJbuzT9USuArslFL8ktWDFTORkNOAmg,655 +torch/include/ATen/ops/can_cast_compositeimplicitautograd_dispatch.h,sha256=P-hoK6KYQUmv7L4eH2FOnCu2M2GFIi7a8HRgj9WR_20,776 +torch/include/ATen/ops/can_cast_native.h,sha256=agyyDktdQMq9pwx_G4-qAiGTG2OfJy49CSXXwnRhzIE,488 +torch/include/ATen/ops/can_cast_ops.h,sha256=F0tvXsoE8n022nCzjv1hAOsUG_78iYAIHvgsR3kBoiM,1017 +torch/include/ATen/ops/cartesian_prod.h,sha256=C14IGXbXv6ZuEqTz-czMJ6NFDX0oFEBNp4NTGlJNTpg,653 +torch/include/ATen/ops/cartesian_prod_compositeimplicitautograd_dispatch.h,sha256=dmymsj4ZqUPsOKgt_d4eaastHOR4f1xiXDui4nLIIlU,771 +torch/include/ATen/ops/cartesian_prod_native.h,sha256=ps_qy-boI_yADdH29ddMlR3BBOCSE0lwsAY4v3m4wjg,483 +torch/include/ATen/ops/cartesian_prod_ops.h,sha256=LMWJEGAovV_6FIi8jlIrqeV3u3eMU9sdDPb32OKupeE,990 +torch/include/ATen/ops/cat.h,sha256=CW4HRzoOBx_8r_SWh1KR5alb4-lsMp7efdv5y1J7_ag,1782 +torch/include/ATen/ops/cat_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dRkwFq1hCtidrynksryEs8L442Z7RFwxTjywwk0rZUY,813 +torch/include/ATen/ops/cat_compositeimplicitautograd_dispatch.h,sha256=KxjO71AjyTb-EdyPDOUZTFEJp6fKFil6SUoNqWTdMkk,960 +torch/include/ATen/ops/cat_cpu_dispatch.h,sha256=_zpLE9puvNocjc75ELNUuJKbRyyZ0K2dYHFN-ks69P0,944 +torch/include/ATen/ops/cat_cuda_dispatch.h,sha256=9JBIkZv7H-4HETgykO21_GSencT12_zgC2YrBpfU60U,946 
+torch/include/ATen/ops/cat_meta.h,sha256=jdXxhooqF9NSQKz7Ir8ARSL7YISP9-dsDHpgaaqd4qY,4926 +torch/include/ATen/ops/cat_meta_dispatch.h,sha256=vAbAQK7lGBPpvvC7nGZ0XgqqecLedHQ_hkzB6SyWC0g,946 +torch/include/ATen/ops/cat_native.h,sha256=3jkuxBRNA7QbNagnNyzycY5AvgL4akkiHgKXXCjLisw,1551 +torch/include/ATen/ops/cat_ops.h,sha256=K7QjsdAt9yaqHsz7JgR5cNkiWmNvI5ztjlJwXmKY3Vk,3025 +torch/include/ATen/ops/cauchy.h,sha256=pDrv92tjT2DEAjM7FF4G8OYcJFdqXreFTW62a9UbcXI,1510 +torch/include/ATen/ops/cauchy_compositeexplicitautograd_dispatch.h,sha256=2Ua1jy1AZbIgYTCGj1fkVYs0FVHnxcQ3Jv1KYpUM5sc,1172 +torch/include/ATen/ops/cauchy_cpu_dispatch.h,sha256=SP1xQoL4gRuA_dxhb8RY4RIkHbmegMEfVVnOrFOwrVA,807 +torch/include/ATen/ops/cauchy_cuda_dispatch.h,sha256=kH2i_Vxa5eVAeeCum9iVSyK4BFNbdCaAviJGzcWyZ-M,809 +torch/include/ATen/ops/cauchy_meta_dispatch.h,sha256=FaIREEwHnZnLHtI5U40hEA8ls2xQzGHWjRdvc0JeOzE,809 +torch/include/ATen/ops/cauchy_native.h,sha256=pEvzBSAz44-OKLaFxLrtEfctwGEUDSxJ5smYkPOMgTE,856 +torch/include/ATen/ops/cauchy_ops.h,sha256=F-9C-cMWG6fm_PetarizG2wfS8KGvO3h-kHSegstmhs,2872 +torch/include/ATen/ops/ccol_indices.h,sha256=dGa8cUK_bK7s3UZLe4xGvSk5hcBNTzPOPkDKfx3O_Rc,486 +torch/include/ATen/ops/ccol_indices_compositeexplicitautograd_dispatch.h,sha256=4WGOOfAKe5FzRJgpgbWPk1dpC9mT6flRTo-jlF7Rgks,770 +torch/include/ATen/ops/ccol_indices_copy.h,sha256=rVrqOCEMGknnA0JO09JsHO6aWZbNmACP11GhcTl-huI,1114 +torch/include/ATen/ops/ccol_indices_copy_compositeexplicitautograd_dispatch.h,sha256=bVxleViJAnqMvlb0AnvtmSsAQfECmNGip_Sf87jmMU0,889 +torch/include/ATen/ops/ccol_indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=3K7KeH9UnDDQD9xzgR5RCuyDyaRymqeSHQHgfU4Yzds,801 +torch/include/ATen/ops/ccol_indices_copy_native.h,sha256=3w1xKqQ6_TwTcfjb7cyhXd_OX4ds0-3qGnDlt45NBGw,576 +torch/include/ATen/ops/ccol_indices_copy_ops.h,sha256=BweOv85XAZhhyDvMGxLl9eJxdBoL14WGTx1AraF4kJs,1656 +torch/include/ATen/ops/ccol_indices_native.h,sha256=WtoEaiob1kifXdBK737kQgcejCPjcAnGiuLBz-3NBzk,561 
+torch/include/ATen/ops/ccol_indices_ops.h,sha256=TCNpDqKhvU__pRQK-u7ALujYLuz9-C6R2tenD49-qcs,991 +torch/include/ATen/ops/cdist.h,sha256=ZwXqblBsSAjPfEd8BWsjUTBEhflfdrufTJ1tSOZKeh4,760 +torch/include/ATen/ops/cdist_compositeimplicitautograd_dispatch.h,sha256=tf1lklD-TzUpRYE0TIfETrhHG9nGwnPqSPVFI2qW1C4,850 +torch/include/ATen/ops/cdist_native.h,sha256=7RNYMvojjrctC0WB0raDqsXBkDHK9uHF4Fvyg0njrAg,562 +torch/include/ATen/ops/cdist_ops.h,sha256=RNwuFsw6RSj2Rj9NXRz5ILg1C5QWxwfD5vlIhcIadik,1202 +torch/include/ATen/ops/ceil.h,sha256=Ylvc5h70UwbZo9chbO6GrGIJo0H1xwJ15sStGJ93A6I,1120 +torch/include/ATen/ops/ceil_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WP7fIJDR0KTZohmjmePgEXX00sN3cbK4u8hi4PCZM8Q,837 +torch/include/ATen/ops/ceil_cpu_dispatch.h,sha256=7zwcrxZGixOXwT561Voq0rQRziNhnK953PwQRbmsvAk,920 +torch/include/ATen/ops/ceil_cuda_dispatch.h,sha256=Tpk2bpAj3aYJ-gptXs2H-4Avb8GqWK_bZWVQa9-OfkU,922 +torch/include/ATen/ops/ceil_meta.h,sha256=1zbO-UEORYNz5LbKcn-CQet602g32t4N3d0tWC6go5A,573 +torch/include/ATen/ops/ceil_meta_dispatch.h,sha256=QQfHlzGmC3pWl94y77QsMZP0SgSMC8cqV5tlvtDf-5E,922 +torch/include/ATen/ops/ceil_native.h,sha256=oummupXxTbLVthscStMJGIsm91ZobPi1zkUuhaR9AWk,998 +torch/include/ATen/ops/ceil_ops.h,sha256=fo4Onq-4ct-r5yZXyTLSXFbmYj4OGmoA-ydeqMlB8r8,2104 +torch/include/ATen/ops/celu.h,sha256=Zt28rsuxax4uyJ7gAicQdAlgZd_peQpF7NPlQuIbaHk,1336 +torch/include/ATen/ops/celu_compositeexplicitautograd_dispatch.h,sha256=d9sUBHZ9CwUxLUt7_FTphTtbiVqNbgB1DCAFPblSESM,1080 +torch/include/ATen/ops/celu_native.h,sha256=yj7x2GKfwZse9NmH-9h3poqOeIC_IMlK32ifXYNfFjo,685 +torch/include/ATen/ops/celu_ops.h,sha256=80eihAqTJDKPDQoTReOXXZSTRqOQIfX1V6daxFSC42U,2374 +torch/include/ATen/ops/chain_matmul.h,sha256=AnyOT1XfJ7AnaJO3r_Rf0QURQLtqC60u81LmXyWXeoc,1094 +torch/include/ATen/ops/chain_matmul_compositeimplicitautograd_dispatch.h,sha256=tXk-x1GHWlZ5qbO2J5K-nr7IReloIamp6TqcWRt3DU4,939 
+torch/include/ATen/ops/chain_matmul_native.h,sha256=gY2BLo6j2zdzYLSQxm6X2FFq-lN26C74XyqXn-UfnQw,566 +torch/include/ATen/ops/chain_matmul_ops.h,sha256=xlp6Mt5OVgW8NnOly4enx93pLjomTP5hSp2m1rKw1ts,1630 +torch/include/ATen/ops/chalf.h,sha256=3rgk8R4J4x5IU0GaCvejJgG9bxvw47SjkyFDc-DrfTI,479 +torch/include/ATen/ops/chalf_compositeimplicitautograd_dispatch.h,sha256=58aHcm9DL2RtTTBuNeo7OpKysSgUyr03Ym_5GkzxDO0,827 +torch/include/ATen/ops/chalf_native.h,sha256=9pecLWB1WCkQCPsNdMsxWyolTOLfpsFN9fkh27Zsyo8,539 +torch/include/ATen/ops/chalf_ops.h,sha256=6wv_Wgvb8ggz3lq8y1sioPVw0wELSoFKlA7uSswjPy4,1134 +torch/include/ATen/ops/channel_shuffle.h,sha256=0zcGnek6oEoCiNE-MpEEfiotGVljOaWxwpyrnogS0A0,3573 +torch/include/ATen/ops/channel_shuffle_compositeexplicitautograd_dispatch.h,sha256=xFU9dYhNLYPJpMfKn_q4cGbvRc5Vk1fyTysp6NINZQI,1146 +torch/include/ATen/ops/channel_shuffle_cpu_dispatch.h,sha256=QK4ePyg72Ho8o65p_egeK-ujnrdXE6obVJruRUgfAAQ,835 +torch/include/ATen/ops/channel_shuffle_cuda_dispatch.h,sha256=UgzPWdfgg7OGtmaGlgejv9Nl7gNTBh20AvJeY_ZuJS0,837 +torch/include/ATen/ops/channel_shuffle_native.h,sha256=f5Klv5bjOPaABtoGO1vB3I8A9P6cTjgYWJz7WyXD2H8,708 +torch/include/ATen/ops/channel_shuffle_ops.h,sha256=rDZpX0e1ZUfoP4sGac703LAyVs0c_zgr382GH_qNhmA,1780 +torch/include/ATen/ops/cholesky.h,sha256=MO7J1tHFBueOVKpRVYX0aP2aV567sUtqS_nBATnqAvA,1147 +torch/include/ATen/ops/cholesky_cpu_dispatch.h,sha256=D58RdBAXHqkk2keK-Dxh3TnwKkOsAE8AJvhg4Fbs6WU,931 +torch/include/ATen/ops/cholesky_cuda_dispatch.h,sha256=tgs2VPSBRRpUsnTj-vm-RmrCWgh1ckRu-K5WDSMzHIE,933 +torch/include/ATen/ops/cholesky_inverse.h,sha256=JaxF44DcyyAPlUNcX4NEH4jNYuhCkXbLeGptEXBN1X8,1227 +torch/include/ATen/ops/cholesky_inverse_cpu_dispatch.h,sha256=DRZm5bNPDnMmwGXtdtRc7Hu7Sk3G9whru8UVL-GUN3U,955 +torch/include/ATen/ops/cholesky_inverse_cuda_dispatch.h,sha256=yuJ_hQntkLoX8PGe4sMY7IVf_vhR5lu-YnAh5KRZCYc,957 +torch/include/ATen/ops/cholesky_inverse_native.h,sha256=olzfLVWF-fnCQKkwWgaiE7YX8G8hVQff_vVOLXHunLI,604 
+torch/include/ATen/ops/cholesky_inverse_ops.h,sha256=lag6qy1nLq6TIwAEYdpxyoyL3VvurLM2-Os9N4goUp4,1746 +torch/include/ATen/ops/cholesky_native.h,sha256=0o6ZCfQI2kOpgpHl8SC9Q4LYp6bhhc6anct_j512GNE,588 +torch/include/ATen/ops/cholesky_ops.h,sha256=opgK2hCt2HAVPeEgGnPdhFgpz4JDkQiC8usdwZVapOc,1698 +torch/include/ATen/ops/cholesky_solve.h,sha256=6z0ZmuGji5-fxsqNRW4FyvcjAC-aDG6h-9Emoh_vGaM,1357 +torch/include/ATen/ops/cholesky_solve_compositeexplicitautograd_dispatch.h,sha256=7LP8DVCZhUR-9GC-shuwTxYYkZlm42sa5qXbAxHK1V0,1074 +torch/include/ATen/ops/cholesky_solve_native.h,sha256=z74F9QisaBsUPpwpVxi4f4rG1mrA2xeusXoW2xe2YXo,654 +torch/include/ATen/ops/cholesky_solve_ops.h,sha256=YyaVFFLYcMr5QjW7_me7ZvuDHDHZ_jsWUVfl82ao_00,1912 +torch/include/ATen/ops/choose_qparams_optimized.h,sha256=uPfc69PEowoF3n66TGCwodsuNjDbdYivbMtrPOpLCJw,872 +torch/include/ATen/ops/choose_qparams_optimized_compositeimplicitautograd_dispatch.h,sha256=RE1in_qBLmU9q-dyCaPFiEziALzkifJw1kLDiYL3t2c,872 +torch/include/ATen/ops/choose_qparams_optimized_native.h,sha256=QWiISZ-t_tL-RdesTCe_EQdlUM0efWvpn45rkMAaHds,584 +torch/include/ATen/ops/choose_qparams_optimized_ops.h,sha256=7PU9tBu6mXzWLinsmsmfuYETXClz4xBkkJVzZPAggzA,1323 +torch/include/ATen/ops/chunk.h,sha256=we5Lvbiots3kSos_xqdO0v0CjGxBVy9IOuB2KHPmi5w,705 +torch/include/ATen/ops/chunk_compositeimplicitautograd_dispatch.h,sha256=Z9whs8-l3niDCyIflFS2476ld4SW4YEQK-EeS8fkay0,809 +torch/include/ATen/ops/chunk_native.h,sha256=Bm7RCdDEmfKip5Ibd6eDsS35YFcdbUoP_b0e0YYm_1k,634 +torch/include/ATen/ops/chunk_ops.h,sha256=-zUEHdV-jnUmwRKMjN75kirjxbVZT9ZXqu4EZ8pRVAY,1121 +torch/include/ATen/ops/clamp.h,sha256=J02vzZIaZMOn3-CNFXnEywKfaTAw6thON9j3Mk_TfTA,2931 +torch/include/ATen/ops/clamp_compositeexplicitautogradnonfunctional_dispatch.h,sha256=T0apPFk4JJOGan9u0tyYaeGJPD7gzUv0ZAebW3U0Qq4,1312 +torch/include/ATen/ops/clamp_cpu_dispatch.h,sha256=-gArl3MHHuvTcwEauQ2kC2g75XvhmKwxwtkSYh-oYjE,1901 
+torch/include/ATen/ops/clamp_cuda_dispatch.h,sha256=qJt9b0in1x9IjCE-7ZCIcsqjv-Tuh9hEbdSx3nt4VvQ,1903 +torch/include/ATen/ops/clamp_max.h,sha256=pQ0cnAAtunwehObxWP-6TAnRMdqgevZmA6bgF2XssDA,2272 +torch/include/ATen/ops/clamp_max_compositeexplicitautogradnonfunctional_dispatch.h,sha256=W91EUPZhWIMWT_xkkeQz25f3Ye4BQ9YLX_7O7uxRWdE,1054 +torch/include/ATen/ops/clamp_max_cpu_dispatch.h,sha256=LDogH0WJh2EAWV4_ijIKirYlA6i87B_kdayAq3r46U0,1406 +torch/include/ATen/ops/clamp_max_cuda_dispatch.h,sha256=kXt-RgfBhjjof4v2D0k5iUYVwz-o3qyAKwPSbMeMR6o,1408 +torch/include/ATen/ops/clamp_max_meta.h,sha256=UZXCzSaQ07f029neIHJA5a89B0H-VY2tZa3Mv-AEAQY,754 +torch/include/ATen/ops/clamp_max_meta_dispatch.h,sha256=w4HHJ5S7uHbWPiIbjWK1BNXcbCzTc1IM7MgnEx3anQY,1408 +torch/include/ATen/ops/clamp_max_native.h,sha256=GNqQyYmuyIajfjObAMmiuN_OtBPECwzWiVyDmzoe-nc,814 +torch/include/ATen/ops/clamp_max_ops.h,sha256=-cc3F-ePLnza9KgJZNKK16yhpOuD4sK7sFu8HsXmLiE,4405 +torch/include/ATen/ops/clamp_meta.h,sha256=0mmjZwAAiAzKqeqdVT8Gc7tStPj20g6pB8KLXEaFYqo,806 +torch/include/ATen/ops/clamp_meta_dispatch.h,sha256=tMUcaMLvNZdAvsYA7riPJRbGp1JtTK5KR0bZO0iqR-w,1903 +torch/include/ATen/ops/clamp_min.h,sha256=Cuv5nseRbabg1HaXqAUkC25dUBIh5Sp0R2fax46vBUA,2272 +torch/include/ATen/ops/clamp_min_compositeexplicitautogradnonfunctional_dispatch.h,sha256=VcAAaMT34NKMJrYZba60ovpPs6FqlyP2C9-notPuqXE,1054 +torch/include/ATen/ops/clamp_min_cpu_dispatch.h,sha256=pWeh2HkMexdrYj8juQmQlpidc-J2dkkaehdcNc7jZVE,1406 +torch/include/ATen/ops/clamp_min_cuda_dispatch.h,sha256=2GQ470IpvGVcnQBastPbzBNsxa-wRs8nMdIbUK4tpF8,1408 +torch/include/ATen/ops/clamp_min_meta.h,sha256=Bt1rca75vcqhMnZ7allFt-_Xg-LnN3IGIB-ocd7apYI,754 +torch/include/ATen/ops/clamp_min_meta_dispatch.h,sha256=rjEqR4kNym9Nt7gI8nKdXNPbZRRdPhpHFL1gJJmj048,1408 +torch/include/ATen/ops/clamp_min_native.h,sha256=5Pb_md-6UJMh_UWxBsSuY5JY9kETH5qkBhiGkP_jjEs,814 +torch/include/ATen/ops/clamp_min_ops.h,sha256=Xyq3iXsYp1IfPLvWKTr_vezfHQQsYdQtpzrUJVgprhE,4405 
+torch/include/ATen/ops/clamp_native.h,sha256=hjObRWFSuXBOgOOMUKM35LaDODO_UISE-zVNV4_u7KM,1033 +torch/include/ATen/ops/clamp_ops.h,sha256=fHVkoIcxh3UPXz4oCxjpivVJuMgvWyx_jHoQvPugml4,5497 +torch/include/ATen/ops/clip.h,sha256=1qFvKUr4QXqbmWYdhn64qhb9JIXVaOETCsP-Y6LAgUQ,2906 +torch/include/ATen/ops/clip_compositeimplicitautograd_dispatch.h,sha256=Woi8jGOoQgAT2x24KBNzyAz5vo6UhXSiycXfsmSQDBo,1937 +torch/include/ATen/ops/clip_native.h,sha256=ELHgT6lORnkgu4pKaJ0-ofTmqZlln9O2YzvkBu7EvOk,1340 +torch/include/ATen/ops/clip_ops.h,sha256=GeuKx2q-m_f4SFpl-JHkM6g3HFePKTO6L9wPtDBxMfk,5479 +torch/include/ATen/ops/clone.h,sha256=jfQjs9X7L-bEXIRvigsWJHAxXo-LnWXPbju6LcQTUmw,1321 +torch/include/ATen/ops/clone_compositeexplicitautograd_dispatch.h,sha256=TQRJ1nYMyn8IMa50d186QfF3FFmPHRScMvbgczaxXDw,1095 +torch/include/ATen/ops/clone_native.h,sha256=xuRpTDumCP6jWHIqTawnYpskOq3dE_wuPrCMhtSdHeE,1299 +torch/include/ATen/ops/clone_ops.h,sha256=6xN6l8GV2KCduHPOGhNo_YwCmSeaHEX8s3Y8TRFSZw8,1921 +torch/include/ATen/ops/coalesce.h,sha256=Eqttgh2t-UawCM8DTierSH864SwwkfE1skPoaqnPlCE,482 +torch/include/ATen/ops/coalesce_compositeimplicitautograd_dispatch.h,sha256=nFzB-bQ0N0MOJHUFrhpowltb0DP2uxWppm6if3mWlck,766 +torch/include/ATen/ops/coalesce_native.h,sha256=Oa86GIAfwYM1nc9iWyRR2mZpFmzsNwqP7vXzbQuJCPY,478 +torch/include/ATen/ops/coalesce_ops.h,sha256=KmwTa2FF-PbDFY3MNLGUQ-FqKzHiEsITfSUQScQd59Y,979 +torch/include/ATen/ops/col2im.h,sha256=wqRUFPLUJyR_AjJLwfDPC1H5KZu8D0nxAcQ5xRII0IM,5850 +torch/include/ATen/ops/col2im_cpu_dispatch.h,sha256=0p1Ni6Xvk1tICZ9Lfl_dX-BDlnbvlZBhu9wCIJ3JQSA,1919 +torch/include/ATen/ops/col2im_cuda_dispatch.h,sha256=cjoEDOZBzvgv4GDYNhFPR6RMuI-YnA7eAnRfJz93V7g,1921 +torch/include/ATen/ops/col2im_native.h,sha256=ZwNkZh1FCdW3xFUksmM5sWi1uAk5uFzE5OClGjY1G7g,1236 +torch/include/ATen/ops/col2im_ops.h,sha256=MGUGJTvKYzo3RgnjykO5TOY_op0fsDui5pHqkrJZVPc,2498 +torch/include/ATen/ops/col_indices.h,sha256=FBSog-WCsZBMvgmLch7FR1T7J4Tb2J8G7BeyToFptI4,485 
+torch/include/ATen/ops/col_indices_compositeexplicitautograd_dispatch.h,sha256=GcY7_tnCy0qrhoDP335cbHGm240I6UcwcrCd1n5-5KY,769 +torch/include/ATen/ops/col_indices_copy.h,sha256=lQeaeW8bDtT-tfC_x43wmnBQ0QJ5pwhUngrFklbZOM0,1104 +torch/include/ATen/ops/col_indices_copy_compositeexplicitautograd_dispatch.h,sha256=U65MR5erMxTs_pIluZKSFHpvZ6hHgcalC-vI-xYgng4,887 +torch/include/ATen/ops/col_indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Xf1aREZaOYqUgVxd2-Qn1IjPYv8L18LmGzKFlh9HHGA,800 +torch/include/ATen/ops/col_indices_copy_native.h,sha256=z5dEMvUyjUxBznHleJFBjOBfhlwhxs80fxqvt6Hi7YA,574 +torch/include/ATen/ops/col_indices_copy_ops.h,sha256=7JmMq4iMovPyKKrbcgIef2lFbpycg7fcO7lkkpvY66c,1650 +torch/include/ATen/ops/col_indices_native.h,sha256=2Bg35AaEL0mmvZFoEs7s-7SQJL98QyA008IbdWX5Tlk,559 +torch/include/ATen/ops/col_indices_ops.h,sha256=pM9TZIz9_V5gGXDwTp_3AztwISpmNECJDCOpIvf2huo,988 +torch/include/ATen/ops/column_stack.h,sha256=EQnGNw12bq_fkMhOVfQbf1tnXTgiegSAGxomTksof1Y,1085 +torch/include/ATen/ops/column_stack_compositeimplicitautograd_dispatch.h,sha256=JXWKV1zOf0K0vJJILsp3bdowyzsxkdk1PIAlTL3OIDg,936 +torch/include/ATen/ops/column_stack_native.h,sha256=f5lYYkq6MXG3L-tMr8qtRyw0RGJ7mooyGKvl5Ioz_Ac,564 +torch/include/ATen/ops/column_stack_ops.h,sha256=kAJY-cQHtEzNluwP0MOlzmKm7gArdC6HUJJgUn2SRfU,1624 +torch/include/ATen/ops/combinations.h,sha256=P3zHvmvKdUzncXt8WIyutTmKtCbXfaH0zK194X20Skk,739 +torch/include/ATen/ops/combinations_compositeimplicitautograd_dispatch.h,sha256=HINIJGIdGdJCnEIXLIEMdqOz8Pb_sUL09bhrFsb2bRs,812 +torch/include/ATen/ops/combinations_native.h,sha256=V97XOLk0vLw51jTrIUkT3JmJu9FMmGQg5nPN5_DFqHc,524 +torch/include/ATen/ops/combinations_ops.h,sha256=Ou4OncrZaNXCWCyQ-t1SaiWWCDk_boERF3wFyDJc-6o,1106 +torch/include/ATen/ops/complex.h,sha256=jP9y8ET8VbcZOGBwAD5msk7BpEonUddzqeCWv2ynuQY,1146 +torch/include/ATen/ops/complex_compositeexplicitautograd_dispatch.h,sha256=FjsHHUPn2jQRIC77XSuQZZzklU-WCHpEexVYM9HJTq4,790 
+torch/include/ATen/ops/complex_cpu_dispatch.h,sha256=-nU9jv1fw1XsrEK7JVNMMsor3ntzNGYO23zDDHgDE6Y,875 +torch/include/ATen/ops/complex_cuda_dispatch.h,sha256=BEbeYfSJrsq0qQcRdwz-Os_fCU_pumWVfLI-QDpL_Wk,877 +torch/include/ATen/ops/complex_native.h,sha256=Q1Gft4e7gxyTdw8peFfjt-Vwo0TLQ_GG2ZMyXeHPkGU,606 +torch/include/ATen/ops/complex_ops.h,sha256=j0j87eF3wJAjHTx_IZ23Y1MP54ep2LgsVH5vUk0hw_M,1762 +torch/include/ATen/ops/concat.h,sha256=ENr6w15IPdavOE8qIB5QVNTADRxeFYjvCcDS7Q0Jyrk,1803 +torch/include/ATen/ops/concat_compositeimplicitautograd_dispatch.h,sha256=XSGUZa5yG4c8FnA7utPF5aMBFpNPzrF-P7nhoxxeuFc,1220 +torch/include/ATen/ops/concat_native.h,sha256=JA60EognDnUJEq_IwfVmLt80elcsZLFzEoMvIYFvRiE,744 +torch/include/ATen/ops/concat_ops.h,sha256=XIapH3VbCTpWyyQHO0U0LaNVWze9R4BfcQOtiPsLmA4,2989 +torch/include/ATen/ops/concatenate.h,sha256=eXu9mtIBu8vRQY2Kra_DWhHLPw8LE--m6HgrsW_CWH4,1898 +torch/include/ATen/ops/concatenate_compositeimplicitautograd_dispatch.h,sha256=T6BBiY0g95HyKHsqrTldtdhEI99tUt2y6Zhwh6Km7-o,1250 +torch/include/ATen/ops/concatenate_native.h,sha256=Qzcddu52uCYEf_H89SX7KBDxh1F9uEVG28IPWq-GmVg,764 +torch/include/ATen/ops/concatenate_ops.h,sha256=--8eAcukqtnJ8lJRZQUacnmRllTsMYiGjEfbX1psUbk,3049 +torch/include/ATen/ops/conj.h,sha256=ICJaB4K4dlaHQCz_25-HbeG7AJSugY1d6RQBRMI3nPc,623 +torch/include/ATen/ops/conj_compositeimplicitautograd_dispatch.h,sha256=gG4OYaWXqTf9ozYuc1YoSj3ATrwz3RKKf9yw6asFJoA,762 +torch/include/ATen/ops/conj_native.h,sha256=nBLarQlkRPHvSQeH4avrf9QOO8spOJTVzytHNiG94ZI,474 +torch/include/ATen/ops/conj_ops.h,sha256=6nSy_L-Sru9NrhANH_IMgM5NOkC6CDh0qh78WGtMmFU,967 +torch/include/ATen/ops/conj_physical.h,sha256=0sK1S39GEvs0uwjXDXCMSr-tNuFAfy_AO0IMFgwguio,1237 +torch/include/ATen/ops/conj_physical_compositeexplicitautograd_dispatch.h,sha256=on5qUc9_taEPD9Krkd4JsZdCoXvZUMQjsyUQvJO2J_o,768 +torch/include/ATen/ops/conj_physical_compositeimplicitautograd_dispatch.h,sha256=VmeGaNahiSzBJT4RTXMB8BDqQLvOZcjWe_xRimIPP2I,771 
+torch/include/ATen/ops/conj_physical_cpu_dispatch.h,sha256=7iNhZm--iZqCsPzG4GWqc9BOPh7maSzZ9hxNtCkJxPQ,837 +torch/include/ATen/ops/conj_physical_cuda_dispatch.h,sha256=GjNPY76piL4f1icyPg4h3C95b9pnFthC2EyXFooNxZI,839 +torch/include/ATen/ops/conj_physical_native.h,sha256=9h_4IOD8MJBhpf8-oL4uALNv8OBMShZ2xP0crh2xzlU,883 +torch/include/ATen/ops/conj_physical_ops.h,sha256=WtTpCmlW3A5vN42JNmIIpilyBOIlcuEEUJycskWgpCc,2185 +torch/include/ATen/ops/constant_pad_nd.h,sha256=U0-vemaYgh8eEWxal5ONWMf0DBXZpZS_cbLQqNOk8is,4255 +torch/include/ATen/ops/constant_pad_nd_compositeexplicitautograd_dispatch.h,sha256=ATkctSPAvOi9sYa5tHfr3rsOl7CMokuNOQvLfF3HsKE,1509 +torch/include/ATen/ops/constant_pad_nd_native.h,sha256=h7Zt3WmcTnGrhKPZG2Roz3HQ13ZGuFbV0Oec8xal4_A,679 +torch/include/ATen/ops/constant_pad_nd_ops.h,sha256=9bVRY9N3p_R5nh4deSiDVIfIygGIJ6ZohcFLgb7nbOc,1990 +torch/include/ATen/ops/contiguous.h,sha256=dT28qbekr7Zy5PeSWrbAQHnqVOXGYFF2vWylaqnBB64,484 +torch/include/ATen/ops/contiguous_compositeimplicitautograd_dispatch.h,sha256=Om4qfc58b6z-wIaMEJygUOkXxc1XKJT3UBJyIooVEs8,830 +torch/include/ATen/ops/contiguous_native.h,sha256=zHyrfkLMPK4I3vZyP0AEEECO1DzI6E1mqUhnqfmC5S8,542 +torch/include/ATen/ops/contiguous_ops.h,sha256=v77OBaWMddf3c_raOwYYFb3nVa4EC-FBeNADmk24wqU,1116 +torch/include/ATen/ops/conv1d.h,sha256=6GW0--o2CkQBZOoSAJA_6JL2eSK7WJ9cq5v91GTYzmQ,4517 +torch/include/ATen/ops/conv1d_compositeimplicitautograd_dispatch.h,sha256=OWBI6dqXwH1nQt1DZXNFpn_jk6OU93QN3XrVhYKduD8,1696 +torch/include/ATen/ops/conv1d_native.h,sha256=A20npVZQQSTglRRt-kHjWJPXsgPmECF9cjd7hGALRuc,996 +torch/include/ATen/ops/conv1d_ops.h,sha256=2dvDeQ3F22shXy4_B8ZcGUiOJHJRvw5p-ZlCWeL2iiw,2720 +torch/include/ATen/ops/conv2d.h,sha256=5tVr0xhPOhvGgrKDJo8Pw86XtLs_hV6LAJi8oHIsX2k,4517 +torch/include/ATen/ops/conv2d_compositeimplicitautograd_dispatch.h,sha256=AOljqpFi1ljH0z2buNtdyJql8vaQ3XwqybwsgGcMcpU,1696 +torch/include/ATen/ops/conv2d_native.h,sha256=G-aEKDkPr6LYHcLXe2HpbqBF7HC1YGMQiQK1Qi8lU4g,996 
+torch/include/ATen/ops/conv2d_ops.h,sha256=BBaIRWEuT-SesRtvvc9s1E_D2UAaPmkUn1orHKPefY4,2720 +torch/include/ATen/ops/conv3d.h,sha256=DVsawFYuTVfPUP5BGoc9wiPVluBtua9B-YRUJ_QAYCY,4517 +torch/include/ATen/ops/conv3d_compositeimplicitautograd_dispatch.h,sha256=Wk1r3G4ngLImQGximQ6E25efY0w1Hoh7DB8JWw8gzAk,1696 +torch/include/ATen/ops/conv3d_native.h,sha256=TxzdeWQX4OGpS1ngVOYP3AEstMzHhbRV1Fz_PMhRD1c,996 +torch/include/ATen/ops/conv3d_ops.h,sha256=m1xbjJ27girszLKDC6Im6HJEaEKwNSgGdpVoFCv0SwY,2720 +torch/include/ATen/ops/conv_depthwise3d.h,sha256=SlXAX67Mo5AF6luccr7I8qtA7U3R8Jb4kU6lRDtk5uc,7300 +torch/include/ATen/ops/conv_depthwise3d_compositeexplicitautograd_dispatch.h,sha256=oqzNaF0y5pgh9xry6i6fXb_uMLuMYLsQ2oVSLtoytI0,1802 +torch/include/ATen/ops/conv_depthwise3d_cuda_dispatch.h,sha256=qs12HHbFYRLGuwPFRLoOlosj4yg1coGbBsPRxrUoF-c,1165 +torch/include/ATen/ops/conv_depthwise3d_native.h,sha256=O7dOP4E2ZouFuoNriLsXuQKHiPkfrx1NZ38GxvLp81w,948 +torch/include/ATen/ops/conv_depthwise3d_ops.h,sha256=b6ATwlyYNYiwxhik2CsgP8QDtDFjJX-9ZeredGuG0gA,2906 +torch/include/ATen/ops/conv_tbc.h,sha256=PBOWcO29Z04wlQIVOkC25HzC16ZfcZ934r7G_ZDwzI8,1397 +torch/include/ATen/ops/conv_tbc_backward.h,sha256=YsfSL2S-eOWRMGlEPG09Qez_7yFfKOxPjbU6fSYyo30,880 +torch/include/ATen/ops/conv_tbc_backward_compositeimplicitautograd_dispatch.h,sha256=6GxVEHNMyW6tottxNnWkUrZxE53IhThRBjiQLDvzbzw,902 +torch/include/ATen/ops/conv_tbc_backward_native.h,sha256=GQFZosWJBme3TQ3PHaOzbAYPoFXhlBod_kcnGFQ7E_0,614 +torch/include/ATen/ops/conv_tbc_backward_ops.h,sha256=JL1k2YE_y183lmlttbhSHMxedpGdo0dRyDy4qfkl3O4,1428 +torch/include/ATen/ops/conv_tbc_compositeexplicitautograd_dispatch.h,sha256=hprKU4WmQ-Xao_LP1nzdSbQ31KxB2cBTP4mZeT9DTz0,1126 +torch/include/ATen/ops/conv_tbc_native.h,sha256=xNBUWxM_nozc3l7AYl5ilnMk8LYf6eAhDb3mA_NdOJI,690 +torch/include/ATen/ops/conv_tbc_ops.h,sha256=wezqCTeMACjDX_li8DUzM29swjUWnHQ8d7A6XkkehRw,2038 +torch/include/ATen/ops/conv_transpose1d.h,sha256=3BGmx9MHTmgYyYf85ruNMcTki5skVssCa13DieI55w4,2994 
+torch/include/ATen/ops/conv_transpose1d_compositeimplicitautograd_dispatch.h,sha256=UthS6txZ_F3D-2rAAmzrnb4jDGklskGMXuLoU4sFMpc,1329 +torch/include/ATen/ops/conv_transpose1d_native.h,sha256=UL8cS2WEel6JZNpxdV_j5xTFNk6ECU-_nBQtSLOTY7g,771 +torch/include/ATen/ops/conv_transpose1d_ops.h,sha256=nH1440B53GC9kNfpwKGWSupHTzaPNaSQLzTfi7uIB1A,1720 +torch/include/ATen/ops/conv_transpose2d.h,sha256=mHgBD1B5U6_dDAt2QaYKKRtcws5JIIOV-4auHL5Ntng,3030 +torch/include/ATen/ops/conv_transpose2d_compositeimplicitautograd_dispatch.h,sha256=PEg-LkSiwoj4SS9kqklSJvIKqSXGXBoCUlHpjAYwXbU,1329 +torch/include/ATen/ops/conv_transpose2d_native.h,sha256=-uZWdT2-58F3xm2FA9WeNokawu2ks--INcQsd4OG9Ug,771 +torch/include/ATen/ops/conv_transpose2d_ops.h,sha256=ESqa6B6E8fjBVE5N_TWrLB0HoCZj-lMoycNAxv5LNkY,1737 +torch/include/ATen/ops/conv_transpose3d.h,sha256=i1ZVCWeGMvxKr13839lA-32V1NGHS1CmeDFjw5hHZ4U,3030 +torch/include/ATen/ops/conv_transpose3d_compositeimplicitautograd_dispatch.h,sha256=8yHU2kA6XYgpz2kJI_V6INFOV07_LZRRj6OCsX2oOo0,1329 +torch/include/ATen/ops/conv_transpose3d_native.h,sha256=ydyrfLYAyfp8C0lwYSxGkMt6kBBJwM7lMxeVWoFMyJ8,771 +torch/include/ATen/ops/conv_transpose3d_ops.h,sha256=_yaDcX3shd9NtN76boyDmLTdMw9aN1PkxYRUuXlpW3Y,1737 +torch/include/ATen/ops/convolution.h,sha256=UOD_P_gAY-lUcdS3XMK9-865La4whgqDRip8XJ2XIA4,8093 +torch/include/ATen/ops/convolution_backward.h,sha256=zuZ-CI6Efd9TPnHQaWTj2O6PPFld86v_F5U_ymw8SGE,11494 +torch/include/ATen/ops/convolution_backward_compositeexplicitautograd_dispatch.h,sha256=-0UXHz1KzXh-H7Pe8e1f0vGLIKTqoqwQa-JcCu_xCVM,3314 +torch/include/ATen/ops/convolution_backward_cuda_dispatch.h,sha256=aLZEMnvCvqGljYhKD5L2LJGjzm4sCcISEKCoiOhz_1Y,1446 +torch/include/ATen/ops/convolution_backward_native.h,sha256=gos-5xwF3-TkzLUIoP1HeUewWKyJfIWEiU9gxgnOcUM,1267 +torch/include/ATen/ops/convolution_backward_ops.h,sha256=oYjQRexK2qvLA_1gOH1JOkxurpOIS4K-H_wva2RuFJY,4025 
+torch/include/ATen/ops/convolution_backward_overrideable.h,sha256=mv3a6yOm43mndhg9QrqHt-T1KGhx47aQTXdzppCj0wY,10691 +torch/include/ATen/ops/convolution_backward_overrideable_compositeexplicitautograd_dispatch.h,sha256=GWt8g0mT9y6Tq9Gusu0jhNFOAZ6WinapWWHLnYctkNg,3167 +torch/include/ATen/ops/convolution_backward_overrideable_native.h,sha256=gm7bPCJxpmjFrkawLZ8oRZarIiKZO2e3lRMg3A9UgA0,1218 +torch/include/ATen/ops/convolution_backward_overrideable_ops.h,sha256=ikYeQaFezwrsDwQB8NyllHzzhyy98cj4YVS1Tn-wCDk,3880 +torch/include/ATen/ops/convolution_compositeexplicitautograd_dispatch.h,sha256=j4QR_cFv04nU0_dRcb25DFx9mosqsvziPabeg-x1US8,2503 +torch/include/ATen/ops/convolution_native.h,sha256=y0t3Uovu6Kq5EAqW5ljWv6syGPQOkGtCb592-FJf9II,1011 +torch/include/ATen/ops/convolution_ops.h,sha256=fFsnzgF3ePx50iYWW-K1O0N0mANtgUskNcImVbk0Xns,3142 +torch/include/ATen/ops/convolution_overrideable.h,sha256=omCpmicGRuwdWdwfFAKBdJBjI-HngDQ5OgTzQPIIH_Y,8496 +torch/include/ATen/ops/convolution_overrideable_compositeexplicitautograd_dispatch.h,sha256=WgtJ3zynkTJwbkZWFwq2aDPHKQkjrbpbpVeTuqKI1as,2581 +torch/include/ATen/ops/convolution_overrideable_native.h,sha256=-y0TYZssFcUYRDiGlnON4Pia9S8bpWElSX03-J6rbCY,1037 +torch/include/ATen/ops/convolution_overrideable_ops.h,sha256=2tpP8NhDzgFsN3VSHpDnM7npiGoD15RvxQYZ4S32azQ,3220 +torch/include/ATen/ops/copy.h,sha256=SJvCG-TgrJZ__cZOx5m4KbXt06Y_Jqr2qZHGcBsK6O0,1293 +torch/include/ATen/ops/copy_compositeexplicitautograd_dispatch.h,sha256=VDFyaba0I6QR8Yg9StZHBY4CgstzvxVeyQPfsxTCdUE,1053 +torch/include/ATen/ops/copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=pRa4Ov8t7RUzBl7h3WOwkTPHPjcesg51lwjazDCtCAQ,837 +torch/include/ATen/ops/copy_meta_dispatch.h,sha256=wcPxFfAd6yECGRsuPQVHKDMHkYu-XmHgM5d5ZXCldUw,769 +torch/include/ATen/ops/copy_native.h,sha256=rAUjmKFMvhyt5AgMZmq8fvRCrbL9QegJ9xONvlUV1y4,1285 +torch/include/ATen/ops/copy_ops.h,sha256=imBnwomr5Br576Vh5eyrS2GK9W0YLA73f8MsoDgLtLY,2551 
+torch/include/ATen/ops/copy_sparse_to_sparse.h,sha256=1R8fWI28vi_4WgBaPdhnTa4rjQuUwoJz8l3IwyyDz_U,1755 +torch/include/ATen/ops/copy_sparse_to_sparse_compositeexplicitautograd_dispatch.h,sha256=IEEjBlmvKAh1ItZgSsA0H0UPNjEQXT_VZAj6-YkPWPM,1107 +torch/include/ATen/ops/copy_sparse_to_sparse_meta_dispatch.h,sha256=vhi9mRGJHF2CPBPW9LjmsDnH08quPjkZa-8pPp5oxNk,783 +torch/include/ATen/ops/copy_sparse_to_sparse_native.h,sha256=f61lm-XxVM3R_JrF4N-UMUBiGwPyGgRPMHkZukQ2Tmw,781 +torch/include/ATen/ops/copy_sparse_to_sparse_ops.h,sha256=JiUMVpriD2t5THYB4Y9KkEBo2AKT_kTy_4BdAdOQxtk,2704 +torch/include/ATen/ops/copysign.h,sha256=Js5c6PTCWNvMsCuFzGRzhOH2DO5suxCgyVfVQa4r1-8,1905 +torch/include/ATen/ops/copysign_compositeexplicitautograd_dispatch.h,sha256=Y1mTBzgiIs3DB2TkEd3whbVatSUTnomrFtMPxAz3SR4,1084 +torch/include/ATen/ops/copysign_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sO_Fu4Yucd8CXmcJJ-wm-t8WZwdjEG57WOYQ4FCITYo,897 +torch/include/ATen/ops/copysign_cpu_dispatch.h,sha256=h9zyuXG5gbYPpYL8XcLK-CrNXwcU0V0R_SOi1RIyl0c,1040 +torch/include/ATen/ops/copysign_cuda_dispatch.h,sha256=JdAHfnBrhDrHvXiwfJdx39rBw9qdTl3owMh6E7S5Lis,1042 +torch/include/ATen/ops/copysign_meta.h,sha256=sMHNjDVw8iZqXF8N6-o1XelmlM7vCKoCygseCOwpU9U,610 +torch/include/ATen/ops/copysign_meta_dispatch.h,sha256=nVtC1rX3wIQtjbiUUQfJqnK5w03tHevfth62SoItA34,1042 +torch/include/ATen/ops/copysign_native.h,sha256=Uc1JEIfpBb6MNHUDzEerB0LPOXfxl9unFp1U4JCu0Qk,902 +torch/include/ATen/ops/copysign_ops.h,sha256=Tg8r8fC8cGxFNI7lOSOZ2bV2ZF5NSodfZZZl6CKTHS0,4463 +torch/include/ATen/ops/corrcoef.h,sha256=sPrpIWIhjqzuBXXz11YHcy4XsQ9Wgx9jEIttNwF8bjc,622 +torch/include/ATen/ops/corrcoef_compositeimplicitautograd_dispatch.h,sha256=Qgr8zzSJSOy98Wk5pDDY4duS-gwWzpWCRiyuGLgoISc,766 +torch/include/ATen/ops/corrcoef_native.h,sha256=99zeeRCwtM9i13UNFYqmE-DJp63ZwlxkiUzpl9vYGAw,478 +torch/include/ATen/ops/corrcoef_ops.h,sha256=3svqaxwTkyRZfKzUmndCwBSwwGkcsC9n-_GUuujMn_8,973 
+torch/include/ATen/ops/cos.h,sha256=oy163z3zSUmiZId2qMWA3RlA2yzl53-o-MtA1cbM50w,1107 +torch/include/ATen/ops/cos_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XKVvuZOuv_bPHRX7C4cwM-cfWPODj0pSOvZ1ZGFxhJE,835 +torch/include/ATen/ops/cos_cpu_dispatch.h,sha256=gw277qd8XP-Y724zsxGLvWZHBCHldZxJXMs7ukg16MQ,916 +torch/include/ATen/ops/cos_cuda_dispatch.h,sha256=bqDfjG5_6nHr06NGx5DEwadJSmaiB-qJzokbef0p9Yo,918 +torch/include/ATen/ops/cos_meta.h,sha256=caeV3v1yQ_txhqiAfXVNfPYB9TEj6CnhfPjXOPQhbJU,572 +torch/include/ATen/ops/cos_meta_dispatch.h,sha256=tSRNtVElJrkcfn1ByDc_ochQ6ZAk7DrI-3ZrEloalgQ,918 +torch/include/ATen/ops/cos_native.h,sha256=C4VWh4nfvtz-6DJ4Tag8jALnTrSoo2PT2yuc1AH3F30,645 +torch/include/ATen/ops/cos_ops.h,sha256=1KspHRzN6LgwSy2kNtehQ4btQMjLMZpR7EciXa6POxQ,2095 +torch/include/ATen/ops/cosh.h,sha256=SJ3iILvg4gKVHM7Dbe9Saf8LoQ_ONdLsCkWAyuNFp88,1120 +torch/include/ATen/ops/cosh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2Uur5cBIzb7T_-M4SQ1xA2Us7H7rhSDakvpYrF1eHSY,837 +torch/include/ATen/ops/cosh_cpu_dispatch.h,sha256=I5W4NGGCfpx2W1Lf3jjJn_Hq8tCI0OCD0hzhbvC5ku8,920 +torch/include/ATen/ops/cosh_cuda_dispatch.h,sha256=-nja_h6zd3bKq8ed_lFUz5aAqNhPBtkz5HYO39zYNqs,922 +torch/include/ATen/ops/cosh_meta.h,sha256=8rCpR02rqSNctMOloXiVktPemvtpwVcUs8r47vJHABo,573 +torch/include/ATen/ops/cosh_meta_dispatch.h,sha256=yQDVX0UfehAZOYAEBrIJgDgC-5HjlM96eSEkK8dgbdw,922 +torch/include/ATen/ops/cosh_native.h,sha256=Xm3I6i46RhWd0mOaJtIiCo7xz-daFAR1L3fgAV76ltw,590 +torch/include/ATen/ops/cosh_ops.h,sha256=PAwhLupMbpiseVgzR-2YBW0pepMXIGNhxZvN-kfQmws,2104 +torch/include/ATen/ops/cosine_embedding_loss.h,sha256=TJRlmugP6XKM-_fH6VsLp3tYmAPO6wTkGbmdHE3ZQfI,895 +torch/include/ATen/ops/cosine_embedding_loss_compositeimplicitautograd_dispatch.h,sha256=WHFcFub8Z_kx9KhnXYL3N8SQtZxtyd3ixksFaRlO6u8,893 +torch/include/ATen/ops/cosine_embedding_loss_native.h,sha256=epzsyIrMIXXrn6oMwPR_S7oL0jdv2PzxiSefaNcvq2o,605 
+torch/include/ATen/ops/cosine_embedding_loss_ops.h,sha256=fzR_H035Svh6Fcbo8vBMKEM9My9eEnEm4jQBzzgbjPY,1319 +torch/include/ATen/ops/cosine_similarity.h,sha256=_XkCcUVjoRfTDs4xDq3MW1195DkTTjBqNR7LNBrepQo,761 +torch/include/ATen/ops/cosine_similarity_compositeimplicitautograd_dispatch.h,sha256=g6y8I6nkWoEnPKivCrGWtqMLMQxIBsQtfXSHOu26imk,829 +torch/include/ATen/ops/cosine_similarity_native.h,sha256=wxIjGGjFzGyVyc6koDzwJHWakfz2yHaQCKLvYY29YMk,541 +torch/include/ATen/ops/cosine_similarity_ops.h,sha256=lVQiKaHbQlvcUvnM-cBjkP5ohOm0RhQUI_XmGp1Q_MU,1166 +torch/include/ATen/ops/count_nonzero.h,sha256=AYA6Va1rkwJXzEOzADOU0qHB9qoyj2d144MG_1AVl8A,2025 +torch/include/ATen/ops/count_nonzero_compositeexplicitautograd_dispatch.h,sha256=_wbD-YLA17DmE_Y4Um2bKk9dtpIyzwDz9L39NDq-IMU,1275 +torch/include/ATen/ops/count_nonzero_cpu_dispatch.h,sha256=FBrVfXgU6x3nFFrROZJKjhe_gxOe32MuX6zhEQIwfes,748 +torch/include/ATen/ops/count_nonzero_cuda_dispatch.h,sha256=EE3oYmEhT5abweGJHngTdMQKNV7_XqWr0zoHg4NUWiM,750 +torch/include/ATen/ops/count_nonzero_native.h,sha256=nXzQ8Ozb-9mbhqbIkHLBeDgJdyVrKvbTUpQOp2sZhlg,934 +torch/include/ATen/ops/count_nonzero_ops.h,sha256=T64ouGxoBM2W-EaAq1G9Yc0LIXjXwoEiCNM6R1o3_DU,3243 +torch/include/ATen/ops/cov.h,sha256=DiYsX0fHJ1lRGg6pcWN5iXr778SPOFXvMXgHQxrR74Q,821 +torch/include/ATen/ops/cov_compositeimplicitautograd_dispatch.h,sha256=zXci1815i0BIB8gO88UAdc7m8BGSu1blVh_aI3eC9SM,881 +torch/include/ATen/ops/cov_native.h,sha256=Mj7f2hADsF5VSTiFatzy51ksvDHVWc0S_HuVAPi-Y40,593 +torch/include/ATen/ops/cov_ops.h,sha256=dg_rDs-XSe9-djpqErOj0awTpCGLoDvmAseXaLrCdy0,1332 +torch/include/ATen/ops/cross.h,sha256=kGLycD5LYyAWVH7oylJRPU8S-88RXbWkIrS_IXql-Zw,1315 +torch/include/ATen/ops/cross_compositeimplicitautograd_dispatch.h,sha256=wmpGjZD1KV-SUJjjYYLq2qkw_uYmurwgUQg4un634q0,1116 +torch/include/ATen/ops/cross_entropy_loss.h,sha256=tLNQF09NBbHxKJOBPQBNqdS1dfPWEEjUqDLGM9SbyEg,2420 
+torch/include/ATen/ops/cross_entropy_loss_compositeimplicitautograd_dispatch.h,sha256=Rsj1YTXo_u5IjIehWzMyCxUDPY8M89TkdKAnDvHeNg0,1189 +torch/include/ATen/ops/cross_entropy_loss_native.h,sha256=XD4ujviQce1-ErTK-uf6M6luDhD9NVu5vbhfU6ETPj4,667 +torch/include/ATen/ops/cross_entropy_loss_ops.h,sha256=Z--6bhc194airQfCCPN88Zf7ffyUiEMwvfFcghWYPS8,1479 +torch/include/ATen/ops/cross_native.h,sha256=SlP6IBfRC8USx5MOg_HaXvhVaqiiE7vOtu9K0tEX0sc,679 +torch/include/ATen/ops/cross_ops.h,sha256=9MGOyN6Ik6qv9aWJUd2Kom8hVqce44sEl5lAH6Cxg2A,1958 +torch/include/ATen/ops/crow_indices.h,sha256=dB7jpFVxYc4jBi7m5qNf6Bz1bNUuqzKmcU0d2TAXjL4,486 +torch/include/ATen/ops/crow_indices_compositeexplicitautograd_dispatch.h,sha256=_w78fpD5cPDBFa5l2dFfGTzKOBPSx1jMppVkqzJ-PyU,770 +torch/include/ATen/ops/crow_indices_copy.h,sha256=QNp0ZSn-hoFPmxaj52EOrAtARFREeJFvKLgFSMhtAUw,1114 +torch/include/ATen/ops/crow_indices_copy_compositeexplicitautograd_dispatch.h,sha256=LEDe0wumTfyPrFVFsmJz8FomL-_aWjXLgfmg3put_kk,889 +torch/include/ATen/ops/crow_indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dor4GEDzsCoPlSlsDgkJFeuLcKD4k-i-dWyLIFgHUrw,801 +torch/include/ATen/ops/crow_indices_copy_native.h,sha256=5qEEVt8jqq4AXopJIj1aPUlcfCK_RdJssCnUKfnEOas,576 +torch/include/ATen/ops/crow_indices_copy_ops.h,sha256=lsEF3UjL0ui4H8HP5ZajAsJfQGDW5WVqNsmbb5okT9k,1656 +torch/include/ATen/ops/crow_indices_native.h,sha256=EeoDXu8TljPXikgU1HCo2G9oJfpOZfAth75vkhGT9Ug,561 +torch/include/ATen/ops/crow_indices_ops.h,sha256=36t3TF6AYe_mVo7wWA88kZwXQu_uupUJtdxglVl315A,991 +torch/include/ATen/ops/ctc_loss.h,sha256=dr7Ze5memi1R3-HbrsLG8DlVCL8M-eXKpPfkFG9KubU,1559 +torch/include/ATen/ops/ctc_loss_compositeimplicitautograd_dispatch.h,sha256=lodC6QtfIpGZWarBKx1EU-y_ZEKhUrjerqhrkNgiWUU,1184 +torch/include/ATen/ops/ctc_loss_native.h,sha256=a-uO2H83XRPc0e6dzF0MZjTjMjV70MbLMmy5vLxUsQk,896 +torch/include/ATen/ops/ctc_loss_ops.h,sha256=tam1NB_sGcegE5oh9ZpX6WS7oM3ztQcsFOzz_IPnKXE,2577 
+torch/include/ATen/ops/cudnn_affine_grid_generator.h,sha256=ybYb_d9NBYeWf2ApFxRTXY7a-R9t4zDqtxkCUrWroBI,1480 +torch/include/ATen/ops/cudnn_affine_grid_generator_backward.h,sha256=YbcGe7VjrA6ExsztB8v07wazHNJ44WZif5x4CjCNvlQ,1567 +torch/include/ATen/ops/cudnn_affine_grid_generator_backward_compositeexplicitautograd_dispatch.h,sha256=omgVfGaYsaVWSRcW0IB3KYtsgv8tj7uY_3ok0Udwmuo,1015 +torch/include/ATen/ops/cudnn_affine_grid_generator_backward_cuda_dispatch.h,sha256=6c_zsSS4UaFFGCahgjtd-5WGJASaHHpFX-n89fHjLkU,796 +torch/include/ATen/ops/cudnn_affine_grid_generator_backward_native.h,sha256=IE6FyUT7lz4Ce7VO-p0jk3GLSBUCiPuzA86rkCRASP8,702 +torch/include/ATen/ops/cudnn_affine_grid_generator_backward_ops.h,sha256=-aRwVQji_g2Qov33XjUVMi4wg-3GzmqL8fmXTG2n9Bc,2085 +torch/include/ATen/ops/cudnn_affine_grid_generator_compositeexplicitautograd_dispatch.h,sha256=rXr_id-lUlL0PZLDFAndWFduVVPk9zmcBHSbRU54_34,999 +torch/include/ATen/ops/cudnn_affine_grid_generator_cuda_dispatch.h,sha256=Mg-Zbx9D4KO_RHKckBZyuaB5ZnmL5k_ybw-VOv9BAho,788 +torch/include/ATen/ops/cudnn_affine_grid_generator_native.h,sha256=lzXW8rZ8dCzoYdyMgx6_mhBKTXYZm_O-jmLC6rnH7Wk,694 +torch/include/ATen/ops/cudnn_affine_grid_generator_ops.h,sha256=lqPhdrBfovnJl1upr6QbHLRy8MZg7Rkzrr2jREInA58,2031 +torch/include/ATen/ops/cudnn_batch_norm.h,sha256=9KJlJnoDYxaA61eo9BkHaHaGaWEw5i-8AWT6V-3A_Ks,3000 +torch/include/ATen/ops/cudnn_batch_norm_backward.h,sha256=n_qHoiRtWieOhgRiowQD5ZYEk4AK67M3SqHKT2kDGFw,3192 +torch/include/ATen/ops/cudnn_batch_norm_backward_compositeexplicitautograd_dispatch.h,sha256=M9ynmo58XETZga4xOR3FXE3aBlQbNlrD0N5BplXZrLg,1665 +torch/include/ATen/ops/cudnn_batch_norm_backward_cuda_dispatch.h,sha256=1Jy1BZmfFNPIENyXlSWYNjb1alVoV9xRX4fkq92k0P4,1078 +torch/include/ATen/ops/cudnn_batch_norm_backward_native.h,sha256=7Npgj5F2WPrfflAuw0UZ8e9g9Etcx-0Ua6FizYdqteg,1309 +torch/include/ATen/ops/cudnn_batch_norm_backward_ops.h,sha256=CvPwl4Zi-6t2y37mniFYIyRpQeTof0XYsqP8zZAyQw4,4057 
+torch/include/ATen/ops/cudnn_batch_norm_compositeexplicitautograd_dispatch.h,sha256=2_8gBdeAkFD0894w4NoP1AIRYDwpK3_wi2yrvQDSKRc,1579 +torch/include/ATen/ops/cudnn_batch_norm_cuda_dispatch.h,sha256=oT_kKmlSZdQcoCznpDQu0A5Lmx3MFoRohRsYKl_yXRk,1014 +torch/include/ATen/ops/cudnn_batch_norm_native.h,sha256=RrfG2G2A4aGqe-2GteoUoA84VbPpKNRLs7B-4beyKUo,1202 +torch/include/ATen/ops/cudnn_batch_norm_ops.h,sha256=ZJSRGttSpRwgsf6fzII9SK9PSc86PgRyJiXkMP330bQ,3744 +torch/include/ATen/ops/cudnn_convolution.h,sha256=56ZIJWMZ2NwfbQF1bxtzhfwDDW9qYtk9uoWrs0wHOcE,7643 +torch/include/ATen/ops/cudnn_convolution_add_relu.h,sha256=0v9GIUlaI778-zxaZ7991utXiVUz15uxOcufg9D3IlM,8222 +torch/include/ATen/ops/cudnn_convolution_add_relu_compositeexplicitautograd_dispatch.h,sha256=FiQhO8AgxCUruvfWvcbtSS-K0kOz2A1B-HDsWSnGHXk,2050 +torch/include/ATen/ops/cudnn_convolution_add_relu_cuda_dispatch.h,sha256=kqV74BIl8sUnk19R74cOWVQe8u-cDaMJW1dzEKEsZZo,1289 +torch/include/ATen/ops/cudnn_convolution_add_relu_native.h,sha256=kZGEh6lbA-WNptIbx5dp-HJOqGMs5mrUq2xfElw-haE,1067 +torch/include/ATen/ops/cudnn_convolution_add_relu_ops.h,sha256=6ArUXQWc8pUcKSC9idc9LEoqo8yluge03zHAGPCzwVQ,3300 +torch/include/ATen/ops/cudnn_convolution_cuda_dispatch.h,sha256=YaFY9gjKM4k1dNQOLAgTlELXPHQnZOONzBjMOnWfwrs,2251 +torch/include/ATen/ops/cudnn_convolution_native.h,sha256=AxjtJMj0uaUDs9oiX7XQwmN5H5S2BUV8lIae8HLy5H8,918 +torch/include/ATen/ops/cudnn_convolution_ops.h,sha256=w5C4E_SBMlb_J17X-IAzhd3IHo0IiFIzoQCNHLFDl2I,2906 +torch/include/ATen/ops/cudnn_convolution_relu.h,sha256=cCnPOztEZr0C2SW67Uzm5GY0xb3tdNx7Ndgl-kkrZJ0,7048 +torch/include/ATen/ops/cudnn_convolution_relu_compositeexplicitautograd_dispatch.h,sha256=E8-Dw2wHlMtfb6v1T01xscasJkjpA2BLZ6PI2V2b5Ws,1774 +torch/include/ATen/ops/cudnn_convolution_relu_cuda_dispatch.h,sha256=CzOW78g0C5EYt5F0m4ZNZT979enGWKyrErbhPx4Mu0A,1151 +torch/include/ATen/ops/cudnn_convolution_relu_native.h,sha256=8YO2RDAOce-3OxqgMraSfT9iCaen0JHmIDYqU_VeuhI,929 
+torch/include/ATen/ops/cudnn_convolution_relu_ops.h,sha256=cinQgL-epDn59sXAorT6EQCtCPi5deSRPdVyU4usHwM,2852 +torch/include/ATen/ops/cudnn_convolution_transpose.h,sha256=ki9ErwgWBEH4r1KIO0QjnIBDCYZVnBkzPSFbNfIhucc,8859 +torch/include/ATen/ops/cudnn_convolution_transpose_compositeexplicitautograd_dispatch.h,sha256=Ak52MBqjj8TkxX5MtRq7zLDR_o3c0_SdQJXU1VDvbTc,1974 +torch/include/ATen/ops/cudnn_convolution_transpose_cuda_dispatch.h,sha256=xmvTx8EkFeA4j-xcn-a1SHY6LZr0XCE4xRAPxxn59T8,1251 +torch/include/ATen/ops/cudnn_convolution_transpose_native.h,sha256=kPQrDnV0hsSLUwqbBHdC96NYjpXeBRSksxVa_sfR9GI,1029 +torch/include/ATen/ops/cudnn_convolution_transpose_ops.h,sha256=-euPsLDgfpwp5nafk1mKgSbN2csnwG3PSo3rNqX4wNw,3202 +torch/include/ATen/ops/cudnn_grid_sampler.h,sha256=4MfOw7VD9vCrfnW-dEgn-aJY2unwbuVm6yPpRvYjmaA,1263 +torch/include/ATen/ops/cudnn_grid_sampler_backward.h,sha256=HDj0FGmZUCAFnHwSa05JGoaercdsdNUryG62ftnGL1k,1768 +torch/include/ATen/ops/cudnn_grid_sampler_backward_compositeexplicitautograd_dispatch.h,sha256=blb1k5yEpgDhv-rEyyTpUiJ3PcvMuOwe2aF3lUvf7HE,1117 +torch/include/ATen/ops/cudnn_grid_sampler_backward_cuda_dispatch.h,sha256=6uvFt8Ug12xf33J9ANxaaRw67-qmJMwSXdSY69QXI50,825 +torch/include/ATen/ops/cudnn_grid_sampler_backward_native.h,sha256=hSy5i-E4TbXzfJpHPg1dJeQxPUVRT03xGucIqBmHXO8,782 +torch/include/ATen/ops/cudnn_grid_sampler_backward_ops.h,sha256=dKSQl1gW3FlHQX979J9yv-d_wvsjnVx92rJb79w_iog,2362 +torch/include/ATen/ops/cudnn_grid_sampler_compositeexplicitautograd_dispatch.h,sha256=sX_Pp6YMhGvDf0YSUuEeayTbV_Aqvp9oC_gHZ-HnzKI,941 +torch/include/ATen/ops/cudnn_grid_sampler_cuda_dispatch.h,sha256=YwZPDzrrJAefCUs8Vg15yk7OdmObQI4xsKjJ2veupUI,759 +torch/include/ATen/ops/cudnn_grid_sampler_native.h,sha256=2BBgEo_wr3TOH7dhAFLcw6_vWqKTnealg987CdlIB9Q,636 +torch/include/ATen/ops/cudnn_grid_sampler_ops.h,sha256=yO2grglRr4SBRLFnkpDbI7ri_5xKFLs1TY-hLkurWCs,1835 +torch/include/ATen/ops/cudnn_is_acceptable.h,sha256=AvNHoMC4VgK5xGjFknZbWH_6Q1W6R-TiNlf3S0lks4o,658 
+torch/include/ATen/ops/cudnn_is_acceptable_compositeimplicitautograd_dispatch.h,sha256=E0ZgfkRQcvvGaP34ONxiQn1PtPZlFmH9nyYCz3-wGUo,771 +torch/include/ATen/ops/cudnn_is_acceptable_native.h,sha256=TX3GXpOxkrqKJ7cV-fzeGjdkwusjYyp6x1TAMetroaQ,483 +torch/include/ATen/ops/cudnn_is_acceptable_ops.h,sha256=D3zNVwsWkeKGpVO8FW9NNku6YIOZMskTO2sV9NDD-gY,986 +torch/include/ATen/ops/cummax.h,sha256=lAdzXk8TScxMaZeNTtAKl1P9gFrtS4jkhUF9vEwnHmw,2327 +torch/include/ATen/ops/cummax_compositeexplicitautograd_dispatch.h,sha256=JAsbyGjv8yHd9qvsIub_amJCvIqUy7kHnWUMFa1qpDk,1089 +torch/include/ATen/ops/cummax_compositeimplicitautograd_dispatch.h,sha256=ysKrZo_zIiCtP2NM7FWbx3t2jjXesY3qvwVU6d4PY8k,1101 +torch/include/ATen/ops/cummax_native.h,sha256=Ce4hkVZlCp-tENKtyiK5NPtmBfl_Hs7kzOxUj5rIWD0,900 +torch/include/ATen/ops/cummax_ops.h,sha256=S2OwuixN8NCm1P7w1xUrbu2B9nV-Bzz7gSu1qzB_6kI,3595 +torch/include/ATen/ops/cummaxmin_backward.h,sha256=cH0PeQMIYtfj3bs9_8bMk4M_PuMG1MRDIOjcMi33mxQ,789 +torch/include/ATen/ops/cummaxmin_backward_compositeimplicitautograd_dispatch.h,sha256=zgUxDvn3JeRV1T8HH7wlJ_SOmfWDblm-aRCdDuRLD6A,843 +torch/include/ATen/ops/cummaxmin_backward_native.h,sha256=fLfwo45uwaw7aYOrSZPKdDk3U8ILCYWHhMlAWBi05x4,555 +torch/include/ATen/ops/cummaxmin_backward_ops.h,sha256=Loa8RpajstYXErtEYtUKvzDBpDwpTNnpUSPxT2GVHcE,1225 +torch/include/ATen/ops/cummin.h,sha256=Fos2sjmO1MtJFhI4WseiJTCkkJpncI4AqZEhfWwNOBQ,2327 +torch/include/ATen/ops/cummin_compositeexplicitautograd_dispatch.h,sha256=kc3U0-PI7dbqXr1f0IXzDXAdz29cGdqR4FhmKxkXw2o,1089 +torch/include/ATen/ops/cummin_compositeimplicitautograd_dispatch.h,sha256=-0jp9zEiv4a_fMS1YeyFUKwVVAAKkA_OE9GEkAn3IZI,1101 +torch/include/ATen/ops/cummin_native.h,sha256=4IKRwEzcBBWBCdiZE6N71isg_epEtgiGjNF0QPMV598,900 +torch/include/ATen/ops/cummin_ops.h,sha256=_BNGFROmStBNEyh_qn8vY1XGTGSEFd4OEVG0od2-q0s,3595 +torch/include/ATen/ops/cumprod.h,sha256=pKMufOxGr4gXJebck6O7XBGZwih3dIukDVK5S9Rex4Y,2268 
+torch/include/ATen/ops/cumprod_backward.h,sha256=Zbvdyeo8c1K4fv9wKuVKQeTgez8Q8q-jC7mhcki6Oyg,778 +torch/include/ATen/ops/cumprod_backward_compositeimplicitautograd_dispatch.h,sha256=wLxWUZU6Af4t8iHIxKTlR7jCShqfM-MmgAgxaE0ORHk,840 +torch/include/ATen/ops/cumprod_backward_native.h,sha256=AmPmjCOmUOLazXfhJ7OidBinIrw_CROiOwECzsD7eIY,552 +torch/include/ATen/ops/cumprod_backward_ops.h,sha256=LCG7KQ3hYNzvBHGNf3J-J3DsgecR8miUl2uOSY4Up2s,1216 +torch/include/ATen/ops/cumprod_compositeexplicitautogradnonfunctional_dispatch.h,sha256=JzraimtePKAMn9p_roECtzfZ06XXYjNwmWYRA1P8V9Q,977 +torch/include/ATen/ops/cumprod_compositeimplicitautograd_dispatch.h,sha256=Q45oAhAVXUeNc0TkWM9eNYhJQZnqeta90DHtHLRQbLc,1245 +torch/include/ATen/ops/cumprod_cpu_dispatch.h,sha256=qsEnFPsganQjrj0gqSgBSxEq-_tTCm77MOcR0v9SnLU,1185 +torch/include/ATen/ops/cumprod_cuda_dispatch.h,sha256=lDi1Y0ARYav17J27Cbx3O0iRLpH5Am7AjTtMV6RNNgc,1187 +torch/include/ATen/ops/cumprod_meta.h,sha256=9XRY50JvBhnfSBgriQQIjKANHjVhnLtxbsqVCrseHzQ,628 +torch/include/ATen/ops/cumprod_meta_dispatch.h,sha256=W2NL1dxsyJhZoAGx8gxmNcrxCqtlZ_APIsrx0K_HcCk,1187 +torch/include/ATen/ops/cumprod_native.h,sha256=75LSZMgafB3lbHFfeZigT9zW4gHl6UfpyI5Ns0NaJM8,1035 +torch/include/ATen/ops/cumprod_ops.h,sha256=kFvrAZHVXVrWtAP-7GD9Tmq3qdwnnMVa-5YoAOtEIvo,5032 +torch/include/ATen/ops/cumsum.h,sha256=n4vAkniyLagQJobzZasVejoxQQ7FT4GgUk-InbhK8Vk,2249 +torch/include/ATen/ops/cumsum_compositeexplicitautogradnonfunctional_dispatch.h,sha256=6ZD3AdwkexbSfcUTOpSZF-1szE5KBdI104o6wLFfBiQ,975 +torch/include/ATen/ops/cumsum_compositeimplicitautograd_dispatch.h,sha256=BB7HXKkzMIlaaDNLruXnWqr-wArIFRu7zKI6JHbGVOU,1241 +torch/include/ATen/ops/cumsum_cpu_dispatch.h,sha256=sa1pIZYNXLQdQlUPKCnDhGC2Cqj_3gOpzggjbQKiHXU,1181 +torch/include/ATen/ops/cumsum_cuda_dispatch.h,sha256=_3SDPDF93LuJxzFw6DV5h5f0Vk_9Jn5ZrjJNaeVHS7Y,1183 +torch/include/ATen/ops/cumsum_meta.h,sha256=qzUABoh91rLaJnEaxIHpo0Y8P5FML508vhkWfsiagyE,627 
+torch/include/ATen/ops/cumsum_meta_dispatch.h,sha256=mMMsQHHkboQ6nhR4J5ss-fWBdb_dEBrBOAissQXax-Q,1183 +torch/include/ATen/ops/cumsum_native.h,sha256=-zA3A2WOFiJp7w6oLGpANiTkh8-Wjk0nkGxcBID4Asc,1029 +torch/include/ATen/ops/cumsum_ops.h,sha256=xlwPzHMZcfUBMAkQe5wJetmOfYxKtgaa-htS0amWq1I,5014 +torch/include/ATen/ops/cumulative_trapezoid.h,sha256=E7RHoCFIdDMXVpPTOH_lWakVlKMc4USZf6Nptxqf79I,988 +torch/include/ATen/ops/cumulative_trapezoid_compositeimplicitautograd_dispatch.h,sha256=8ZQtDNIBh_WBa_GRz5Y3eO_cP5jkEtEoH1wy8mknN5M,919 +torch/include/ATen/ops/cumulative_trapezoid_native.h,sha256=rST-OvAVHnYaZ4FksgdBZv2n2mn1j-Kypjn3sQaJl3I,631 +torch/include/ATen/ops/cumulative_trapezoid_ops.h,sha256=du_Q-ICs0YLOqj1kr5peSvhApZMrCOCIAdY21MpOLho,1832 +torch/include/ATen/ops/data.h,sha256=L0OT5aWe_jDYOH5J_QL6rejk1s6r2sIW8z4vsaP2Y3s,478 +torch/include/ATen/ops/data_compositeimplicitautograd_dispatch.h,sha256=7rT5Ey_ek1MWNWGDWMK7RTglC-A8vrIUHUoogFmPrO8,762 +torch/include/ATen/ops/data_native.h,sha256=rtO_fpeo1PjUbz2pXX2DnNq5L3BZt0CGWODVhUZyc8c,474 +torch/include/ATen/ops/data_ops.h,sha256=u5ZB5e0cuxEmIjBTvPBdEhTaN3oCXI_ZS7zVWaw209A,961 +torch/include/ATen/ops/deg2rad.h,sha256=2jO6pZWQbdgj3Q2xDykjJ_bX8y1tJaEMtQwdXGdMJKs,1159 +torch/include/ATen/ops/deg2rad_compositeexplicitautograd_dispatch.h,sha256=DLSR889V3eHxDTyTDgFYjBFbYzyQ4e_g78K8hW3VHqU,976 +torch/include/ATen/ops/deg2rad_native.h,sha256=vDQZNL5ozGGnjkBH1C5HyCBej0e_bQLOzuYKdKW_2Xo,1034 +torch/include/ATen/ops/deg2rad_ops.h,sha256=OdRtNfWstIS8NuMt9u39a8jLyG23xWV_yPS9o1hnnl8,2131 +torch/include/ATen/ops/dense_dim.h,sha256=pzi1MAHRqIvvy-ixoLkJ44z6uSQ7LG_oyIcLLiM95hk,483 +torch/include/ATen/ops/dense_dim_compositeexplicitautograd_dispatch.h,sha256=Ui5ifUlQs-P3-k8zdjpecNs6nDTcm1yfXzdgc7flXXw,764 +torch/include/ATen/ops/dense_dim_native.h,sha256=WBphoXk9ym-SN9cazdbbL5HdKHokr2MKrClKgKiewkU,610 +torch/include/ATen/ops/dense_dim_ops.h,sha256=dfFIdqBZog1yvhySDqWSuH-uZTbRkmTEMWQcAgXduyM,964 
+torch/include/ATen/ops/dequantize.h,sha256=jg0ccOXko2AFxCL3cJh0oQI51NXdxNEC7IJ-xmN4KJ4,1697 +torch/include/ATen/ops/dequantize_compositeexplicitautograd_dispatch.h,sha256=G79b9-0_JeOtygHHMAr06A1m2Npq-6vwZf8wU4ddJl0,1026 +torch/include/ATen/ops/dequantize_cpu_dispatch.h,sha256=PgMCnHQK2lewGWM_pKyXztqTdjHq1sphVO5wR-wODHE,724 +torch/include/ATen/ops/dequantize_cuda_dispatch.h,sha256=sbUXS-BxQPCsWFII-0-Yj7zMWV1u_aMl5gIEhBtWCPY,726 +torch/include/ATen/ops/dequantize_native.h,sha256=hQKoXBJQHjfowCXsCXLJJAE60cwZ_WYGCcHtFqjl-pU,824 +torch/include/ATen/ops/dequantize_ops.h,sha256=-OZ-tyqbEcR6eBGwELdE5M85Z0UpNdHHxetJke3sNog,2891 +torch/include/ATen/ops/det.h,sha256=1MvDbkXrA21hbO7QEABWNSYTDb2OmjldNv1Y9tRM-3Y,602 +torch/include/ATen/ops/det_compositeimplicitautograd_dispatch.h,sha256=vMhr6dD8nYriKxlO7LAas9mbHdj_bRMEbwKfkByWGcg,761 +torch/include/ATen/ops/det_native.h,sha256=16WNOQzEA6WtqclXBThD26JmI8TXWv2NWpyB_gcwoN0,473 +torch/include/ATen/ops/det_ops.h,sha256=Vj5DXd8PKWt2U3DCF6Q1VRea4zF-QQTORJS1tMO1nks,958 +torch/include/ATen/ops/detach.h,sha256=AZrUxQh5TUvGQ5Q6dW1ejfjVtJeLkbUZDh7ejvPV44I,762 +torch/include/ATen/ops/detach_compositeexplicitautograd_dispatch.h,sha256=crcZzI_9nwLGaJvHqbeMlfQpDK3rghIb7ftfD7-K2Yg,815 +torch/include/ATen/ops/detach_copy.h,sha256=BT5OYY1O6B383hr3pxjlZ1CQNNNhT0CwrTRvflzluLM,1054 +torch/include/ATen/ops/detach_copy_compositeexplicitautograd_dispatch.h,sha256=xqbDYRrKQ3HENY3MW9a_H2FO4r_MZEU-BPYRjjTJxKE,877 +torch/include/ATen/ops/detach_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=vW0hj53gbojTYp7awhEvCMgH3ATWwxH5LDf6gHofF-M,795 +torch/include/ATen/ops/detach_copy_native.h,sha256=z2_5KtGIRJZTF48JzDDdRrG3m-Hw7nEN2h5N2RZdaZQ,564 +torch/include/ATen/ops/detach_copy_ops.h,sha256=d8aBlrElKzmbT7QyKh0KcxyrnjALAKKtRr8TNprn4yM,1620 +torch/include/ATen/ops/detach_native.h,sha256=MKpf1vx8bCOWGfXInBFSHRO-7iRTGHYGUKbChNuHh3s,527 +torch/include/ATen/ops/detach_ops.h,sha256=HCMLv23LQjjPvBb6SRS5SoI6sHW6Jdr5AJa3OYPdNHQ,1505 
+torch/include/ATen/ops/diag.h,sha256=uPXhn3So5E7V7iOer3Eev2WuQ_i5mQ5KNRfBGA0XnFk,1120 +torch/include/ATen/ops/diag_compositeimplicitautograd_dispatch.h,sha256=edYpNbx-uPwMurX3pmgjbRTGlyh-cXPUVb4hup1v0KU,973 +torch/include/ATen/ops/diag_embed.h,sha256=FRWPZiRdsv996Pc4sbw1Zo2ryZtthKFaW-l6N2LGuag,1372 +torch/include/ATen/ops/diag_embed_compositeexplicitautograd_dispatch.h,sha256=gg8ug0EHOEm6fljI_cZ4Uoc9S-U2fToKJT2DCO-DFr0,971 +torch/include/ATen/ops/diag_embed_compositeexplicitautogradnonfunctional_dispatch.h,sha256=BqH6JU_YRm-CUQFRUTvWhpZ3mYiXTmYXqqaZKlpTebs,846 +torch/include/ATen/ops/diag_embed_native.h,sha256=hmaRtMhqJIi3QtG5luDB2fdw8m52vjNbN_Ag5webUu0,658 +torch/include/ATen/ops/diag_embed_ops.h,sha256=mptAqmzVQsq5iG3HhfAOAcYv3B0InnlM7lUkqDnELQ0,1924 +torch/include/ATen/ops/diag_native.h,sha256=EG-qHeRdzxbNlMztMGMWrucza0iTb79L-dSf8G3e_Tk,588 +torch/include/ATen/ops/diag_ops.h,sha256=S7jqrigTQIddIssEpjQGpYFWhe36gLkZf8_oCTi3vDY,1700 +torch/include/ATen/ops/diagflat.h,sha256=R4byDCzX2vSlZj8j8xWK-DOu4oCp0Pgl0vf5G4jYQ2I,662 +torch/include/ATen/ops/diagflat_compositeimplicitautograd_dispatch.h,sha256=iAv9nwwoz86t3SP_zl3mQCCfQAF_QsHYZ_vBg8lLqh8,784 +torch/include/ATen/ops/diagflat_native.h,sha256=e-4lYP4GFNVEi-jBwDwQV0RcAXC1bSSyxg8BlMUugDw,496 +torch/include/ATen/ops/diagflat_ops.h,sha256=MOgSp8i8tHXdiBw5qpgqGCrWmrCauws3VABcgKQW9W4,1028 +torch/include/ATen/ops/diagonal.h,sha256=9ob9IsmpHkBnj9GHRFRDAjsg5vSJd46PcPiIB9msYgQ,1062 +torch/include/ATen/ops/diagonal_backward.h,sha256=MYLwWkle1wiJKNFdC4HLzPIDtgSqu7Fr1weolB_9NfQ,5219 +torch/include/ATen/ops/diagonal_backward_compositeexplicitautograd_dispatch.h,sha256=AOBdU5Kt4gWehW06MCyD-vmUcCLIvk6MhVu9952CYQw,1711 +torch/include/ATen/ops/diagonal_backward_native.h,sha256=_TRHJV69sKVT_l0IS56lL3ctA68rNWwLKunjNzBag9s,758 +torch/include/ATen/ops/diagonal_backward_ops.h,sha256=VNqKKNrZmP5mYSw_CLSF4fAd4BKZjNAEfh8gamUTOB4,2210 
+torch/include/ATen/ops/diagonal_compositeexplicitautograd_dispatch.h,sha256=YNCL9amA_G1wo5NJs2cWY13NlZUJhLaoM6LR-9sjqJ0,816 +torch/include/ATen/ops/diagonal_compositeimplicitautograd_dispatch.h,sha256=eFMJSnz1dq1oAiLRHonak-2wtUy0wI6EqqvdkPVofBw,840 +torch/include/ATen/ops/diagonal_copy.h,sha256=h8Dliy7qanMqfh6lh8MsUDD9PMM23fotrsWuX5U-H8I,1392 +torch/include/ATen/ops/diagonal_copy_compositeexplicitautograd_dispatch.h,sha256=FqVEXXDUGjaBarEzXdKgH6vPnanN89fGkBUiL3JWP3U,975 +torch/include/ATen/ops/diagonal_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=1eCt651vj8kU98-XvhO2c7oWCIupIeanTt5z2gUjEzc,847 +torch/include/ATen/ops/diagonal_copy_native.h,sha256=MTJGn6nsU1UNbrIXQoerj22S8aHLlAB9UHQubCRotr0,662 +torch/include/ATen/ops/diagonal_copy_ops.h,sha256=tASZqgsDLLnaqQ451bt-C-OV_8HuYCBef4d6w4ovm8k,1938 +torch/include/ATen/ops/diagonal_native.h,sha256=m2xvoKGDzeO2MLL5CpJCZms9RLlT_wxjN-bU-BTpqss,658 +torch/include/ATen/ops/diagonal_ops.h,sha256=pyLMA9TiT_xQcDDKJwbodaLaLCG5qU4bjbMGLXPQCwA,1953 +torch/include/ATen/ops/diagonal_scatter.h,sha256=at7dMO2qJ6mTwTrOA7gE0LV9JR5CXdBKW6jNPnuVCq0,1545 +torch/include/ATen/ops/diagonal_scatter_compositeexplicitautograd_dispatch.h,sha256=St221yGHbtk1c16W8KqHEsb1Gp78kre72V65iTIC6Uc,1029 +torch/include/ATen/ops/diagonal_scatter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=gQ6Hz-GumZ1598dqaSGzKKSS7l1kd3AcB3m4kIsIa8w,874 +torch/include/ATen/ops/diagonal_scatter_native.h,sha256=9KUzFDZ3Vf8ZlCperM0qNWhgM6Bn5hI2p-EmOGema1E,716 +torch/include/ATen/ops/diagonal_scatter_ops.h,sha256=fBMgl9nXo9wOAi-Zr9vxuZ4D1IiDmduYdEIG85GDCKU,2116 +torch/include/ATen/ops/diff.h,sha256=tUzUjpGOkPLf5xrelz_Ji8ojAe1-Hmkm9jiAuapqSkI,1612 +torch/include/ATen/ops/diff_compositeimplicitautograd_dispatch.h,sha256=IZ7Pp_125yA_6VmKZzwnN1dc2LJBBvoY5pO3BoxCZQc,1276 +torch/include/ATen/ops/diff_native.h,sha256=z6uBf_EuL4iSXo61OfKJi-11JKZ3udlcU8FUX2_FtFM,787 +torch/include/ATen/ops/diff_ops.h,sha256=VMOW5_ETJHNoT8SsTENLEglIS5bdGcJiQi4ZKaF7OvU,2342 
+torch/include/ATen/ops/digamma.h,sha256=HlpxyydQR7iBigXOgAp0GAD3mYGT8TVYtEhDpEUjKIc,1014 +torch/include/ATen/ops/digamma_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9X1iwB9nsSRxsdntZOMRgNxmGsS45_ZQhKj_pP0pwAU,843 +torch/include/ATen/ops/digamma_cpu_dispatch.h,sha256=MzP1whO9KAJd-sPTA-0DmQFm4nc7wudhV4fErigm4RQ,932 +torch/include/ATen/ops/digamma_cuda_dispatch.h,sha256=jR7Kxf3Kutt18KxH-jso7KLOqnEnbxR_Bq6rrPpplc8,934 +torch/include/ATen/ops/digamma_meta.h,sha256=MuFJFEdAd85i23KO67BN1Pyhg11gLPRnTlk6vjU5jWU,576 +torch/include/ATen/ops/digamma_meta_dispatch.h,sha256=MuEijZQNPnfdw2EgfID4yZbv9rKtm1BwMxsKMYIHJ9Q,934 +torch/include/ATen/ops/digamma_native.h,sha256=hX6_mJIrHT8s7YUi8FOqEjwlJWStRyNzFl8FqKeu8Zs,599 +torch/include/ATen/ops/digamma_ops.h,sha256=95pRtKCzuqvWcr25N4q5uURQ2pe2JkrU5GfP2iDUgi0,2131 +torch/include/ATen/ops/dist.h,sha256=YIw-jJZ7b8M_nXwmuHH-fPA7eb-V1fB9q70TZi-IdwY,1240 +torch/include/ATen/ops/dist_compositeexplicitautograd_dispatch.h,sha256=pc6-HxVURdAmbQaoZuiKy0zBIcwKICyCIK7PEulFXsI,1063 +torch/include/ATen/ops/dist_native.h,sha256=HLchRNZYeC9K47Rmtn_FNTJPRtQObtGYRmeDFyjL7VE,648 +torch/include/ATen/ops/dist_ops.h,sha256=wyD7JR6GdQQ8otEPNeRNCs6wUtNu3EcNt_8m_ItGIPI,1902 +torch/include/ATen/ops/div.h,sha256=GqyHywWOQSXhZp-zK6QHR81mjzCK_kppyvQAMBjN1UA,3714 +torch/include/ATen/ops/div_compositeexplicitautograd_dispatch.h,sha256=jGY81xqohLjATZfHV-8xA5iJtzG7DtjtDtG8DwZbufw,1614 +torch/include/ATen/ops/div_compositeexplicitautogradnonfunctional_dispatch.h,sha256=R4g2Ygc6XiMBmfvmt8TU1GRuv0QX5xrkQQH62cEu8YU,1136 +torch/include/ATen/ops/div_cpu_dispatch.h,sha256=Z0FGxxyAYxfZ8rU2h2EfSYne-88OdmnoluIgub7pgVw,1570 +torch/include/ATen/ops/div_cuda_dispatch.h,sha256=brfTgPlIsfAJhuEy-eB7UNCk_VL_vyJm6ffBs0w1AD0,1572 +torch/include/ATen/ops/div_meta.h,sha256=PI2nE2o6qNBHB75yv8NVnKoFuBvUU20KOn215EHOQvE,807 +torch/include/ATen/ops/div_meta_dispatch.h,sha256=oz5UE3e8SY829iYZzwL_Ysba-HApIX66AJ_zINMP2kM,1572 
+torch/include/ATen/ops/div_native.h,sha256=HiJp7_Fk3D9m7KcrSIWlqr0Ez6F9Rc-2UI6BUk7hdZY,2508 +torch/include/ATen/ops/div_ops.h,sha256=3Y66woCta1WXOps8pe3WYXVPt6KrNAzlztfaUtRt6RI,9332 +torch/include/ATen/ops/divide.h,sha256=2TOqkk0yDs-8LiWcCM0o9S_DkOQyY34jEdzLg99lwXQ,2613 +torch/include/ATen/ops/divide_compositeimplicitautograd_dispatch.h,sha256=faKU5tLCupTnFlS3ykrAU4EeBOGbEy6wqfaj5uEQQzM,2050 +torch/include/ATen/ops/divide_native.h,sha256=f_GhN_TAnbZL6Uk4SzbqhLi2uapMCNvoWgJAy7qqcl4,1503 +torch/include/ATen/ops/divide_ops.h,sha256=DgbPd1QvpZrlhHT09uhWYRh1Tyl8xtWmKQcifN07Kck,7812 +torch/include/ATen/ops/dot.h,sha256=skbc8oylA49FqPsm0eo-L-uEN76fY8h-NJZrAHB4csE,1124 +torch/include/ATen/ops/dot_compositeexplicitautograd_dispatch.h,sha256=usIcLuparc2NVrF5TntbpbjqgiYJq-dhnbfKBM-ixjs,915 +torch/include/ATen/ops/dot_cpu_dispatch.h,sha256=ywDI3f52zvW9ZKo6diE_gnWH81jZEoKLcdRwFswIp5w,744 +torch/include/ATen/ops/dot_cuda_dispatch.h,sha256=A86RTtAXKBpGuszl0vJJJXB-rvKc5BIKkMrFAUY8Wuk,746 +torch/include/ATen/ops/dot_native.h,sha256=5JC5I65GS6hllzwZxoG5IlAQu72PfHtmct8YJypvFV0,685 +torch/include/ATen/ops/dot_ops.h,sha256=gv9JbRQjSCoZLAJy8r_guZrkocYQvKipACGh6fSzl-g,1750 +torch/include/ATen/ops/dropout.h,sha256=R0NBgQEBlw27q94UKRw4oZHO_SvOv71YcbIIgHnsw3Q,872 +torch/include/ATen/ops/dropout_compositeimplicitautograd_dispatch.h,sha256=1oNmNCeNCWLHWL9vxzq3kL67NmtOUC8Ir9mLr81pUbE,862 +torch/include/ATen/ops/dropout_native.h,sha256=v6aHxY_JDeUwSkNAjH3z5xATlslRALfayObPGlgtr3A,574 +torch/include/ATen/ops/dropout_ops.h,sha256=mEk39af2nGsRYGCqYkV5bmExg6es_HIHHUYCPXBtGO8,1666 +torch/include/ATen/ops/dsplit.h,sha256=DB1NMUqGPVoRj9eP7aaRkfO_u9KO5fCLA0KORl9YtwU,916 +torch/include/ATen/ops/dsplit_compositeimplicitautograd_dispatch.h,sha256=Lc_xGCKzhxHNU6F2ztKMmp3eKEXyWbtDoBFsS4gUTb4,891 +torch/include/ATen/ops/dsplit_native.h,sha256=pjcKkFeuAbFveqdh-9sYzOYfX7IXUInbYHQd8DBT2Pk,603 +torch/include/ATen/ops/dsplit_ops.h,sha256=95tud01xSOmnkYVwEZXPNYSCuUmJtbKvrDFYqugFXQ8,1785 
+torch/include/ATen/ops/dstack.h,sha256=oqJ7MkCcMuD5ztosjs054AKc7OPsFOgjJHFzrYXcNX0,1025 +torch/include/ATen/ops/dstack_compositeimplicitautograd_dispatch.h,sha256=-PJkYjYTP0wjfRHnQapUQfgNNbnBYTdnEAawd6IfuGo,918 +torch/include/ATen/ops/dstack_native.h,sha256=e-atQF59iMcOF9zVb4WQXnYYxoQK0THGt-Du3KcRvic,552 +torch/include/ATen/ops/dstack_ops.h,sha256=4m9Fy1WhPy3wNTRmRS7PCp2yHUEN2talXlJXvu-8kZk,1588 +torch/include/ATen/ops/einsum.h,sha256=-BC-27ymxVsWpQoPxc0FLGwwc6JH7UhCSNhNONif43o,744 +torch/include/ATen/ops/einsum_compositeimplicitautograd_dispatch.h,sha256=Zmt4msCBOKK5HKC5PQoZU3miKuRq6ocR3BKgmzS3cjI,835 +torch/include/ATen/ops/einsum_native.h,sha256=NkEj9gJDjwl-2nqxdtAwmB6gQv0eBy0MmeEwOj8pttU,547 +torch/include/ATen/ops/einsum_ops.h,sha256=JMHliVu9Pa6JRbdHnl_DhB6LgbF8l1UMQxOxmJZMRVs,1158 +torch/include/ATen/ops/elu.h,sha256=jXoLctn6DX9sJPnflP5iv8QzPaoJhic57EvXo1na08w,1785 +torch/include/ATen/ops/elu_backward.h,sha256=abZB6KbF1ooU5x8jeluqyIbRbGcL3xsALVey7cFP8eE,2025 +torch/include/ATen/ops/elu_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=qlCwY_IF0NLw-2QiE0jNvaoCpd26zZVloEMivjNBIZM,938 +torch/include/ATen/ops/elu_backward_cpu_dispatch.h,sha256=6oHWY1Wd3wCF7Qx-uJWFii0PBI5jgqRvVJ0mjgiQwnQ,1335 +torch/include/ATen/ops/elu_backward_cuda_dispatch.h,sha256=fdyXrsKqOmCUyGCynwJB6d4_L_c-d-d6VVSJIKHpi-I,1337 +torch/include/ATen/ops/elu_backward_meta.h,sha256=cbNC4IrM0hI0BiavyIEEfVlJsrtB1NFjm7J9K9egeb0,723 +torch/include/ATen/ops/elu_backward_meta_dispatch.h,sha256=aBlORlbO3W34dcLIfKr813uBE6GBuldwHwh28Ry-2N0,1337 +torch/include/ATen/ops/elu_backward_native.h,sha256=wHDc6Vt6IBoBWbu3eHGJgfo38VIc_yUh6srQkUE019E,763 +torch/include/ATen/ops/elu_backward_ops.h,sha256=zX5DWE4G9aQiAX18ZwWC6dKNWHjGQLOuOpX0gN0_r48,2596 +torch/include/ATen/ops/elu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=h59xyTdy10hFVr9wqnuWFXy1RDVR5VG3mIUD1lDUzB8,1015 +torch/include/ATen/ops/elu_cpu_dispatch.h,sha256=MQ8vMkSuDHX55lAxNnE8GxsxQdYPu8IcXX_quyvHxRs,1270 
+torch/include/ATen/ops/elu_cuda_dispatch.h,sha256=c65G_16MwTnsFIaTU3okr2z3a9D0i1a5jL3U43FHPmY,1272 +torch/include/ATen/ops/elu_meta.h,sha256=xLloj9L_9M7bKX7WK2Vc4sU-rHtXZ5y9HD5Vw7XnFQ4,656 +torch/include/ATen/ops/elu_meta_dispatch.h,sha256=xRS9WTykrUufIbwrl9jW_EFxnQWvpVqdDpIOjNcLFek,1272 +torch/include/ATen/ops/elu_native.h,sha256=Xg2KEpBCVixeDO0n0LQQJB7LBQVNQjivSLYC6w8gY8w,671 +torch/include/ATen/ops/elu_ops.h,sha256=HRrvTP5tiN_Pb1tKku0NO1xbLcD0W2hR2Oip3f99k7c,2941 +torch/include/ATen/ops/embedding.h,sha256=s8_qYKYfytvUjcF7Gft0V3DBBxrz7wOs0aXNFO9WVXs,5367 +torch/include/ATen/ops/embedding_backward.h,sha256=ggx5gO_N2Z6H9R6AVDXB-TKE1CtZFq7DvJyVK5207Ys,2178 +torch/include/ATen/ops/embedding_backward_compositeimplicitautograd_dispatch.h,sha256=5J0XifO3Bq1LhMsYc9b4M2PZNufoKk-ufaJlbRo3XBs,1073 +torch/include/ATen/ops/embedding_backward_native.h,sha256=HDOExksQOI2v7uf7afAXkVj7OURhms2K7UImwqn5CX0,611 +torch/include/ATen/ops/embedding_backward_ops.h,sha256=g80RywFuN75gTB_QMR8erO2OH39-dnsiEIZs-y21Cl4,1387 +torch/include/ATen/ops/embedding_bag.h,sha256=nnTWIUKv9bQUkFN8lsLZ99F_trmmw2F7fd8GaOS8KYw,1931 +torch/include/ATen/ops/embedding_bag_compositeimplicitautograd_dispatch.h,sha256=9BuoONsmj6yiKNSmu9pdWum6SGALKEIlpR3qtW1KGo4,1371 +torch/include/ATen/ops/embedding_bag_native.h,sha256=Iezn2XDGSMH-H_-63M6-1b64KAagSXVdMC31CwWooTU,1083 +torch/include/ATen/ops/embedding_bag_ops.h,sha256=wJ4PfDPbzpfAcUl0SGoQauCjBxyfcBoxlggLq3OVlSI,3304 +torch/include/ATen/ops/embedding_compositeexplicitautograd_dispatch.h,sha256=8eczPbmzXRyTbrpuGcqT8-kLZzcRab0-ufAGe8L3x0w,1777 +torch/include/ATen/ops/embedding_dense_backward.h,sha256=PdxrSNZK3JmTM7nZNj0PeZn9Bj9Ah8e5o4qugmdbcqA,5994 +torch/include/ATen/ops/embedding_dense_backward_compositeexplicitautograd_dispatch.h,sha256=tq6A9LTD0Kiig8wrCh9tiiSnGPH7J1sYViaDC3p5x4c,1534 +torch/include/ATen/ops/embedding_dense_backward_cpu_dispatch.h,sha256=xflS-vDS_njA_ikBB6utdaTMEEwrXs9kkv3wQnXhRRQ,1029 
+torch/include/ATen/ops/embedding_dense_backward_cuda_dispatch.h,sha256=FequpJMa_WaOnF_1PX5qQN4trbOiwDBe_ZqoDndUMQk,1031 +torch/include/ATen/ops/embedding_dense_backward_native.h,sha256=fENFnwBxtTZ20TE5k_BTv3LLOvXy0gJnVPBmfg3wkXM,992 +torch/include/ATen/ops/embedding_dense_backward_ops.h,sha256=Tk8MVUeGvXvA2C-W-CS2YBqKm4xwu-UsnoEeeVq0RbU,2418 +torch/include/ATen/ops/embedding_native.h,sha256=Q1wL1DsOD95P5r3BuYmpmdjxhMJkAn-XbucQLZfbx3E,949 +torch/include/ATen/ops/embedding_ops.h,sha256=nk3ngB_010_nHCBKokhUvjfP8TWgiV-thp_AviE_g0I,2252 +torch/include/ATen/ops/embedding_renorm.h,sha256=sR6hAoPnZ4NwjsTGuA53FyJzi-FZKCtic-4wvee77WM,1844 +torch/include/ATen/ops/embedding_renorm_compositeexplicitautograd_dispatch.h,sha256=Hk99z9_8zaNCvQpsJA4at3iwE8dSNruNPzMbJ_IRy9Q,1140 +torch/include/ATen/ops/embedding_renorm_cpu_dispatch.h,sha256=T1QIHLFDW8ngs9ywm8fFp1JHzZE_GzlBZfHc-JDjmKo,790 +torch/include/ATen/ops/embedding_renorm_cuda_dispatch.h,sha256=9hv7EzsAbrhN9UKfbqdPAzJ4yHqyoWndeDVBQuEM8bs,792 +torch/include/ATen/ops/embedding_renorm_meta_dispatch.h,sha256=YxzMsc1SvfuUmiH9JV6tZS1tgWYJ-yzPloNfM10S9pE,792 +torch/include/ATen/ops/embedding_renorm_native.h,sha256=3vHXqcea7dwJSKnt7qbGk2rvkzWlZOoU5E4C1VCo0eU,957 +torch/include/ATen/ops/embedding_renorm_ops.h,sha256=VSABSWqnupL1SrtQgmKXiMdz5hBSyyKf_BsczniT9ZY,2845 +torch/include/ATen/ops/embedding_sparse_backward.h,sha256=z8Z-mPMHBf2IGh8FgBBhGVLE322f8s8ldlklirs6bkU,915 +torch/include/ATen/ops/embedding_sparse_backward_compositeimplicitautograd_dispatch.h,sha256=59UmBtuADhejdZEqMfc59TUmjy3d00WJ9AWdpNms-p0,878 +torch/include/ATen/ops/embedding_sparse_backward_native.h,sha256=wchEKSJtbcqFRtdKnsM6DH3UQMSL_CAiBmNi4mbpZrc,590 +torch/include/ATen/ops/embedding_sparse_backward_ops.h,sha256=Oe5gVl43EkLIOeFKS1OQeWvrrj2dAQDLlL483Hxj51A,1333 +torch/include/ATen/ops/empty.h,sha256=p47Lt42OEKEpTPb5DxagCwvYNKEap1pfa6x0zqRIsCg,9245 
+torch/include/ATen/ops/empty_compositeexplicitautograd_dispatch.h,sha256=Pg-u6nc1hTg5hgbXXN7D5Sqz5qjYqS3scw7L_snbeRk,1520 +torch/include/ATen/ops/empty_compositeimplicitautograd_dispatch.h,sha256=Ab2ZE86_FOwxSjIBAyZijv2vrVRNdNZbApOKiOCUQV4,1256 +torch/include/ATen/ops/empty_cpu_dispatch.h,sha256=gG-pV-BNnabTixJcdmtNT5LEJIpxn56BugUeU66o4T4,1464 +torch/include/ATen/ops/empty_cuda_dispatch.h,sha256=BVrJX4MYNNrdnBEoP30nV9NaAj0tZbRnspu9rFZmxPY,1466 +torch/include/ATen/ops/empty_like.h,sha256=7OmZXu2TP-f3JYsx9DtsvhyCew5o2-frkV8NqNFnmDo,2198 +torch/include/ATen/ops/empty_like_compositeexplicitautograd_dispatch.h,sha256=NKoLc4TIRD7zVWF3qNKUX1Q3EkCgl8jEMh3cblcBGGw,1392 +torch/include/ATen/ops/empty_like_native.h,sha256=WJRtc27X0rMQMboseAWCKU39HS-XUyHf1zkS9szB9nY,1987 +torch/include/ATen/ops/empty_like_ops.h,sha256=SY_0TzQGTRGxvDFKk2jpbEZUFNdZJuIOAvs9V4GHq7M,2444 +torch/include/ATen/ops/empty_meta_dispatch.h,sha256=qS0_VLwGOvGDuBMelOVg_pWU0y08ynSVTA_jZljk4Tw,1466 +torch/include/ATen/ops/empty_native.h,sha256=LINSwIpGiCcYlQ3a45LVVlZSXskAIsIt2jM-EF3Gqr0,3592 +torch/include/ATen/ops/empty_ops.h,sha256=3Pkn_7-Yx-NKHt2XZVHMhi20F8q5tJEilLbdZoitwr8,4724 +torch/include/ATen/ops/empty_permuted.h,sha256=3WPGg42ayUVlNh9fVDia8iYuAnjOGOBDoKPDBqQpi2o,6795 +torch/include/ATen/ops/empty_permuted_compositeexplicitautograd_dispatch.h,sha256=FqK5pM0eV6-WOdCt_gjN-x1HuO9MnwdJbkY-QTPjR9I,1938 +torch/include/ATen/ops/empty_permuted_native.h,sha256=W3t9gkqyfxwrdDjjMpvy-8M4lgCUXzkoSOh6R8DMOK0,809 +torch/include/ATen/ops/empty_permuted_ops.h,sha256=RP8Qet97GLUyZQ0ENKZevkHgQN_y47fSr76ZDevlV68,2356 +torch/include/ATen/ops/empty_quantized.h,sha256=CA_xEiLjr1_H-vam8-EkesBkujviXj60SHaMDdA49jA,2459 +torch/include/ATen/ops/empty_quantized_compositeexplicitautograd_dispatch.h,sha256=3a73sgUINb7bGc2T0AVhFuosSoGhimuZZ7v6wyrEYQ8,1048 +torch/include/ATen/ops/empty_quantized_native.h,sha256=O3aJeFUHtbF8X5duSsdCsp-cL7zzwIkFsqC3XuAkV-c,892 
+torch/include/ATen/ops/empty_quantized_ops.h,sha256=skDwcSlXuCCKnKR0NyaxhAoIpmRL9j0oqHyhNKH7Zpk,2638 +torch/include/ATen/ops/empty_strided.h,sha256=bOjaHUQ4TlFR7ykYpRf3yDtYFJtpie0F7GwFsDwQ8tA,6658 +torch/include/ATen/ops/empty_strided_compositeexplicitautograd_dispatch.h,sha256=9Yx0W23lS5yaKrujxuaO8x3yQMEwplxPJfvN3GAtPTA,1166 +torch/include/ATen/ops/empty_strided_cpu_dispatch.h,sha256=RpE01KBO-jwQHXdOPNVvTavTUhmFLDXx3oJl1NYikRg,1374 +torch/include/ATen/ops/empty_strided_cuda_dispatch.h,sha256=PHkb8AeJjOpBqtjB_JyUfRBxNsu2J-fhAYnS6v7jQQw,1376 +torch/include/ATen/ops/empty_strided_meta_dispatch.h,sha256=3EVRhA43wL0iN2-rmPL-3OOBn9d-CvMl7QwIJ0meI5Y,1376 +torch/include/ATen/ops/empty_strided_native.h,sha256=Ta412gvTTfFwwsT8sP-yF6eBpPMHCQ53K5I8rNcY6x8,1546 +torch/include/ATen/ops/empty_strided_ops.h,sha256=8Bb3OZWRp4NNUNmBxCzaHwMXSGmf2dno-1SARIjmegs,2326 +torch/include/ATen/ops/eq.h,sha256=eyB6RqomaelYzfZTX_GOhLtYLXHmu30X8v8KAxx0m1o,1819 +torch/include/ATen/ops/eq_compositeexplicitautogradnonfunctional_dispatch.h,sha256=KBRnPMNuCIKfzjhAwMFIMu8zPyTBUJclRyW5rJ4s3pM,1034 +torch/include/ATen/ops/eq_cpu_dispatch.h,sha256=dX1UerPELGPV4lzsajFq2BNzJ4U8rteB_iNG6pY9MJI,1366 +torch/include/ATen/ops/eq_cuda_dispatch.h,sha256=D5TAunT-rKbYZugffS42_hxtzXbOG7-6TYZN8kob5fI,1368 +torch/include/ATen/ops/eq_meta.h,sha256=Oajgs7DLd9o6yrcnrliIWDQSQ_IUNMst597kz7o3uwU,751 +torch/include/ATen/ops/eq_meta_dispatch.h,sha256=LB_A-cJJ9BdJ6XbKFiU-riS2O7XDrXM5M1AL9AU340k,1368 +torch/include/ATen/ops/eq_native.h,sha256=-Q3nDIWiB2rCjJd7hJibWhHzPTR-v6qUPdxaQ9RI6m0,1385 +torch/include/ATen/ops/eq_ops.h,sha256=UzpohP8DOI_2J4dBJr9FFTRAGw_1N4UXPr5T6TUhJhM,4376 +torch/include/ATen/ops/equal.h,sha256=j_72vRWtf6rsSGCkWp6KX23Xp9uTUO4L88GoZtonKbU,649 +torch/include/ATen/ops/equal_cpu_dispatch.h,sha256=j5YI9YxgBAETewaOudtFJrDBjlWTdCdgu5JbKAuO3G4,739 +torch/include/ATen/ops/equal_cuda_dispatch.h,sha256=mchBRbi709Z6951APBwwJTDKDAvKDG1mstukVN301iQ,741 
+torch/include/ATen/ops/equal_native.h,sha256=iIVC-DymV_xikZN5YFT67DAiIGq2Cahr8bpOYV8ldeg,664 +torch/include/ATen/ops/equal_ops.h,sha256=lMNnRNS62M-XQ-fSn30z2OQ1pte6JM3VZLXVqVK2mlc,1030 +torch/include/ATen/ops/erf.h,sha256=n3IaajwfRxySlqWOK0SPTGg7H97l-ne1j41_BaAMQoI,1107 +torch/include/ATen/ops/erf_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wGh8OQNFw08ICErp-Tc3_W_brhkUYUmKJuLpARD357g,835 +torch/include/ATen/ops/erf_cpu_dispatch.h,sha256=AvAjSKKI1QA5n9miS9De5mCT6BjVMIxeImwH-wHWf-k,916 +torch/include/ATen/ops/erf_cuda_dispatch.h,sha256=GWFJ8FLIUo8rGp7_yO_BRyqAuw76g_VYO_uw50s8OoY,918 +torch/include/ATen/ops/erf_meta.h,sha256=QjcgbpbAB6s6HqCea32wuk1tQzvK0Bbo0uNYfKIClvM,572 +torch/include/ATen/ops/erf_meta_dispatch.h,sha256=-R-SQRDApQ2COAqEA-W7zlEGrIZ1SBlzcQTi82sChNs,918 +torch/include/ATen/ops/erf_native.h,sha256=qe6lYXANkCsIB60WpZKTPXygdOXQcUwPsOm2O2pFxYY,989 +torch/include/ATen/ops/erf_ops.h,sha256=hG_7-iUbRkngBasOzzP55Yd6z-QGkMlNDGSRTXOf2Gw,2095 +torch/include/ATen/ops/erfc.h,sha256=o9rknBo7A5wS19ZMFS4OK_T3UD7jmCsQqUAAjQBmAWQ,1120 +torch/include/ATen/ops/erfc_compositeexplicitautogradnonfunctional_dispatch.h,sha256=q-k3nbIGUOcOnj5MsWXChdnTMBhn8-5kB8sd5Fb8xas,837 +torch/include/ATen/ops/erfc_cpu_dispatch.h,sha256=hEQv9d-XZkOz2G1sh8F3jnvjT6YJzyq3vw7tDttSjbg,920 +torch/include/ATen/ops/erfc_cuda_dispatch.h,sha256=d72OS5rCSKyaC6Xu_CqyaFFb8tHLu4Qex1YSUW1GpOA,922 +torch/include/ATen/ops/erfc_meta.h,sha256=b-ujAJ8xXu37gqoYsDoIbGRvK0XFG60aJiY6uB6py_w,573 +torch/include/ATen/ops/erfc_meta_dispatch.h,sha256=CJYQDuJ3y2tvJcXtA7MiRjhxM_3TVPmnVuAwc4swutU,922 +torch/include/ATen/ops/erfc_native.h,sha256=665TdYaS8HRAD3WqHwfBa2HqBGcmbSpOMSxnDWmRKHM,590 +torch/include/ATen/ops/erfc_ops.h,sha256=H_aU8tbgl7X3vvL39uUdkycnlbpxOs74d5nJbdz8VHM,2104 +torch/include/ATen/ops/erfinv.h,sha256=-GkEMXG455C59Ts6MzyI3Qr5YA7Sl5BweGqU-xY2UXo,1004 +torch/include/ATen/ops/erfinv_compositeexplicitautogradnonfunctional_dispatch.h,sha256=fUne7QfatltqmYEf3TRh9rhA6Y-RciRnVYL_GASFiC0,841 
+torch/include/ATen/ops/erfinv_cpu_dispatch.h,sha256=Ztz4EPsFCKaHv5JTq-fV5OlxVqC2is5dhPrd09k8w7s,928 +torch/include/ATen/ops/erfinv_cuda_dispatch.h,sha256=aY594X9BP_pSYTU5pf-ZV_YpZmtCLig4Ez01-xQZ3R0,930 +torch/include/ATen/ops/erfinv_meta.h,sha256=2-1OfxsCEOzHflAxPctPZ5Gc3-t-MSRHtsgwlOiFoig,575 +torch/include/ATen/ops/erfinv_meta_dispatch.h,sha256=IB6PXMBYIu_ZPlpUpHwJ0gtZR5cN03-SrfZ9lIdi-ac,930 +torch/include/ATen/ops/erfinv_native.h,sha256=1NILyKthmIypHhfinVSciPHUVtGcwNaa6PpBefWRFa8,1016 +torch/include/ATen/ops/erfinv_ops.h,sha256=Ofu5H07WDV1OSEwjgQ19PvqiT7o-Ifc4DzjoK2ceUqo,2122 +torch/include/ATen/ops/exp.h,sha256=HST1dFdAOoWcKlSCXl6b6SILYw3-Kgw-wh3Bdq-guTk,1107 +torch/include/ATen/ops/exp2.h,sha256=qzOJKD9836Lbl1UiYNMuP9UWtZ0HkqoF3YtAJvMImZY,1120 +torch/include/ATen/ops/exp2_compositeexplicitautogradnonfunctional_dispatch.h,sha256=0sDVcNT2fsPkoBdF7c2SLkRb8Qu5BC_ZhTYVXCxFTqM,837 +torch/include/ATen/ops/exp2_cpu_dispatch.h,sha256=ChHI9xli18UWhLndDAE-VrZtlBDl609msXhfsomnRT4,920 +torch/include/ATen/ops/exp2_cuda_dispatch.h,sha256=kHFfupmTIOQ3oT8rn6NcF0aEI8wbGtGBjX9Tk-maJ7M,922 +torch/include/ATen/ops/exp2_meta.h,sha256=OaxrEwwwSvVTNq_Lu_FtO-f8HbdJO-6myOYAdeLVZXk,573 +torch/include/ATen/ops/exp2_meta_dispatch.h,sha256=wu8riUDi1xrg5kTk0hr_0c65UIbBLYFFwaJhTSr_fS8,922 +torch/include/ATen/ops/exp2_native.h,sha256=xKFu-mlxFX7kEZcP0rd-wkeEGx1X3DAavXvtvwVtR2w,590 +torch/include/ATen/ops/exp2_ops.h,sha256=chG265Me1u53Y4PQtdQS20Qes-UlaItIbdrw-NRIuJc,2104 +torch/include/ATen/ops/exp_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2rqohVS0dE2UKHaFzeWd2kxoZGugi7StrreNa1F_gFE,835 +torch/include/ATen/ops/exp_cpu_dispatch.h,sha256=2lTOIfH7qlYfKCswj8TQo6jF6baWXRvXXOSZW6-qs78,916 +torch/include/ATen/ops/exp_cuda_dispatch.h,sha256=MY8adYFNWWvvAhwbmDuDMoWissmDQ7GMZDZTh8zwR0g,918 +torch/include/ATen/ops/exp_meta.h,sha256=iQWD43Z3Syb1MeGcJdEWP78kDeOxngd_z8NVzm6QdYk,572 +torch/include/ATen/ops/exp_meta_dispatch.h,sha256=faByptHQZXch-YyobWHQDNAEq9PVfcLYmtDs8XX8jRg,918 
+torch/include/ATen/ops/exp_native.h,sha256=c5vLKeghhkAr9pGMOVVH1BsPA2YGjqTIX9jnTG2tgmQ,587 +torch/include/ATen/ops/exp_ops.h,sha256=bpDlyVLIS-pAUxgcsDdFvSQc016KvaAWVxrcDCRfgxw,2095 +torch/include/ATen/ops/expand.h,sha256=eVSgv39ev8gGfmoydOPK-u3a3ErgbPAPSZYAafU9B-I,1031 +torch/include/ATen/ops/expand_as.h,sha256=vh8d1ZMNHfrugsePNhfsrZuH0pV0j2h1H7NE8ok6qzs,483 +torch/include/ATen/ops/expand_as_compositeimplicitautograd_dispatch.h,sha256=cVEtvoJtb283EXs5fSIIfIxBIVYJDQd4ARPpCCyRD0g,793 +torch/include/ATen/ops/expand_as_native.h,sha256=tRYv30yjlZTx3eoGCytiOk-kyUqPmhNy6qe6xLi4QLI,505 +torch/include/ATen/ops/expand_as_ops.h,sha256=jQf0ptthpog-W75Ji874b89x9haQZ8a3wYPvEAwMcr0,1068 +torch/include/ATen/ops/expand_compositeexplicitautograd_dispatch.h,sha256=Qx0mu67trWhHgomTJ65zvGfIkyWratnKpZ8cCcS0Rhw,915 +torch/include/ATen/ops/expand_copy.h,sha256=_NbRiiAILeGx9Moy6GOmCtW2A_ISEB-JrN3a2jAOWpQ,4133 +torch/include/ATen/ops/expand_copy_compositeexplicitautograd_dispatch.h,sha256=PxfPj2yE_V2uwySAAggHjWBcQW_hqe2TA8zoYhgz52g,1226 +torch/include/ATen/ops/expand_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=roKXKPuF_RUB9Xj9pbeHE-qMCfmwbEU5uds7m8CcJvQ,951 +torch/include/ATen/ops/expand_copy_native.h,sha256=FiOhUesIg64_q9Dvc8GNXazQdc8EHq1yiiApWlFSY04,666 +torch/include/ATen/ops/expand_copy_ops.h,sha256=nPiobJsUk88sSzgGdi0m2JTJDtXLJQtKpl3CIZFg0_4,1913 +torch/include/ATen/ops/expand_native.h,sha256=we5VOP_s1f_RvJTM2E48YXEkGUTXuq6PoxV0mQpFQvI,519 +torch/include/ATen/ops/expand_ops.h,sha256=M9vN1aIeE8ggWslW2t-YLroxHdVs0-Aph6_srwAR2ZU,1121 +torch/include/ATen/ops/expm1.h,sha256=3Hxbpc9UMRsyKLE89A-tBKYt2qIZ0qGFH_wRrvYCSEs,1133 +torch/include/ATen/ops/expm1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=vYezwxrqAV3EN_qhHxw729lZtQzbNnM3w8ieEuCtH9I,839 +torch/include/ATen/ops/expm1_cpu_dispatch.h,sha256=W0rNlEV4hw3jspSM4-TYpygYaW6JrTwYtG2KuHd0RCU,924 +torch/include/ATen/ops/expm1_cuda_dispatch.h,sha256=kgOBS_KiKvv-5_DCd6iN2G8JXMvGnoEnK8WyzCL7v6E,926 
+torch/include/ATen/ops/expm1_meta.h,sha256=jkyN6d-5oriCtIcbLkjq6izWpyj92fv7arYezbwchGs,574 +torch/include/ATen/ops/expm1_meta_dispatch.h,sha256=8ssRg75TMNvGC5FTliSsbT4bmgqr6DAgNPS_hktXrvA,926 +torch/include/ATen/ops/expm1_native.h,sha256=jj54vpSEMhg9TjfCRz55MHir4WwGczNxeDCeuCVkI0Q,1007 +torch/include/ATen/ops/expm1_ops.h,sha256=o6sHPgbwIL4YyBHT_qMiZrY36ClGHy2qYQQRcWldKhw,2113 +torch/include/ATen/ops/exponential.h,sha256=K1Q3ZnU6pdAhhJ-z4tU7Wkvzj1ImheFnIAALR8pJz1A,1439 +torch/include/ATen/ops/exponential_compositeexplicitautograd_dispatch.h,sha256=l7rCj1wxjhWVtwsQpq9KIybjIa0jxB-6JJ2pxFeadHQ,1138 +torch/include/ATen/ops/exponential_cpu_dispatch.h,sha256=0vDNI67kjAHPfEBn5V7WtWuY3Xu7nlljoCgieHu0eKM,795 +torch/include/ATen/ops/exponential_cuda_dispatch.h,sha256=01VrYCz3oTzj6Tn1u7Y87Hz0lEdBX6PIDHKVrpzmH90,797 +torch/include/ATen/ops/exponential_meta_dispatch.h,sha256=9xHejLTHsd_h5y2rqrRCXxj3ti4yYOINfQjGpvnyHyA,797 +torch/include/ATen/ops/exponential_native.h,sha256=9NqL0ZlpayEX4X6DYpZaOmm9N5QSpfAOPqliguwLDF8,822 +torch/include/ATen/ops/exponential_ops.h,sha256=yo3zO5fDSDmJsReDVmKOu02OC0Z6mODEGpp58mJik6E,2755 +torch/include/ATen/ops/eye.h,sha256=IQ42HVzkjYJKph0sGr2he1dcXboYGTq1HGYhhak8CeE,9684 +torch/include/ATen/ops/eye_compositeexplicitautograd_dispatch.h,sha256=ehsUFJ30ITnyahPix5YLgEtyNsXSa45xW6zlFwQjy3o,1802 +torch/include/ATen/ops/eye_cpu_dispatch.h,sha256=WagdK81J4dFGus7FCwq8mk04QHk4watkX2B9KfU4hqw,1254 +torch/include/ATen/ops/eye_cuda_dispatch.h,sha256=fZMjlfp0iFk0bev_thUZx7p9nIENKSuCGO7OzIAGrIM,1256 +torch/include/ATen/ops/eye_meta_dispatch.h,sha256=2LSooSb0RwLAP5uJzCUe_hzKr4ptVi2_XpxzNpCGW70,1256 +torch/include/ATen/ops/eye_native.h,sha256=REgaCMG7LyvDDwY5NL4C_KFVNk6uLFjmV4qfnijRTP8,1105 +torch/include/ATen/ops/eye_ops.h,sha256=_Gb9G4TDSM9eE6mDUkxwJ-TmuBsGlgTRPFkUMYGS0Hs,3699 +torch/include/ATen/ops/fake_quantize_per_channel_affine.h,sha256=Jf3lrhkFElXZYkQF0DLY1vTFFbm_xnEYWEGTofZTB2c,947 
+torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask.h,sha256=_jpcj78tJ8T1RSwvxWYLvOoyRs-nuSD5UGjZRRWWAjI,2270 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward.h,sha256=X4w1OUto2p0tZT6aTUOs3sQPYW2JQswqFtIlinU1NPQ,838 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_compositeimplicitautograd_dispatch.h,sha256=JxMXUPjuACR5mmp7VN-Pg2NUmBPWCSm0EWJXrs54vto,834 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_native.h,sha256=Su2x3IMYG_whyXpWWrJLSozPt3-twXE7xCpYuGdvrZA,546 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_ops.h,sha256=0aXSZbw3wAQei4McrkoopnK2zRSOtD9Dq_Tjs17yHbk,1185 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_compositeexplicitautograd_dispatch.h,sha256=ZXMSU5pKcylMlfjAWD6neNwsbZbPKao2K5mEvx4dT5Y,1251 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_cpu_dispatch.h,sha256=jVtpXecxXCFm47C-QGx8SZzCHyLdGaL4PQ4NJwMTQts,890 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_cuda_dispatch.h,sha256=W5X4cgVJWmhcpTyyqsP3cV7X1A7mYOgucyrfRbYUrp8,892 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_native.h,sha256=26GdKVXpEqA9BgNWZIW4n6_FkDZUnmvnrZQU2uJNci4,916 +torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_ops.h,sha256=yyunkmqPmbwXn9X0fBj2b7N1ic3WUS8M4G128SrUx7E,2786 +torch/include/ATen/ops/fake_quantize_per_channel_affine_compositeimplicitautograd_dispatch.h,sha256=NoDLgqodKweiKMwy428J_3MP4RoT5_zWod6iKVHWsKw,899 +torch/include/ATen/ops/fake_quantize_per_channel_affine_native.h,sha256=zBzKtUwam8Yuxh9Qh7pddkBL1R5lnd1G55QqAMBG3yk,611 +torch/include/ATen/ops/fake_quantize_per_channel_affine_ops.h,sha256=DMPgArn-qJXFboMEtAxekBg16XPeUmR6XsgY7h39GZk,1403 +torch/include/ATen/ops/fake_quantize_per_tensor_affine.h,sha256=LjRRuzLRKx3EsYUc5qTb1E7hMB0w_ftDO4YPRxyMUzs,1325 
+torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask.h,sha256=xLBisuNwnRyt6rBNK_877kAB3Co552khzTGTaDiY60w,2089 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_backward.h,sha256=VtGluxwTTfo3kdvtrZQOkEjTTL7w9ZLP5U6lRnmEW5E,834 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_backward_compositeimplicitautograd_dispatch.h,sha256=kgv_kAu6UywN6olqJdChDagPCRRJwjtGMoox6s5AOaA,833 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_backward_native.h,sha256=6yI-jfYeO5wXUIVWE6N3RMIAjuxc_J9Cpln0hizFxRg,545 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_backward_ops.h,sha256=yp2Ddw1xVKxOAHWP2GdhALcWKN3g08IUFlKV1GeEja4,1182 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_compositeexplicitautograd_dispatch.h,sha256=fbadkfIeu7cPKJZ13oVqbDSuZvJn07TygRegim86hRk,1175 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_cpu_dispatch.h,sha256=5_zbe8f0icL81e3zCyenXS5uFjQz8qjbIkFWONtP3dM,852 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_cuda_dispatch.h,sha256=zHXqY_YUz1Qz-NKDPDRfJr5bqx0SwSQ2AuNHcjG1il0,854 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_native.h,sha256=3u5BLBB0l5M2NZgrdekvS5NLf1TL1FfBA5AQ3bC0vn0,840 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_cachemask_ops.h,sha256=lWz7yiH2wKg1m4bpqIOqPK8p_pulENAZCfpehrIg3xM,2540 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_compositeimplicitautograd_dispatch.h,sha256=CuUbT204hgwVISgRbz0QceEE_pkHUyBMx8mblUoNBdc,1035 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_native.h,sha256=1RC-vBFFDYwMYHjYFF5A7eAhZyr_GjdqKmOZCMLZDDo,747 +torch/include/ATen/ops/fake_quantize_per_tensor_affine_ops.h,sha256=kxRTM4PR8-lOoucnFvu4-w1XzNnFZoYqxd43rPk7kCw,2243 +torch/include/ATen/ops/fbgemm_linear_fp16_weight.h,sha256=mDZMroqtnhCeTEZ7LGHpAbY9d-4SJu_3-8HhmV4ig20,808 
+torch/include/ATen/ops/fbgemm_linear_fp16_weight_compositeimplicitautograd_dispatch.h,sha256=2TIc3R-T-6XY5prv9HM0Xk_v0DnkOs72lpAipZPTMIg,843 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_fp32_activation.h,sha256=aiwOuEolob0ccSsOYxXeGiU2ZGgRMIY-GHOKaBQhXTk,872 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_fp32_activation_compositeimplicitautograd_dispatch.h,sha256=mTUsi4muu5y4oUza740uug5WYOpl3_9TAzZaC4rxWeY,859 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_fp32_activation_native.h,sha256=xWjridW6s-2fCAgWCQwGRt6mD6IrOHPU-8m95pbaxkY,571 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_fp32_activation_ops.h,sha256=3G0KdbtxwYSZgkKsRajf-jglH0GtHF4gPk3OGCBAeGY,1268 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_native.h,sha256=NgxqsHWVLv5tkj0r1IYl7vsRU31KjyAToq_1UEOXEYQ,555 +torch/include/ATen/ops/fbgemm_linear_fp16_weight_ops.h,sha256=MGQSTQ6sKt9auskl4TaLD4yh5r6OOVSau90Eg3egLvU,1220 +torch/include/ATen/ops/fbgemm_linear_int8_weight.h,sha256=J-L6_AUcWpqOjgZn5cB7_uVTucdq-bT5JA-fgeI0N_g,1053 +torch/include/ATen/ops/fbgemm_linear_int8_weight_compositeimplicitautograd_dispatch.h,sha256=VOF4hdnMmoQ5V3QOUBKW6kwK4jRD5YsTtIBkS8WsKnM,966 +torch/include/ATen/ops/fbgemm_linear_int8_weight_fp32_activation.h,sha256=fFrL5a6xBkEHScUw-fDqqevSIlgTHJXqP7Yint5TtkQ,1117 +torch/include/ATen/ops/fbgemm_linear_int8_weight_fp32_activation_compositeimplicitautograd_dispatch.h,sha256=B32qVHSu0izolZAH83PYBxnLvJbCQquD-fdfj8E2c54,982 +torch/include/ATen/ops/fbgemm_linear_int8_weight_fp32_activation_native.h,sha256=NkLGW8AoihJNGt2pQpVf-UwW0JMPOLIXHRr1rgbGvTs,694 +torch/include/ATen/ops/fbgemm_linear_int8_weight_fp32_activation_ops.h,sha256=a-l3MvA_fMwqrWDQoHDQ6P0ZvxT4357JH2MioADLmKU,1669 +torch/include/ATen/ops/fbgemm_linear_int8_weight_native.h,sha256=QgVNG_v8aIIuxXeMDiQmZUEeSolofRyrhYHrStsTrJM,678 +torch/include/ATen/ops/fbgemm_linear_int8_weight_ops.h,sha256=zCDFZuL84AOzD2C5Q37s7FPo83WjMTagxxbnVvwk49M,1621 
+torch/include/ATen/ops/fbgemm_linear_quantize_weight.h,sha256=87JNMInwcmn_RcqrWHBYV3IKOiLyyBozEsF4yW8DuKU,771 +torch/include/ATen/ops/fbgemm_linear_quantize_weight_compositeimplicitautograd_dispatch.h,sha256=0Zti5W1-T7nBUKrbxj2ACecAUbif0Mi1P616rTHAOBQ,828 +torch/include/ATen/ops/fbgemm_linear_quantize_weight_native.h,sha256=3oEWgVRkDALVpekpMGgY7EP90wxcznwybRv2j-pdN2Y,540 +torch/include/ATen/ops/fbgemm_linear_quantize_weight_ops.h,sha256=zUFZFDx07sjQE5edAZy-76tm-3Yx1r0nlEkrXB-yGiI,1181 +torch/include/ATen/ops/fbgemm_pack_gemm_matrix_fp16.h,sha256=NM4wrkfygCn82qqxWLH110U4KNIJZ9fvPGY_j5bqBW0,705 +torch/include/ATen/ops/fbgemm_pack_gemm_matrix_fp16_compositeimplicitautograd_dispatch.h,sha256=Ih5X5QkgaWjElwIZOm_kTdPRVdIJNO_zgwdUCBUd0xA,787 +torch/include/ATen/ops/fbgemm_pack_gemm_matrix_fp16_native.h,sha256=6L2yf6xFEvsWnNeax4PTvTdT6h-Vzm9T_aXtbi1cKVg,499 +torch/include/ATen/ops/fbgemm_pack_gemm_matrix_fp16_ops.h,sha256=uHnTxyEhzUQVN4gGzbZ9nxHKsc18qxmYip8orHYMtqw,1036 +torch/include/ATen/ops/fbgemm_pack_quantized_matrix.h,sha256=vMaAs53kKPkRZXhXRc_pITDZeuaJUXjpJBvlnDtz3gc,957 +torch/include/ATen/ops/fbgemm_pack_quantized_matrix_compositeimplicitautograd_dispatch.h,sha256=5xbXNBNxNtd1wLmRxo1hwUOVwifhP5jCi7uPgLKYLr8,886 +torch/include/ATen/ops/fbgemm_pack_quantized_matrix_native.h,sha256=3tykektJY3IknTQ_QYWih_jsJd_plu3y3kP-Q2GCT3Q,598 +torch/include/ATen/ops/fbgemm_pack_quantized_matrix_ops.h,sha256=LUypTsc9n3vyzyOqA_tj11BKFt-IET73UOVT17GfHck,1722 +torch/include/ATen/ops/feature_alpha_dropout.h,sha256=oEHqLR4lLkmgk6W-nL-iNb-AsoKnzcosa8fS9EWaVu8,970 +torch/include/ATen/ops/feature_alpha_dropout_compositeimplicitautograd_dispatch.h,sha256=Zh7Rcis7WPpgQySTDIHQCCX1AwNskH1EmBmvLhLYMQo,890 +torch/include/ATen/ops/feature_alpha_dropout_native.h,sha256=fzEhHbkam3g_hVTUknFNtTArEvV0FEdEO0exshGBL5k,602 +torch/include/ATen/ops/feature_alpha_dropout_ops.h,sha256=v4VRElDt16niN_4AW_n-40hdUKF0scweyOXFlx9Zh0s,1750 
+torch/include/ATen/ops/feature_dropout.h,sha256=XBZ82cbdfAX2_cF6jo78a-y9G2bU2K1FasYczO8IfTA,928 +torch/include/ATen/ops/feature_dropout_compositeimplicitautograd_dispatch.h,sha256=wkzqotHqspY7l-2e0WUj3yjHtVDBM9sobxIsjs31qR0,878 +torch/include/ATen/ops/feature_dropout_native.h,sha256=S6uUBV4uLaJnwwXmCijeyTC5GZmWjo2Ppo7Y4IGisTc,590 +torch/include/ATen/ops/feature_dropout_ops.h,sha256=eENUR4g9IGXrZknNsXe-ET6qJHiZdQ-sxGQv19n9Iyw,1714 +torch/include/ATen/ops/fft_fft.h,sha256=vWCQh1cgidaNyWtux5P_dX8OKZTSI9tLFlkdjxL-ax8,5029 +torch/include/ATen/ops/fft_fft2.h,sha256=PCVoArpkKwDEOn1l2S91FvcW1UDmpJYvB3L9sqWUEFI,5322 +torch/include/ATen/ops/fft_fft2_compositeimplicitautograd_dispatch.h,sha256=-63JI5LwDzL26TDo71QQtrucMIRHDtBlDfLG9ec53mk,1854 +torch/include/ATen/ops/fft_fft2_native.h,sha256=qScWOcJRA4LNlLLb_dYDlEfDDj4uNyOGwlJg4-LpYHs,792 +torch/include/ATen/ops/fft_fft2_ops.h,sha256=cDWCS6_WID0P0crSwG07Key6aBA3EFIGeE1OZ4wxKF0,2236 +torch/include/ATen/ops/fft_fft_compositeimplicitautograd_dispatch.h,sha256=4lV_52KfhyykEVl7DXt7BqHQRwG7NE45vJTE3p0KvyA,1789 +torch/include/ATen/ops/fft_fft_native.h,sha256=dj3_TB4jV53jr6bpDxjhXpGebNYnq7Vf_w46VdbRnqU,773 +torch/include/ATen/ops/fft_fft_ops.h,sha256=FdYrAOTu2sfTbMkhCxDKdjlVZba76Ic6zbM95dB8WSM,2172 +torch/include/ATen/ops/fft_fftfreq.h,sha256=Mt-LyUIbNL3xEg7C2YENYtiCOrTyokikcGwdYw7xeEc,1741 +torch/include/ATen/ops/fft_fftfreq_compositeexplicitautograd_dispatch.h,sha256=HfrBKswuThmSxq6fiQjlI0Z20hqWBM9HOUgIKCArSzU,1162 +torch/include/ATen/ops/fft_fftfreq_native.h,sha256=CPt5eeTpAVVBetuMOnGWjajIdVVmAjxzeBVAb4BMBRk,717 +torch/include/ATen/ops/fft_fftfreq_ops.h,sha256=CTM4M0o_r2IZEyKsp9CYhpOGjFZm53PnTCpX13--u6c,2108 +torch/include/ATen/ops/fft_fftn.h,sha256=LAaxcaTIJALF4nTgUQvWyBrjrT6no3yiwUCfq_aR8vM,5462 +torch/include/ATen/ops/fft_fftn_compositeimplicitautograd_dispatch.h,sha256=0p17AC3NBvhQzXAsWJaa3hwC_ouF4DV7TCm4A4ID3Mg,1930 +torch/include/ATen/ops/fft_fftn_native.h,sha256=KJU7NitLmJJK9gnPrRuU_jwVo3Mj9y7xEWIvvmvZQ8U,815 
+torch/include/ATen/ops/fft_fftn_ops.h,sha256=IlQawkb1LYIjxuLmf9cX4Fag7bKMoaWrDblRS6o00Mg,2280 +torch/include/ATen/ops/fft_fftshift.h,sha256=sFXKsbW4ejAC12NpsMS_kCFTsqsJwxJs_CrC0wNjvEg,705 +torch/include/ATen/ops/fft_fftshift_compositeimplicitautograd_dispatch.h,sha256=qGaK5cJvXcS_7dVd4GEU3Z3_Jn4w0ckokajhhKDPSmA,814 +torch/include/ATen/ops/fft_fftshift_native.h,sha256=X7OLUTiCLMpdvoK3_8SWgomPQNoQQu6J-EFcmvgy2ek,526 +torch/include/ATen/ops/fft_fftshift_ops.h,sha256=G5LiZIENrC6FLTPZDGg9vdp-wrrBdX4Uutj0vJG1Vf0,1086 +torch/include/ATen/ops/fft_hfft.h,sha256=b6uk5xFqXExRr_1Mp_0Uzjmrmwyf13CRJlen5q9-c80,5060 +torch/include/ATen/ops/fft_hfft2.h,sha256=YpqkhqfPfehJNYtB6PptAqqY6YM1GGeVj-tXG40xK38,5449 +torch/include/ATen/ops/fft_hfft2_compositeimplicitautograd_dispatch.h,sha256=UFySt2g7VfxF5SGulAU6OAoMJxx4IuaZAu6XotrhDiU,1908 +torch/include/ATen/ops/fft_hfft2_native.h,sha256=WZv6JIXPygHE7ooyYV06D-JYJt-udHfoahUazgX0Tzc,806 +torch/include/ATen/ops/fft_hfft2_ops.h,sha256=T0rQmyJurAYPOdcqtzMT2Qt3TxZJqNVk-Z78CRq7YvQ,2278 +torch/include/ATen/ops/fft_hfft_compositeimplicitautograd_dispatch.h,sha256=Z1LglLvVY3U7PQiZVcCYQ84ekbV26-3ej-hL9c3OIjQ,1795 +torch/include/ATen/ops/fft_hfft_native.h,sha256=IH0OLDZz56ie2Znxi6gFGC7VaUaPR-sUnX7NVCgbdas,775 +torch/include/ATen/ops/fft_hfft_ops.h,sha256=HmrSo7aVyLTt1urM2QxbDLfTvgiiuxkKvgangdFYUS4,2178 +torch/include/ATen/ops/fft_hfftn.h,sha256=M3objDDD1DDqqkZZEjKYierN-46OZZawiSnKZdYCVH8,5589 +torch/include/ATen/ops/fft_hfftn_compositeimplicitautograd_dispatch.h,sha256=gh5mkh6wI36YmzPYBkIhkV8qGxLnA7E8tdsgsnFnBlI,1984 +torch/include/ATen/ops/fft_hfftn_native.h,sha256=zaVwShwmZUoXKxbbiqPCbt2sb5LebtwB-sfjbOofpXY,829 +torch/include/ATen/ops/fft_hfftn_ops.h,sha256=YRGVLAvkIy1-41uKHW7Dp29KWRwooBvkzIP9Z47rNtk,2322 +torch/include/ATen/ops/fft_ifft.h,sha256=ivD8EU4oyGvbFDQnVVB9jO7HLs0nDjE6PjQGPYJ4KfI,5060 +torch/include/ATen/ops/fft_ifft2.h,sha256=Xxuqg7PecrfAEWfCmO-ucCD86XlLEERw33q_Ws1H1VU,5353 
+torch/include/ATen/ops/fft_ifft2_compositeimplicitautograd_dispatch.h,sha256=t1OHLnjimNRbZULaDzAgUXUIyNzfWX50GlZ_N7l3wic,1860 +torch/include/ATen/ops/fft_ifft2_native.h,sha256=45Hh8RvwffprU5J_jJcBXjQtrKNWTUMg1Xk0VZE5kxM,794 +torch/include/ATen/ops/fft_ifft2_ops.h,sha256=b1flbHx1iMKco-xd0UgniGaFDRZVnmjOLDqkQt2JmD0,2242 +torch/include/ATen/ops/fft_ifft_compositeimplicitautograd_dispatch.h,sha256=QsjeBgxYFA32XKl2MHjTJWDxcpee2llGesu9_0Gwz_s,1795 +torch/include/ATen/ops/fft_ifft_native.h,sha256=zd3krxsYz9NGr3fwlXFMwF2h2kL0DLpbpjuaUEj8_v0,775 +torch/include/ATen/ops/fft_ifft_ops.h,sha256=vvxpH8DxyQl6ymsXw-Q1zkCe9GmHR3uw7UF75PubOxs,2178 +torch/include/ATen/ops/fft_ifftn.h,sha256=Keci47_nof0PBt4kkiFz71G22UVPINAlRgal9tTFuk4,5493 +torch/include/ATen/ops/fft_ifftn_compositeimplicitautograd_dispatch.h,sha256=Si8WuBPgT2bl671aHYBk9_RryMHBPMbLh3843FuKgT0,1936 +torch/include/ATen/ops/fft_ifftn_native.h,sha256=jHLznplViQ9Rj3jRpZTxaFe41M_n3z8vFOUmPBMh_Lk,817 +torch/include/ATen/ops/fft_ifftn_ops.h,sha256=Fe5464Z1YOMWYHjKRpfrzvhtst0KdHlyuL6cevV3NYw,2286 +torch/include/ATen/ops/fft_ifftshift.h,sha256=hcuxTVIvylWo-KfxBRtF4tGKqw2hC09R9BqrDsguO0c,709 +torch/include/ATen/ops/fft_ifftshift_compositeimplicitautograd_dispatch.h,sha256=uMvh2lZ10qWekzS8rp2_hSlMUnaaXL7uw2VJp2_L1ac,815 +torch/include/ATen/ops/fft_ifftshift_native.h,sha256=5FzEtKaCvLULA7zgfLEnQQYl13yCs5DbOs7BaKbitVw,527 +torch/include/ATen/ops/fft_ifftshift_ops.h,sha256=_SuUBjaxmGlTcm_1J3VbmlaqRHNxoPY2mhp7kUBAhek,1089 +torch/include/ATen/ops/fft_ihfft.h,sha256=Y8LXVlU1pVEuRWaq5TxewR_7wfAX638KPwnu-cv0F2s,5091 +torch/include/ATen/ops/fft_ihfft2.h,sha256=gIkpckEDP9mQp6wTusjVEYH_7S2Sc4IgagQpcC-xT4w,5480 +torch/include/ATen/ops/fft_ihfft2_compositeimplicitautograd_dispatch.h,sha256=E68gBjiI9AwGprd1_GkY4eBG70jrZA8ROYIhGaJr7fA,1914 +torch/include/ATen/ops/fft_ihfft2_native.h,sha256=DGZ759FCpsWfD3ExwZSubbV6lX7BQLb0jVfNL2cxxj0,808 +torch/include/ATen/ops/fft_ihfft2_ops.h,sha256=wO4977rFpQRPqidA__rAoTxNtRbWwwFDfyZGrS7iwQE,2284 
+torch/include/ATen/ops/fft_ihfft_compositeimplicitautograd_dispatch.h,sha256=wCiKc9ZesGAyvSVYWnZYfPQYHCysxiA3pnk7dFZETu4,1801 +torch/include/ATen/ops/fft_ihfft_native.h,sha256=vZNVCtiAuAC29TNq-BIPIToI4df6wTA3wXoPHiSlrdA,777 +torch/include/ATen/ops/fft_ihfft_ops.h,sha256=YFA4tsTOumgO--6q11KCyhajpAWH_B3BqCIQ4gp2IAk,2184 +torch/include/ATen/ops/fft_ihfftn.h,sha256=v6k1_xyAo6F62bUjji4GQdwuRmMDAFVKPf346us0bGA,5620 +torch/include/ATen/ops/fft_ihfftn_compositeimplicitautograd_dispatch.h,sha256=NiuNeHBQO7FkBdHP-wfjHPQZ38pSTlz46_AQUSDahro,1990 +torch/include/ATen/ops/fft_ihfftn_native.h,sha256=1fsQTXwuC7BoUmW-PpXKL_0inbWf7ucAShGQrHtv9Pk,831 +torch/include/ATen/ops/fft_ihfftn_ops.h,sha256=UTcCsqBbnpu-CzmsqqBCFE0iWgIvCQe3pU24ZkfL_wc,2328 +torch/include/ATen/ops/fft_irfft.h,sha256=ByVq9d1WcJPwBM0V1gjG7vv0OQtQhE9LVSX0bzRsa8Y,5091 +torch/include/ATen/ops/fft_irfft2.h,sha256=VPVaaece-izUMKl2F7JxhpdebUhfY-DH-B7FU2M8cMY,5384 +torch/include/ATen/ops/fft_irfft2_compositeimplicitautograd_dispatch.h,sha256=yTuVJByVNUTC1Cc90_J6RQwgy88-RxzLa_FG6FxPJV8,1866 +torch/include/ATen/ops/fft_irfft2_native.h,sha256=sGi_m3GB8669nE63hvw4mN9qrj6PV1dsuCdUk9L38Tc,796 +torch/include/ATen/ops/fft_irfft2_ops.h,sha256=oSge64mITmvb2BL2qRUkg0Y9MlIZyfyBoacCU-t_Q6M,2248 +torch/include/ATen/ops/fft_irfft_compositeimplicitautograd_dispatch.h,sha256=MlJkG8MnQYOR_jHmS9qxXADyhYt4y18d2cc414CC2Zw,1801 +torch/include/ATen/ops/fft_irfft_native.h,sha256=BC99ztD2OMwEguVuGBtKISpR_cPqWXHWVGv-qvh6pG8,777 +torch/include/ATen/ops/fft_irfft_ops.h,sha256=yfAMVIrbEZ9_R1KEB8Rl_66FqxJmQhFF8OuvSapBBBI,2184 +torch/include/ATen/ops/fft_irfftn.h,sha256=e1VCVdJvAh2pzrqCnQo4l5TjbSPtuN1qyyWsBrMb4S0,5524 +torch/include/ATen/ops/fft_irfftn_compositeimplicitautograd_dispatch.h,sha256=HfX261YyX0exsGBbjvWdPXUwrve6b3psgncmRbRExAQ,1942 +torch/include/ATen/ops/fft_irfftn_native.h,sha256=FL6KShD7xT49BVby3bs1Gsj2yF35fP__5vOovNr3h8o,819 +torch/include/ATen/ops/fft_irfftn_ops.h,sha256=80VQCJudwL3PNgeldM8Wgw07KCyTS9v6WUHsdKJBrhY,2292 
+torch/include/ATen/ops/fft_rfft.h,sha256=RF3rrmaOi-wh9BaOd-u0nWjJsP5nvEfQBrG5nsOZSoo,5060 +torch/include/ATen/ops/fft_rfft2.h,sha256=BP1hpqKB55TKo98I_9UlO7FHLXKgtus2KaVkyrEx3EA,5353 +torch/include/ATen/ops/fft_rfft2_compositeimplicitautograd_dispatch.h,sha256=DSGWBvw0F9aSczdkv-hIUUUMFgBZgvbpcBz3KOi8uHQ,1860 +torch/include/ATen/ops/fft_rfft2_native.h,sha256=1VoZ3OnKclcreKxl1QL9SPSR21GFmLSCRyBP0nF4Cko,794 +torch/include/ATen/ops/fft_rfft2_ops.h,sha256=oAZ3J0WS0zpwIdE47z8TzPkAmJ5ry-FOnYbAuPXftco,2242 +torch/include/ATen/ops/fft_rfft_compositeimplicitautograd_dispatch.h,sha256=ZWujkLv9qtstBdOgEJMeGM7FW--GBnPWt-Mi0WRUyPQ,1795 +torch/include/ATen/ops/fft_rfft_native.h,sha256=wqt0FZclnnEceAwKZRms_KPjaH1ZOoy1QLucGclhcK8,775 +torch/include/ATen/ops/fft_rfft_ops.h,sha256=7RDeIbg0fhhRevQOtTW1ErXGNQnon_pKKyQSbsib5yw,2178 +torch/include/ATen/ops/fft_rfftfreq.h,sha256=tZBlbuNEwUvwyO6jZJeM9l3MSi21Rg0E3kbXBINwhUU,1754 +torch/include/ATen/ops/fft_rfftfreq_compositeexplicitautograd_dispatch.h,sha256=rs7f69poYTw6l38XI2DlxdDb7Bk7jJep_urRBEunSxY,1166 +torch/include/ATen/ops/fft_rfftfreq_native.h,sha256=9nnN8f45kFu6pM55NWPxDldCzPOYqv09rAogJX4e-J4,719 +torch/include/ATen/ops/fft_rfftfreq_ops.h,sha256=OwbfUDD_ZRa0Y1xEc_F0r0H-GeU3mZUqQv1xd0JCBek,2114 +torch/include/ATen/ops/fft_rfftn.h,sha256=OIFA1Qmx2WaNTyECveCQp8ROHBYZvdMkZVQNptKOxuE,5493 +torch/include/ATen/ops/fft_rfftn_compositeimplicitautograd_dispatch.h,sha256=D0sMa7ywf9jkQk5f47LWRG_25mIapDLkdbdoszkNs0A,1936 +torch/include/ATen/ops/fft_rfftn_native.h,sha256=Qg2z3AkBLX49GM5403zFiRv5G6fwzDcEr1DXp1BEKY8,817 +torch/include/ATen/ops/fft_rfftn_ops.h,sha256=IwplfoWv_yvvm9EG7-AUoyuW6knCV6eLv9c22TNx79E,2286 +torch/include/ATen/ops/fill.h,sha256=2O2R_k4MP8En4J_2UgDW8dZxRBtEl7tyk-vTrISCWb8,2251 +torch/include/ATen/ops/fill_compositeexplicitautograd_dispatch.h,sha256=eeuep9t-XE5g9z8ucNeM8xG-hMLfQENipkzrpSfpbVU,1276 +torch/include/ATen/ops/fill_cpu_dispatch.h,sha256=OgPpV9sOfxOj71SpumrO9Yf_J8orp491V9q4mKuANlA,816 
+torch/include/ATen/ops/fill_cuda_dispatch.h,sha256=GIEBMjK-q5Z3YlI48yA8Sn5WVL_tJiEcS7QfnWgTGLI,818 +torch/include/ATen/ops/fill_diagonal.h,sha256=mVr_31D_sEwelSVpJ878sgg-hVx5M5ReHMzxMNlV9TA,487 +torch/include/ATen/ops/fill_diagonal_compositeimplicitautograd_dispatch.h,sha256=IC_PggetcV9GrhMDYpV67jcqVCbgOJUrY3rWlBAvbDA,816 +torch/include/ATen/ops/fill_diagonal_native.h,sha256=3_gNPXIK3SodKhXoLJz6CGixNfmjWbDU8k9qKsKBz7o,528 +torch/include/ATen/ops/fill_diagonal_ops.h,sha256=SAs8QTwZ0z9GAs_MAeBJD_0FSbz8VfK9mA9FRmEVPwI,1133 +torch/include/ATen/ops/fill_meta_dispatch.h,sha256=PulYSplLtOtLRK9kRr8EoAnppi9xv_1qUW7YmR98Oyg,818 +torch/include/ATen/ops/fill_native.h,sha256=DisSnf6JGUOYPjE5Hvu67pfIgiA68Yl2omvfO3iPwfA,1526 +torch/include/ATen/ops/fill_ops.h,sha256=AjIKTBH9ovot4q726ucTKou1y1Os8GXtGVpzaVcte1c,4412 +torch/include/ATen/ops/fix.h,sha256=opT9EwMbMNyFmMxCRjY-1iD6tA13rdjAEXV0EWM_r2U,1107 +torch/include/ATen/ops/fix_compositeimplicitautograd_dispatch.h,sha256=1rAxNUzvAbucElQo95oHWe1wygZVKF-WEyl_Pj12c-Q,960 +torch/include/ATen/ops/fix_native.h,sha256=FyH-3X26dxEa8ItClewcVoiqF946epkS4LUJVtNGs4Q,596 +torch/include/ATen/ops/fix_ops.h,sha256=lCmpWYwGNLDF4l7pqtDyxkO9Gtk0_hSCN1Z1YBYMgRc,2095 +torch/include/ATen/ops/flatten.h,sha256=cS_XzatY_vh4PW8gWVy3oUL1CqT2fMq9uxHnYcO04-E,1617 +torch/include/ATen/ops/flatten_compositeimplicitautograd_dispatch.h,sha256=rm0Uvy4fOf8eOGuu-oQHmobX2FZOwNqV79yT4vUfVek,1136 +torch/include/ATen/ops/flatten_dense_tensors.h,sha256=Uy1c040BV8qK92KZtrWWx-i-U6qedMm5NLHCVfgxFtA,681 +torch/include/ATen/ops/flatten_dense_tensors_compositeimplicitautograd_dispatch.h,sha256=0lIlNllTecb53AhiJcPd97M7aH8sir5Y4rIEYtfZs7I,778 +torch/include/ATen/ops/flatten_dense_tensors_native.h,sha256=ER_0Geo91mwElASTAxhnl9TZ27iNiAlLvuNQ3X0viUY,490 +torch/include/ATen/ops/flatten_dense_tensors_ops.h,sha256=F0Bewv2ki-z6zI1VibH3U9HX49IaFq-kqTPu3QXtLR8,1011 +torch/include/ATen/ops/flatten_native.h,sha256=r7wFPkRZHUVLZMP02HZWHctccl5Jsku_XXG8f-CccmY,848 
+torch/include/ATen/ops/flatten_ops.h,sha256=PQrBHQVW921UmWGQ7N4494NXmxYbu6kmzU5l1Cn77BU,3429 +torch/include/ATen/ops/flip.h,sha256=jhydEX7Jy2Kq9xRATDo0IvBubIcu9EEeQl0Bx261C9g,1104 +torch/include/ATen/ops/flip_compositeexplicitautograd_dispatch.h,sha256=-r_AVB4zpyiBpa71S-Hbhx4tKU98SysKQpsHszhozP0,907 +torch/include/ATen/ops/flip_cpu_dispatch.h,sha256=lL16AEyy7O39BJk2Gq0UlEvRDYU97F-RfJRMQc0upa8,740 +torch/include/ATen/ops/flip_cuda_dispatch.h,sha256=WhurYoQcJKvC2LwKjm7EpRRo3WxRGSd2K7s8um6BNLc,742 +torch/include/ATen/ops/flip_native.h,sha256=xoeiZxndc287FqeHzmFVQzPI0kxLWILvhV7JX6TJU2o,594 +torch/include/ATen/ops/flip_ops.h,sha256=cEM7ct1DHXkklKTn11nwEwkCteDx7ciCK8CbTIHt7qA,1724 +torch/include/ATen/ops/fliplr.h,sha256=UciPNvEzvhOnenmnUx_ivilPse-xCsGELrtZVBb60xM,614 +torch/include/ATen/ops/fliplr_compositeimplicitautograd_dispatch.h,sha256=9XV_gwYUMMmToJ3jz7Xt17O1Rq9U_ms2846qAHuoPOQ,764 +torch/include/ATen/ops/fliplr_native.h,sha256=s-n8tR6ML_M_Uv9CmKKpgMU5tXAU83bssnfNt1GCEnY,476 +torch/include/ATen/ops/fliplr_ops.h,sha256=PtMIEuYnpg-8O3a5I7WmsMue4vFjNi5H8txZ5CGCA74,967 +torch/include/ATen/ops/flipud.h,sha256=Trv7mc42t1HnHlOh_rsbDaWa4cxsQYwKG_AILlunul8,614 +torch/include/ATen/ops/flipud_compositeimplicitautograd_dispatch.h,sha256=mbHibeplISZiLHUH_R0-CtytsxvIvnb4VoU4Lw1w7c0,764 +torch/include/ATen/ops/flipud_native.h,sha256=J18-dEhEu3-OSHDiUY7n_hhHpYMLDYKFYmehMeif0Lc,476 +torch/include/ATen/ops/flipud_ops.h,sha256=0AEMwOx3jDxlgizGygdHUlGRVCF8RMZ5j7urURRUbvU,967 +torch/include/ATen/ops/float_power.h,sha256=-NGpZwfQOFy6BeQgno4hsZqRaApFmNe3D-5UqdNupKk,2908 +torch/include/ATen/ops/float_power_compositeimplicitautograd_dispatch.h,sha256=xMb7SNBUrwOKBn-DJ28syR1Vq7tEon-BaC-QtFAJO-g,1819 +torch/include/ATen/ops/float_power_native.h,sha256=JY-Htam-2YRyWWbMCU1Na8k1psFdISFoDisi9FLOR-w,1192 +torch/include/ATen/ops/float_power_ops.h,sha256=RBT5aLwP2qDm7UBlhu_KEJzKDI-vBEMaRLEzqBCNaPw,6093 +torch/include/ATen/ops/floor.h,sha256=U_eq1y-FhIajUGFlEh0jkDZsi1wH16a3p29C1fIc6Vg,1133 
+torch/include/ATen/ops/floor_compositeexplicitautogradnonfunctional_dispatch.h,sha256=FSIm3qQRi1-5baZ2ID70SgUY0jQLlg2n2Iuz0erAj8k,839 +torch/include/ATen/ops/floor_cpu_dispatch.h,sha256=1HJEpSrpqg8gZcfcCCNZVhMiHclY0Zd5ihTTmGW6l6s,924 +torch/include/ATen/ops/floor_cuda_dispatch.h,sha256=ra0_yJwg-XznM-CY2zHiClZO4rosku8k3tZt62VEqh4,926 +torch/include/ATen/ops/floor_divide.h,sha256=dMTIFfu3NBgaUjZFB_H-61odxMqFhHxlshqerlE6s_U,1967 +torch/include/ATen/ops/floor_divide_compositeexplicitautograd_dispatch.h,sha256=v4JOo2ZeNw04rzype5BbY67n3Y4dVqvrlBUK8ZQLCDE,1100 +torch/include/ATen/ops/floor_divide_cpu_dispatch.h,sha256=Z3r0UdfFs-NFum3XuxIZ57rJCzG9LSPwJkzmla3p9tY,1056 +torch/include/ATen/ops/floor_divide_cuda_dispatch.h,sha256=4QOyX8Rh04IPkn6gYnB2fiTA_6OFMkjQ6XGkEBYXoe0,1058 +torch/include/ATen/ops/floor_divide_meta_dispatch.h,sha256=QilrJ8wT4vLGLraRxa7tU_j8fHqFZklIW2_3Vpa9u98,751 +torch/include/ATen/ops/floor_divide_native.h,sha256=A-zhUY5NghoKf8WdarWdfYRTNAkHVkm8q5bQxBQ5Hdo,1295 +torch/include/ATen/ops/floor_divide_ops.h,sha256=qMuqqKKDDAJXi2VS8Sbjo-pBxOJBXasjN4-eQ9vJIKQ,4515 +torch/include/ATen/ops/floor_meta.h,sha256=4QN8jnGsSLYKvqx8rRE-qMGmwvBPX24R-k7OiXN3iD8,574 +torch/include/ATen/ops/floor_meta_dispatch.h,sha256=Q0VTkuZDFb8pqAhmn8SrBhdf9wNGXhRyilLcb7nsZzo,926 +torch/include/ATen/ops/floor_native.h,sha256=aWVbxdZJmp9OTbLJB9yfKOab3mjH1-jibd2-7OJE1UI,1007 +torch/include/ATen/ops/floor_ops.h,sha256=XHBI3mxNLkfYTqoZqXt0g6Dy5uCFR3T76eigYDQqC_4,2113 +torch/include/ATen/ops/fmax.h,sha256=kkE6EzVfF2WXcMNGwmkKdpinYLPDn4YrIIOkvSkYOPs,1125 +torch/include/ATen/ops/fmax_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rYm_4zVW5tkQ6Z4ASegs8oV0rPCn9UmldA4B_EQpPDI,814 +torch/include/ATen/ops/fmax_cpu_dispatch.h,sha256=xC9MR5ByoXgZ9Od6ubz4U2UknfEudbmvsL_kYAgtKmY,949 +torch/include/ATen/ops/fmax_cuda_dispatch.h,sha256=Aqz3snnSr8Yc1S67AXDpNwR-bHePeaXWgH6tEHOV_Hk,951 +torch/include/ATen/ops/fmax_meta.h,sha256=5omt_Pkr3e0pihGRelHfkKiuJ9xX7yX8J3LPe7tpjok,599 
+torch/include/ATen/ops/fmax_meta_dispatch.h,sha256=DAKrhaUAbCZkXv5I4QfIZny8ccrC6ZR5EjPGgz1Ft1o,951 +torch/include/ATen/ops/fmax_native.h,sha256=NCkIhAgZx3w_VRIuKrdZfv75mh6Xmdo0J0Yct710_5Y,616 +torch/include/ATen/ops/fmax_ops.h,sha256=O8VSpLQgambLeo_QjsXQfHMs7ZSgOVxSHYjPtIPDPfI,1750 +torch/include/ATen/ops/fmin.h,sha256=5M0b-tgFf1MTP1Vvz9ZwHmDmNXt0XbA2rqeUlv3yTsM,1125 +torch/include/ATen/ops/fmin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=tghe8Z72h6NhcS1me_RIjXKsmXBh3xa4auY1hON-pLM,814 +torch/include/ATen/ops/fmin_cpu_dispatch.h,sha256=guGGZ-3PCXFQGd8jZ2P0giHgurCwJSWt6A1IhExwtYQ,949 +torch/include/ATen/ops/fmin_cuda_dispatch.h,sha256=Sbk0kGzsBHYoI3fprtA29pCk3H4-bA9CfFSFPBhErlk,951 +torch/include/ATen/ops/fmin_meta.h,sha256=C_2-BwtDnLs4MxFe7jbn47q7vdlpS14nHgdAjbls3pk,599 +torch/include/ATen/ops/fmin_meta_dispatch.h,sha256=G0x71CFiMKa5IQL_XM0aUvTvchZ33rpbqP5GUVbk-Ws,951 +torch/include/ATen/ops/fmin_native.h,sha256=WmNef6bmOK21QceoERKSp0ENhk3lr5H2iBwErT22erk,616 +torch/include/ATen/ops/fmin_ops.h,sha256=wSZRYUccj_OmgrHhQQsVfxsoFXJ4JZW1KFyVjIxtGMQ,1750 +torch/include/ATen/ops/fmod.h,sha256=fQtl8_uLTDOSV4mzIghpoAU9yNjSVqhbuErybwSZ4rY,1857 +torch/include/ATen/ops/fmod_compositeexplicitautograd_dispatch.h,sha256=251IW6ZRK6qalybZGy_y90lhXwb0S9bM-7ftj3eU_-s,1068 +torch/include/ATen/ops/fmod_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5F5_7nBrp4Uz4mC5vP3MU7of7dy7rr0ZQWpJinlSHVI,889 +torch/include/ATen/ops/fmod_cpu_dispatch.h,sha256=pJsiXx87F2cIOTeJXt52oXY3Fst3qiPrwcs5dHUKYvw,1024 +torch/include/ATen/ops/fmod_cuda_dispatch.h,sha256=E8aPSaegD2ja82Mani5LuyyjBuFdtv23eb9xN9cIu_A,1026 +torch/include/ATen/ops/fmod_meta.h,sha256=tXi10E6Wr5hOtMo6F1h7fZP-uTom6PO9w6bh3ZxTDII,606 +torch/include/ATen/ops/fmod_meta_dispatch.h,sha256=4kze2hOScbx16l4wxC4VIjGlH9wsDVGIIzquxpzlrP0,1026 +torch/include/ATen/ops/fmod_native.h,sha256=VK2JR5mea2r8PZsFnilIoGkxg7IysG0biHSo8O41pPM,878 +torch/include/ATen/ops/fmod_ops.h,sha256=mYp3x0sUoKSO3S-HC6mfl0ZREOVgWkZPuc0N4LMM1EA,4412 
+torch/include/ATen/ops/frac.h,sha256=CzIU6iYiz1oGC59adafuS9ngXqGS8VQkRvMfFdD3oxg,1120 +torch/include/ATen/ops/frac_compositeexplicitautogradnonfunctional_dispatch.h,sha256=W2V1FuAyQb8zcEn4avkmF3ud0GpTANgaosSqk8cio3Y,837 +torch/include/ATen/ops/frac_cpu_dispatch.h,sha256=gNULicNZORnxMkYnnGxYwWLv1rrk61j5eFx1bn171MQ,920 +torch/include/ATen/ops/frac_cuda_dispatch.h,sha256=19Mv27gfEZmjJfOyGx7Kx01XrKVwHGjcEfK0jUCbAWc,922 +torch/include/ATen/ops/frac_meta.h,sha256=py28L98h3ril8xM5yw1CRyiHLdpxWhi_1767wUxPvnI,573 +torch/include/ATen/ops/frac_meta_dispatch.h,sha256=qqFSiVycZ_nbQw2XZ1o01fXIVG-JvTS39K7YJN197fo,922 +torch/include/ATen/ops/frac_native.h,sha256=kwjdxujo_Qt4l75tp_irpIY_Blyqvehu98N2UpfanP8,998 +torch/include/ATen/ops/frac_ops.h,sha256=t28YiOy2C-VCJDBWRri3bSViyNt7Q80l9KlUk1X72wI,2104 +torch/include/ATen/ops/fractional_max_pool2d.h,sha256=iXE8tFZ5TMBsipdO0elZ7OizjwWBrg5EkUhxKRxBSCw,1997 +torch/include/ATen/ops/fractional_max_pool2d_backward.h,sha256=JDRDWdYz4Bm9yBSVULeRbEE5kq8fSscAE6HmlDQLRAw,2040 +torch/include/ATen/ops/fractional_max_pool2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=D7Fq_HvtOmaZNO-JmS8zMAaKmGTg-nhPE_QATnmy4BY,932 +torch/include/ATen/ops/fractional_max_pool2d_backward_cpu_dispatch.h,sha256=F8V1UiuYn8eLLp8-Z4VqUXkgy7ZP7fhNUKK3iDijiDg,1317 +torch/include/ATen/ops/fractional_max_pool2d_backward_cuda_dispatch.h,sha256=z4iHw57ZlWA8l44p9B0z2g2Msyofddy2TOW5mf3NnDo,1319 +torch/include/ATen/ops/fractional_max_pool2d_backward_meta.h,sha256=i0fEcDa7sRFuMBAajrQ6ygk_JvNKfAOv8pjqdBOqzeo,717 +torch/include/ATen/ops/fractional_max_pool2d_backward_meta_dispatch.h,sha256=Mx_5XZRVJ1m5StO2p5rVK9D_zyprPb2OC8m2qlr6428,1319 +torch/include/ATen/ops/fractional_max_pool2d_backward_native.h,sha256=DZ2vs6jOReTZF3bcJEA0ene1UWUHsJTAWxx08BhPnWY,1108 +torch/include/ATen/ops/fractional_max_pool2d_backward_ops.h,sha256=Ht1qL5r-i0FliHmA7jtPYgtMqRHUxqzY7EyQJx7FSKY,2548 
+torch/include/ATen/ops/fractional_max_pool2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=iRQhoaCx7-QyzYW6Lv93EwBwVEDmqxNEd33AEWlCe9s,923 +torch/include/ATen/ops/fractional_max_pool2d_cpu_dispatch.h,sha256=YPDvgDmFvmY7yrfo7zqELDAWB5j5rnlXisfSn2pNj4E,1330 +torch/include/ATen/ops/fractional_max_pool2d_cuda_dispatch.h,sha256=lDcDU4Bj8JkJF8aIYbYMWR8koVjxWVulMtlWFaEMxX4,1332 +torch/include/ATen/ops/fractional_max_pool2d_meta.h,sha256=_BGBu9letV0Wj6lxFFCVuBXXpcLca0qjJ4gErf-NoBY,683 +torch/include/ATen/ops/fractional_max_pool2d_meta_dispatch.h,sha256=958iJSudzKBpARsyJZ2Sdpiv3ckBa-cwGu39qcMyxFU,1332 +torch/include/ATen/ops/fractional_max_pool2d_native.h,sha256=ie05or9YrbJeVrF8cmNL6E-dpH9AEYYS4ity-wYMscM,1069 +torch/include/ATen/ops/fractional_max_pool2d_ops.h,sha256=-6NSekqohHDEBj3E2vMa8Tdlg39ALvIHQ1jsk8uKCac,2562 +torch/include/ATen/ops/fractional_max_pool3d.h,sha256=tBswzRfmwlAzpxfIx1rIsR1QFGTYQK0MYqj9iFjGnYE,1997 +torch/include/ATen/ops/fractional_max_pool3d_backward.h,sha256=hCr-lZeriOuy0dTG3Fvg0tqqHXCN726r8yFLRCB2UPM,2040 +torch/include/ATen/ops/fractional_max_pool3d_backward_cpu_dispatch.h,sha256=DzmYqWlyLGQ56bt6q1LB0gu7UUVOM4I6_fMT5ZxZjqw,1317 +torch/include/ATen/ops/fractional_max_pool3d_backward_cuda_dispatch.h,sha256=YWehyENpjOCpl53WCN_LQ2tefhMdNYnCkqnpL9kXFyg,1319 +torch/include/ATen/ops/fractional_max_pool3d_backward_native.h,sha256=GvWZnZiTLbcgbAJlSSV8Bv2UlgC9i40TnZ8W4_uDSFM,1286 +torch/include/ATen/ops/fractional_max_pool3d_backward_ops.h,sha256=3PLVYp2ZAvfFTcKOSZmD1liq767msfIFOlI14P2Cglw,2548 +torch/include/ATen/ops/fractional_max_pool3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Df3mXbR3ZSkyBn_po-aw0S_klTBtq6W6cL7XuQsjO10,923 +torch/include/ATen/ops/fractional_max_pool3d_cpu_dispatch.h,sha256=5XSp9ULXOMzUcXdZhognO_kylqK4rJ7uqLqoO0JTrgA,1330 +torch/include/ATen/ops/fractional_max_pool3d_cuda_dispatch.h,sha256=XF2IigJH3aePEw4MLmQvrel4zghnJc52O-F5lZZxi-E,1332 
+torch/include/ATen/ops/fractional_max_pool3d_meta.h,sha256=-k2C_4T2gu_1kWHy-ysr9h-HiaF6WQpPrlEQsYSclIs,9807 +torch/include/ATen/ops/fractional_max_pool3d_meta_dispatch.h,sha256=_AlLnae2SvCij3j5DPZQ0a3_GuxGh_BtKrr4bVBtaxs,1332 +torch/include/ATen/ops/fractional_max_pool3d_native.h,sha256=kRaD_7o0QQqMg5YGUgwnZSK69T3p-4QmWX-5fO-wUug,1339 +torch/include/ATen/ops/fractional_max_pool3d_ops.h,sha256=zpgaUGkcDQ6kxduMN2knZc-AHsCRfJcYmKccjFAKi2g,2562 +torch/include/ATen/ops/frexp.h,sha256=NMtbl8MH1zvOE7VSFWC08ImRBR6YSkP_3WWwb_UjykI,1345 +torch/include/ATen/ops/frexp_compositeexplicitautograd_dispatch.h,sha256=C5T-EyKRo4gmsuvd-iuV2vgx9Ja-Y_aE_Csd5VE5m4k,788 +torch/include/ATen/ops/frexp_cpu_dispatch.h,sha256=AgQXoTT3yVNRYAFBAT62HBou_K9BHfutH_1lVLkGvmk,931 +torch/include/ATen/ops/frexp_cuda_dispatch.h,sha256=8St131-MWHi5P3PMunUH2wXjpgCEbcawIzPduqZvQYs,933 +torch/include/ATen/ops/frexp_native.h,sha256=rOmhgqbvbCK5jjPpeWDX5a1FqBy_zPUmHRVisk2Mj5c,632 +torch/include/ATen/ops/frexp_ops.h,sha256=ALC97KfwOW9vdr95BLjCPxzO3QjcEq2CvJC0v1fRtmQ,1937 +torch/include/ATen/ops/frobenius_norm.h,sha256=oU6Wto6hgqYTebcspAJL0C3Ul5grompooR0olnxsAcc,1347 +torch/include/ATen/ops/frobenius_norm_compositeimplicitautograd_dispatch.h,sha256=Vplafky-TGtQHN_wEUvSKvkm-kTjOI8s7J07QLQHaDQ,1062 +torch/include/ATen/ops/frobenius_norm_native.h,sha256=upzPZmLV_3W8l8Y1Ol05CfgjYbSyvKnY7aqLiGCbB9Q,646 +torch/include/ATen/ops/frobenius_norm_ops.h,sha256=AvnFASOYEQJoqNmYP64uS5WKe0Q9iIlydEG5YOXmTAE,1899 +torch/include/ATen/ops/from_blob.h,sha256=vqG2laZSL6_gDQ5pfyS5dYndCp6fuANp7EXO6YxzDVo,4163 +torch/include/ATen/ops/from_file.h,sha256=P25wyFntkVd_P7KlgvCjDOzs4s4bE2IYdFNtIUQVsg8,2185 +torch/include/ATen/ops/from_file_compositeexplicitautograd_dispatch.h,sha256=RBl2aF8FqKZyQj5nMICd2S6_hVsTtcTZ3ak2EcnfUr0,1016 +torch/include/ATen/ops/from_file_cpu_dispatch.h,sha256=YRM2fuVveXR2HDnFF0OLYmGcC0qvKbePekNJBXyvwNY,1098 +torch/include/ATen/ops/from_file_native.h,sha256=D0GJoGwSIoyfHn9_OiWHu0eGY1TNd6fLSt9HNRGHSCc,860 
+torch/include/ATen/ops/from_file_ops.h,sha256=zRUp83wgeEP_RybhCAukmB5uOyklzdXPzLbIxkE-Ghw,2516 +torch/include/ATen/ops/full.h,sha256=_-EZsxfRwQNC57O3qMmKkH1maMSz5090GxH1utUwMpQ,7934 +torch/include/ATen/ops/full_compositeexplicitautograd_dispatch.h,sha256=fKR5xDo5VwK3HAR2hqvej_TBuobBV_RzxQdrMiwLVBc,2546 +torch/include/ATen/ops/full_like.h,sha256=mKBJcpYRhcQ16XgQEtCUrj9nU80VlLZ6Q07uICdc7dM,2433 +torch/include/ATen/ops/full_like_compositeexplicitautograd_dispatch.h,sha256=yGw24MrMiaK7Ei490idnvc3vdiTh4x_u3ow9shSrrW4,1512 +torch/include/ATen/ops/full_like_native.h,sha256=kuVypWwUkAFeJ9j7DQUU-nGGRprUS5UdapuRVHuPd48,892 +torch/include/ATen/ops/full_like_ops.h,sha256=5VjHTAFEzUlYCrIxMILQH_5JGkyBTj82bbyBoveexMw,2640 +torch/include/ATen/ops/full_native.h,sha256=ZbPDRmCFRJZOuDxcT_GvFnhuDJ5nyQ0QB2MYS2W4Gys,1190 +torch/include/ATen/ops/full_ops.h,sha256=QSy8hH0Qn6VFQkE34kOyhhe-WOzzRMs6eCZzilRppaA,4407 +torch/include/ATen/ops/fused_moving_avg_obs_fake_quant.h,sha256=v76jK-JK--D_LK26fom-zhzxuE7Cs6BmbFMMAGJKHhg,1429 +torch/include/ATen/ops/fused_moving_avg_obs_fake_quant_compositeimplicitautograd_dispatch.h,sha256=XFqA-I8sGlLALfDNmDK3A2M13mx729qZA-RwCh95_Lo,1090 +torch/include/ATen/ops/fused_moving_avg_obs_fake_quant_native.h,sha256=AXDi80_YM9CsSUVhnr8Cf_bdgtFAPbq56YLxQ7g2GOI,802 +torch/include/ATen/ops/fused_moving_avg_obs_fake_quant_ops.h,sha256=BakBxOPq2_b-wDUWfhA0ZPrVewSgjNxhkdHD5rxBbeY,2019 +torch/include/ATen/ops/gather.h,sha256=RnxYbryBjDjq2S4Ia75Rf2MOayUWt6sYAKJdRc0mwpE,2405 +torch/include/ATen/ops/gather_backward.h,sha256=i861TPu0KRLyWdOfxS6gpT9RcCVBZ13Hl6OMsaAEINQ,817 +torch/include/ATen/ops/gather_backward_compositeimplicitautograd_dispatch.h,sha256=RgBHAj_gl1r5mQeI7NFnyIVpwmngidmOrXQVqm9dTxM,855 +torch/include/ATen/ops/gather_backward_native.h,sha256=4Txo1OeZkpNW6ib3wkjV2RyM4Rt-jylwL1xuE4M9Vs0,567 +torch/include/ATen/ops/gather_backward_ops.h,sha256=LswmtEQfl8ilD3cq0p-bjkev30T6mg2iFi0SCOVugw0,1267 
+torch/include/ATen/ops/gather_compositeexplicitautogradnonfunctional_dispatch.h,sha256=-ErXazNAujTIXpCCEuJwv92j1FwaEo1Inge9rqAP5O8,853 +torch/include/ATen/ops/gather_compositeimplicitautograd_dispatch.h,sha256=3xQOOuztKR3h5rAEFkB2__OoIHwIsEQWU24eAwOhoS4,1116 +torch/include/ATen/ops/gather_cpu_dispatch.h,sha256=iktZfREucwqpg7QzlrKHc3qnrccMdxt9Vz7lU_X90Ao,1060 +torch/include/ATen/ops/gather_cuda_dispatch.h,sha256=8ZAxgji8Dnl1NvdRxbs_HZnwFrkAG6LuZc4Qs1nrWD4,1062 +torch/include/ATen/ops/gather_meta.h,sha256=JiiYpltfPT6OmmWSR_rwEj2I7iFBpXWVoSGZ6-8-lvY,632 +torch/include/ATen/ops/gather_meta_dispatch.h,sha256=UrzpQw1xHVUL8caKQVTHH1RB25UTWqhFipEDQ3_kqKQ,1062 +torch/include/ATen/ops/gather_native.h,sha256=lj9UeeAWq3O9TRGX1tLtkGJqbXXqsQMjIl3xyqmcsUE,913 +torch/include/ATen/ops/gather_ops.h,sha256=EdGa_OY3Q7WqxtCkyxtStjVcNd0cloomyvqsoTxb88A,3615 +torch/include/ATen/ops/gcd.h,sha256=2UrJuhjoVE6r13RCU4DKlFmoUypIss3cpzMND0KHeg8,1295 +torch/include/ATen/ops/gcd_compositeexplicitautogradnonfunctional_dispatch.h,sha256=oYwr05ANpnT363tYor-dPh5OFXFoUrSiDAV91MAtGIA,887 +torch/include/ATen/ops/gcd_cpu_dispatch.h,sha256=l4YAY3lA9xO4eVQeROYHO8xm4Z6hnzi38B6ij_lj-tU,1020 +torch/include/ATen/ops/gcd_cuda_dispatch.h,sha256=zjshszA5noT6rIeVL_-kYsFT02KiQbBIfJ4nMykq6c8,1022 +torch/include/ATen/ops/gcd_meta.h,sha256=5yVgaKwZl6sT8D8_tqt-Af3uv9vobtYRoTJsYtGZiMI,598 +torch/include/ATen/ops/gcd_meta_dispatch.h,sha256=9kvofJJIPymWLcuVdyQTph15kr7Xcf0EBojR-aivgCg,1022 +torch/include/ATen/ops/gcd_native.h,sha256=msHMq6iFf4lxJB2IAsOfGBogQfY3bmrIZUURByjywtY,613 +torch/include/ATen/ops/gcd_ops.h,sha256=cjMxuU45a20Ugxq2FPKawzxkCRpStfc4tFNu_Lw8GdM,2353 +torch/include/ATen/ops/ge.h,sha256=KNBgK6fzROL2BCO6EqEnQq7s84692IGkkRml_cxzAUI,1819 +torch/include/ATen/ops/ge_compositeexplicitautogradnonfunctional_dispatch.h,sha256=cjmE8YjUbKJr2qoetaOOZa-m5kl9obt-pNmJdmaIm28,1034 +torch/include/ATen/ops/ge_cpu_dispatch.h,sha256=tRMpQFauiFzmEMU_eyMXuxGSyQGdwmcd_ciWApQZMnQ,1366 
+torch/include/ATen/ops/ge_cuda_dispatch.h,sha256=WpJp8XvQMVCmi0C-V_gLxRGkKfFfqW4ff8CAJkc19Ok,1368 +torch/include/ATen/ops/ge_meta.h,sha256=bTw7mPunxjkUz7AwImAKB62CkOed3wdFopsSLDmbl0A,751 +torch/include/ATen/ops/ge_meta_dispatch.h,sha256=CvQK0S14D2zdUH7i4-fsQiOrIYFUcG2Hs6xrjzfDSLs,1368 +torch/include/ATen/ops/ge_native.h,sha256=vYfnw_FjCFuPRXyGtG_97Q63V_3-ceYat3zT6DBgIjA,1295 +torch/include/ATen/ops/ge_ops.h,sha256=jZNR4gkcrVjJCGC8vDbj32EiR26frKmIlGbxGjg9sq4,4376 +torch/include/ATen/ops/gelu.h,sha256=QgG_O80C7pQC7vnNY4_EEA_i2ZlL70LOWf5qR62GlTc,1415 +torch/include/ATen/ops/gelu_backward.h,sha256=HUDfOQ9ooYi2UdM84TmzbDWKW8NAOrjOoAi0yoArRQU,1557 +torch/include/ATen/ops/gelu_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=LidVJsBV89MRQr3-axMjIZsLSlqtgRZAHntThFq8Q3g,866 +torch/include/ATen/ops/gelu_backward_cpu_dispatch.h,sha256=s3vhJTpyR2vxSRSiTK_Rm7UP9PFMP0P_6_OBimi3WW0,1112 +torch/include/ATen/ops/gelu_backward_cuda_dispatch.h,sha256=k_NUWbnYLchjNuqtoG2Sfp3nQztbT7vc3SosGypmcG0,1114 +torch/include/ATen/ops/gelu_backward_meta.h,sha256=NRtJMBvuiIChz_HyC3fmZ7aMPjNiig5JuakKmEuMMis,644 +torch/include/ATen/ops/gelu_backward_meta_dispatch.h,sha256=85Piox3vP6LC_9ZdrLu9ZNBC77VqyBE2gYi3IQ8Z7E4,1114 +torch/include/ATen/ops/gelu_backward_native.h,sha256=BZTMXNh2khzOksFrK8RD11OLZahkZkmMwjCFJs4fPZ8,1194 +torch/include/ATen/ops/gelu_backward_ops.h,sha256=TkSG-ntI2ddeVEZ5zxDLf-ECmlUORpxS2u8m3DyUfFw,2089 +torch/include/ATen/ops/gelu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=eLvTVbIYed1-qTh8AdWPZ96wGZRm-sWy3zmc_BRBx0c,911 +torch/include/ATen/ops/gelu_cpu_dispatch.h,sha256=FZXTnR5XNKvMoV-RDTWezdASU2cKU2z4w7DEhy8S9qQ,1061 +torch/include/ATen/ops/gelu_cuda_dispatch.h,sha256=AZmXiQ0RjUhtnv_KIoDA1DKqh8kEy3BOCblP73l09_c,1063 +torch/include/ATen/ops/gelu_meta.h,sha256=B2T2jHHmtP6e_kNokX0XZuYTk3Q1WcGo3IpZqR-T_Rc,603 +torch/include/ATen/ops/gelu_meta_dispatch.h,sha256=JFOhpdjAQkT_e7FEnHbujceWux7UHLHPuMA8H3ue83A,1063 
+torch/include/ATen/ops/gelu_native.h,sha256=2W9ZAeiS5vkIx0lNQgIFATwEaBREfKT4IrhuJDT1UVw,1400 +torch/include/ATen/ops/gelu_ops.h,sha256=jcv8wIbypkRCALP6kBMEXrE1xF-E7M2oFKahCrC_63o,2416 +torch/include/ATen/ops/geometric.h,sha256=t48yWL6Vs0KMZJ-2P-4K9MJofIy-6EBrdh8igpPFUE4,1373 +torch/include/ATen/ops/geometric_compositeexplicitautograd_dispatch.h,sha256=TUd0uqBKo9tR2-7IS94TsbZ_h-vIXJ0kYU4safr4hBk,1116 +torch/include/ATen/ops/geometric_cpu_dispatch.h,sha256=KlFg1l-OJ4GMzbz-KE6o4m3TmAoY9nrW2P4GTBXxGx4,787 +torch/include/ATen/ops/geometric_cuda_dispatch.h,sha256=ADqP9Mo98wuZgxm4Ta7UjmqsOc66Bm-E5CY3hTNX6MU,789 +torch/include/ATen/ops/geometric_meta_dispatch.h,sha256=wnReH5HldE1hdugv9ZgAEdIeJnFwM8bAUBkmXfnwyUI,789 +torch/include/ATen/ops/geometric_native.h,sha256=1VhkRzs9szXZwO46cEpoAOuwEdW1W6HPhyyqsELkkR4,800 +torch/include/ATen/ops/geometric_ops.h,sha256=HzuOY5E-x7VkJsGn1wVpmb4TSy8qf3JvenZ2XTFNklY,2695 +torch/include/ATen/ops/geqrf.h,sha256=ayHq2QxJvVI4MsR5LS0Eed93aIgEV7ixSgjVELQLxO0,1187 +torch/include/ATen/ops/geqrf_cpu_dispatch.h,sha256=MnTJuO9R1jrWbFLy79DU4MIAXQXgFvmr8cSHQWsoNkQ,985 +torch/include/ATen/ops/geqrf_cuda_dispatch.h,sha256=WQip14-Uo9guM7oBv7vdJsptgdJ1nbf99KMgo07ItYU,987 +torch/include/ATen/ops/geqrf_native.h,sha256=B5TEMLxI5qcV36gwxJgS3ZCEqF4kORw21ojl0yTRfKU,620 +torch/include/ATen/ops/geqrf_ops.h,sha256=pM37zThjjJLd7PMfwuzvx9nKUNeEXPSoDB62-K6sET0,1830 +torch/include/ATen/ops/ger.h,sha256=LBUB1_JqHiRlScleXtmNIV5i7rqvxw3XSHPq-qm7P_I,1106 +torch/include/ATen/ops/ger_compositeimplicitautograd_dispatch.h,sha256=y2TnESZk-XQq0Puu_XfqIxCW8LQ8T0yZor9bL-vYC3E,987 +torch/include/ATen/ops/ger_native.h,sha256=oD07x4gMEW7ApiOl_ytYCc_mcHF1PPlkY4j5ma08ySY,598 +torch/include/ATen/ops/ger_ops.h,sha256=JiSjkYG7eqLEvo5RtLTENFaVoq3yI51tzOJLxswMJrA,1738 +torch/include/ATen/ops/glu.h,sha256=I_aBDUpELFUJYxQY2mezzy6NP1X0on7ML2iAURA8nKw,1070 +torch/include/ATen/ops/glu_backward.h,sha256=y20nlM6yDo0FHjZzWjBl1V8IidBtPEJAMSRodLZtE5Q,1410 
+torch/include/ATen/ops/glu_backward_cpu_dispatch.h,sha256=lQ4iPRYT1IvRyy_f06PnowzyH8oqxKhgyBPFO-AWGjo,1044 +torch/include/ATen/ops/glu_backward_cuda_dispatch.h,sha256=ElFssOj8pVG168yy0E8uiyTUFDWI9hGuWIk9koiH1AE,1046 +torch/include/ATen/ops/glu_backward_jvp.h,sha256=eLtCRIixqgXiA9BnIpOktzygzhMXkMZmoZBOnjkbuW8,1767 +torch/include/ATen/ops/glu_backward_jvp_compositeexplicitautograd_dispatch.h,sha256=YarW-uwH0b0Ejr6MYuGMQVRgvg92VO0Y2cuFeCr-oWE,1125 +torch/include/ATen/ops/glu_backward_jvp_cpu_dispatch.h,sha256=Uo4n8sIPyTkP1BABdijnJXlCIkixZlS90aeVg9lYGyE,849 +torch/include/ATen/ops/glu_backward_jvp_cuda_dispatch.h,sha256=kTS7w4HjDPgtHfCRyPae5Bas41-DCaQ40XQsYt2k5Io,851 +torch/include/ATen/ops/glu_backward_jvp_native.h,sha256=zsD6LiHsWJm7KLClxsiL6jvJ4_4AoPj8yDh6T9MTZrI,812 +torch/include/ATen/ops/glu_backward_jvp_ops.h,sha256=78VafEYwVaCZQSn7Ru3yc1oUbkuubAHvuAgU5UQurps,2438 +torch/include/ATen/ops/glu_backward_native.h,sha256=QgBs6rrw1QsDgv83OzhXOlPt9_w7KEXhTvmdNVELmxg,922 +torch/include/ATen/ops/glu_backward_ops.h,sha256=XG0-W9vZqA6XHuGLOjahZB0ux9GLStEE9OM1fFr_pYs,1964 +torch/include/ATen/ops/glu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=uYoGBVMBtBeSUbmPmOaInltz-sUUTCkVJAOK0u56tC0,803 +torch/include/ATen/ops/glu_cpu_dispatch.h,sha256=crP-5fbYrSPl3-M_poyyfcflvxmAHKYvAv21WZoDSLc,913 +torch/include/ATen/ops/glu_cuda_dispatch.h,sha256=l1TPcyaWIMpbNf8mPbeATW_FComn246sWixL6Fal-YE,915 +torch/include/ATen/ops/glu_jvp.h,sha256=5zCj8pHffUP_xRSN7K9tbmNsr0gF3IeRTPiZq-TsLAs,1305 +torch/include/ATen/ops/glu_jvp_compositeexplicitautograd_dispatch.h,sha256=k_lw9-e2cU3fU2gtT7T6Ks1ICKswtwxGlnAd0fRCdLc,983 +torch/include/ATen/ops/glu_jvp_cpu_dispatch.h,sha256=CNgzSjRKNFmB-qot4tuHWPzgzv-NPDmHh5EncwoQCmY,778 +torch/include/ATen/ops/glu_jvp_cuda_dispatch.h,sha256=E3eGmMAoXJBHHQM9R6O-i5mJxfvTmVsFQ2yRQobwv-g,780 +torch/include/ATen/ops/glu_jvp_native.h,sha256=mCpTqNDWGqJihabQdb6w7T4Rah9u_aW2EYCfL7MbqHI,670 
+torch/include/ATen/ops/glu_jvp_ops.h,sha256=lBK0hhzYwIM8w22sr2s5KNseYwG0aQNvPcn1vd6lH3A,1980 +torch/include/ATen/ops/glu_meta.h,sha256=SIa4UITUFERd_49qleCK9TZnpVOOH6RxH8hVNwt2ls4,585 +torch/include/ATen/ops/glu_meta_dispatch.h,sha256=Xwf774Ourowh70eqFz9gBQP0VdSoCJ4xAG5OYR1VqOM,915 +torch/include/ATen/ops/glu_native.h,sha256=teUYaznuc2kDmNHFKBc__KZE9z_sZr9Kl-ashZd7O00,600 +torch/include/ATen/ops/glu_ops.h,sha256=fYbX9FWhQenJNLIBMFkQ1K2uEVtnQrg7LjmYFB-LEGs,1666 +torch/include/ATen/ops/gradient.h,sha256=Poj53VhShOy-SGMfegPAXvuDr-NN6vR-pNVlIUoHiBo,2849 +torch/include/ATen/ops/gradient_compositeimplicitautograd_dispatch.h,sha256=tLJE7osgAx6rPTQ1JZFfoUFD7Ic_RP7KrFxmBq_uQ9U,1784 +torch/include/ATen/ops/gradient_native.h,sha256=YbW7u7VZXn20CASVAG1cWT_3t8KBeoQBBtEocPBQ_gw,1496 +torch/include/ATen/ops/gradient_ops.h,sha256=guN9kiMoedkbHB4PWbYWdBHYEYicbSBn9m3lIFZDVVA,6480 +torch/include/ATen/ops/greater.h,sha256=DAx1_z1IQB660ap49drSGv8kBKKNXzrfkShytblPaj0,1914 +torch/include/ATen/ops/greater_compositeimplicitautograd_dispatch.h,sha256=lbKvwzKcV77BSoYL8-6um4kdEKKIRWCAWz7fK4PWVjs,1450 +torch/include/ATen/ops/greater_equal.h,sha256=E02Va5miq7EdjIrBRGYmw4NTCp1YUZ7IgPKJQEmPn3g,2028 +torch/include/ATen/ops/greater_equal_compositeimplicitautograd_dispatch.h,sha256=5PVSe4ZwHrBnH65M_m1MlSlnfQ4tb5uGqajsmHOoQn8,1498 +torch/include/ATen/ops/greater_equal_native.h,sha256=OPZ5-7RCrIasic5GUFiKjE0Cjp7v8h5iXS2FHqTBeoA,986 +torch/include/ATen/ops/greater_equal_ops.h,sha256=v9zJcRoXdyHo1cdB_KhlgDOGshqCzjjZNWRur0mTFB8,4574 +torch/include/ATen/ops/greater_native.h,sha256=xe8_WVQBqtTwCw5rA3m6Qc2ZLp8lVTbNx5uU9K2F0Dk,950 +torch/include/ATen/ops/greater_ops.h,sha256=9mop-4kYGEcn7VQ2QwZ6ApMSHNQBANgxZTQt33UBT6k,4466 +torch/include/ATen/ops/grid_sampler.h,sha256=oyGClqnYSgmGmRJ1qw-JnMa4B_mtHaHiXT_Clu3maXM,866 +torch/include/ATen/ops/grid_sampler_2d.h,sha256=0HRrVco5WCbWIl8Xagwrc0wlU6cyZz6qydaqQqwooOk,1778 
+torch/include/ATen/ops/grid_sampler_2d_backward.h,sha256=v9UJT-Dqa9XkZ0JSWojsRChR6JGOqStBl4vifqqzoRU,2474 +torch/include/ATen/ops/grid_sampler_2d_backward_compositeexplicitautograd_dispatch.h,sha256=WF_mk4Q5ON0IqLSvXq5i8grWGrU1gJt_mFZKGfz_foY,1321 +torch/include/ATen/ops/grid_sampler_2d_backward_cpu_dispatch.h,sha256=rRMAfwtGcudMQ-ggTVDQk3XfT9Arxb01xT2zT3OpvsI,925 +torch/include/ATen/ops/grid_sampler_2d_backward_cuda_dispatch.h,sha256=ie6KxsszaVT2Rl1w38llNRmByVIIV8_jAHNiQxVlqL0,927 +torch/include/ATen/ops/grid_sampler_2d_backward_native.h,sha256=umcfUK0hRyquaNdFvmXF3_XJrgDesb3Ae0KORfpUl6c,1254 +torch/include/ATen/ops/grid_sampler_2d_backward_ops.h,sha256=fpGfE_u9yyOkhN_uYCtsRqoY7qC8QKsfIbRaDHN95LM,3004 +torch/include/ATen/ops/grid_sampler_2d_compositeexplicitautograd_dispatch.h,sha256=DdZ_puSwkm2nGBdv1fUrqB5c3kLMdkfzJTChUl1WNpc,1077 +torch/include/ATen/ops/grid_sampler_2d_cpu_dispatch.h,sha256=2Y4cKMMiJkyxfCNkyuIdVnWRDeoQtmvZ-fdIBKIDZlw,825 +torch/include/ATen/ops/grid_sampler_2d_cuda_dispatch.h,sha256=tnSJZgv6YQ2THedSiB7MDkgF3eh9xBKPDQaSCicxupk,827 +torch/include/ATen/ops/grid_sampler_2d_native.h,sha256=aAIsKlEjAPN0dLtXIe8PXEAlxUcawM1k90V_raHEFoE,932 +torch/include/ATen/ops/grid_sampler_2d_ops.h,sha256=0p9hEWk1UgijXQrY4_hPQVjAYlPkS1z82y4qxcdeHLw,2268 +torch/include/ATen/ops/grid_sampler_3d.h,sha256=ql1k_aNAb4UFXz5xfOzbQRLMHOEMQ2KQu0s_Yd2MvmU,1778 +torch/include/ATen/ops/grid_sampler_3d_backward.h,sha256=QyXJxcZRzZmvcOApLpYWkSLNJA21EG1cLSZlstJKVxg,2474 +torch/include/ATen/ops/grid_sampler_3d_backward_compositeexplicitautograd_dispatch.h,sha256=ZbI7r9KSyNaoT6U4fM6FjD2x1id-RThaVn02AXYMIz4,1321 +torch/include/ATen/ops/grid_sampler_3d_backward_cpu_dispatch.h,sha256=kD7aG-f4_nkVLdoP8XpzU8isEZkvEOlF8kaWy7L7OYA,925 +torch/include/ATen/ops/grid_sampler_3d_backward_cuda_dispatch.h,sha256=gKxrdwb-2IT51ijl8PXANrELMJgsmkeTifAxivYBGHU,927 +torch/include/ATen/ops/grid_sampler_3d_backward_native.h,sha256=DutqYvbspTgA-0FBhXbXNDRgqkvzezgbOqCA3cIUlb8,1254 
+torch/include/ATen/ops/grid_sampler_3d_backward_ops.h,sha256=Bhxg0yA_Qe1t2KEdmVEW2N1bmE7VZR6SNzEVVNUFuu0,3004 +torch/include/ATen/ops/grid_sampler_3d_compositeexplicitautograd_dispatch.h,sha256=iYQyA7rCoJ94mqtNvDWElBk1cVQahmB2pX6HCkjKGbU,1077 +torch/include/ATen/ops/grid_sampler_3d_cpu_dispatch.h,sha256=hH2MXILS_78WmrIAiejNC2j3qreFWT9fGlCuWY5uFwA,825 +torch/include/ATen/ops/grid_sampler_3d_cuda_dispatch.h,sha256=u4IULs0PcIya_JzRwIKmqp0axB2qsIqJkrIH8eRWR54,827 +torch/include/ATen/ops/grid_sampler_3d_native.h,sha256=cw1OVAhsA38JhgiXfY57sPh0iIkHqC_4FQ3P_zy9ZZk,932 +torch/include/ATen/ops/grid_sampler_3d_ops.h,sha256=YOJhka4TZVz9tRmPeFEODmHgIAh2P3BGh-c9pr2sut4,2268 +torch/include/ATen/ops/grid_sampler_compositeimplicitautograd_dispatch.h,sha256=4lhrlXaY14QaSNZOXYJ-5TlDARdmL0V780NgdiQngDE,866 +torch/include/ATen/ops/grid_sampler_native.h,sha256=yta0XC79IR_Y1qsdheBDC5knSrWYTT4epBuzREGvNl0,578 +torch/include/ATen/ops/grid_sampler_ops.h,sha256=Ef3IvwsM168R5yAFTqdoX5JWZcPBwKfkFyl4gj65V7w,1297 +torch/include/ATen/ops/group_norm.h,sha256=0I9jAkBWHf3ESGbCo9UQj78MqQgTi-a5X57j9Mct_uA,932 +torch/include/ATen/ops/group_norm_compositeimplicitautograd_dispatch.h,sha256=Vner38XN3HuPSNF9Qvcuv0HghufQRwqC5qBD0ji7Gqs,924 +torch/include/ATen/ops/group_norm_native.h,sha256=pfkN8NDbIX4H5pp7EckzzVXstZ2_PY1pUlhssNgVpD0,636 +torch/include/ATen/ops/group_norm_ops.h,sha256=WP65S84DWcp49NKao7wd1vh9n3Gd7DeppvEW85eqIlQ,1453 +torch/include/ATen/ops/gru.h,sha256=V7f7k9jaq_F9rZ8U0vpNcLZH9Z3VcVW5eJVV0Tt_FFs,1549 +torch/include/ATen/ops/gru_cell.h,sha256=jESH7xHtiv9g7K7t3n7LTB9z0L4qXz0dvsKWPO8Qems,891 +torch/include/ATen/ops/gru_cell_compositeimplicitautograd_dispatch.h,sha256=UmGvZYjJe85Zjo1saHKIiPervwhIjRupphiXAxNbJRY,930 +torch/include/ATen/ops/gru_cell_native.h,sha256=UQXnHDPMbb1oE20jHiJr2BRwuHbT1TSrPFs5IizPOH4,642 +torch/include/ATen/ops/gru_cell_ops.h,sha256=61KZJH2V1es5SSJRjj59y1Nx-3CTVH4SqIP3gqhzgGQ,1499 
+torch/include/ATen/ops/gru_compositeimplicitautograd_dispatch.h,sha256=xe0XpYVnxpc-1irRWnb_RBp7aHTIc6TB5_310usWhW8,1175 +torch/include/ATen/ops/gru_native.h,sha256=ABzde9UYDNg62fi8NPnn324xiAOsoFHkiSp_pmzH9_c,887 +torch/include/ATen/ops/gru_ops.h,sha256=VpqIa6ZCY-a314jkKdYZH21JrrS5-Z2_D20_rtb_zcY,2732 +torch/include/ATen/ops/gt.h,sha256=IL5u2L1IrVL4SC0inSFyVswzp85WJ50KAF3ups6q-RM,1819 +torch/include/ATen/ops/gt_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Z2sQFPwBYrYYaz7bYFG7f1Rbyc0lzuLB6j0FmZdm_e4,1034 +torch/include/ATen/ops/gt_cpu_dispatch.h,sha256=KvaYtu_1mtuxBINrePTunv1ibkZ9j5jbvDQKH55X8Zg,1366 +torch/include/ATen/ops/gt_cuda_dispatch.h,sha256=22MIm_dB38x-ppbu4fTD2OFy4SCwOKR8aQrdqh5dKVY,1368 +torch/include/ATen/ops/gt_meta.h,sha256=7T6sBf0piuqQ8_sEPJ30uxPaR2SA-q9HFgTsbu1ZqHA,751 +torch/include/ATen/ops/gt_meta_dispatch.h,sha256=1PUK0yKZPj9DMvmkqw0uyxD2HqpSBrnVh2r-4Joyw-4,1368 +torch/include/ATen/ops/gt_native.h,sha256=s0um8U1mQm45wiWPkOoRftpklHuJsK9bwOMuRa4TmgI,1295 +torch/include/ATen/ops/gt_ops.h,sha256=OLFyTV7TwK5E1ThATW9B8JfHibINNwRbLfRCc7k-8Oc,4376 +torch/include/ATen/ops/hamming_window.h,sha256=zFe4NglsgWmsqlxbwAVAOLlJvusnm8T3bgqyAiVDXs0,7015 +torch/include/ATen/ops/hamming_window_compositeexplicitautograd_dispatch.h,sha256=ngk_T-tHVj7GMt7soxv6uoNPzlBHN67IIwi0Wc4JTq8,2910 +torch/include/ATen/ops/hamming_window_native.h,sha256=dGYNZZYZKp1ElaV2bkEhZ9RcqWLyN0BTYPPrhH91NGQ,1842 +torch/include/ATen/ops/hamming_window_ops.h,sha256=didNbi08UiyLBCl0mK5aWoCChyftBIAEptJd2eGeZOs,8017 +torch/include/ATen/ops/hann_window.h,sha256=oYcr_p-wrHYvLUS0MWDfaCCzfO5V-bZfntjRZiG-LVM,3294 +torch/include/ATen/ops/hann_window_compositeexplicitautograd_dispatch.h,sha256=JxgJgwxa1OSAoBuGrQv7-0xKpRAJnuELHP1ZkjlHmaI,1674 +torch/include/ATen/ops/hann_window_native.h,sha256=oWQpxJEu5Scu5IE1c2JvHsT7z2HO5vdaNNnugM6i_mQ,1051 +torch/include/ATen/ops/hann_window_ops.h,sha256=alK6EZkqMI_hHargFuyiKo-89cO4X-Ekl6ylhHksfvM,3917 
+torch/include/ATen/ops/hardshrink.h,sha256=0KESwWuRTqNdtIkWFMQ8fcyAS70z0ykluxhB1hZNE_Q,1205 +torch/include/ATen/ops/hardshrink_backward.h,sha256=DcSjr59j3OAU8n6THuOSK5Ae41jDqFjf1tFQBuR_V6U,1513 +torch/include/ATen/ops/hardshrink_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=T97Gh9wvV3lqj-8FMqWKimFZCd1BDLJhaIGjh_oWe-E,858 +torch/include/ATen/ops/hardshrink_backward_cpu_dispatch.h,sha256=R6_yNVq93aeFWmIJQwtUvpO7ZWcNeFPiRQvJb3eZ0yM,1095 +torch/include/ATen/ops/hardshrink_backward_cuda_dispatch.h,sha256=FIxyjseyeGX2SM1DHrf3X1B0fniRAlp1C2-kyZh_vvw,1097 +torch/include/ATen/ops/hardshrink_backward_meta.h,sha256=ROJ5XAEYrMe6-FVL5f8nj_CrjseaLnb26IVdxWl4o_A,643 +torch/include/ATen/ops/hardshrink_backward_meta_dispatch.h,sha256=N4bTge3dwG34fwY83eeqGt-tC70WprBB5R25iPgkMB4,1097 +torch/include/ATen/ops/hardshrink_backward_native.h,sha256=8-8NK5HcPNqYdPfkYfQeZIeKLHjSB2HievfjY4vuXDI,697 +torch/include/ATen/ops/hardshrink_backward_ops.h,sha256=n3HibVd1chiWVLvnD_fnA9NbJ5x6YY4qhvFgkIze0K4,2072 +torch/include/ATen/ops/hardshrink_compositeexplicitautogradnonfunctional_dispatch.h,sha256=BEljD7K-wbeB9zh-lTiXgJoHXH4E68zTp2V9R1hGbC0,824 +torch/include/ATen/ops/hardshrink_cpu_dispatch.h,sha256=5br9nPVtUGtCf2nHZ0HOmQDi1Q2D9ZBhbhadwBIayas,975 +torch/include/ATen/ops/hardshrink_cuda_dispatch.h,sha256=tSLTM6Ml5Sl89xFsFfxdEqr62V8QIc85fFqEj7AFIDE,977 +torch/include/ATen/ops/hardshrink_meta.h,sha256=_6SL6Y6s6dtFrfGEmFqt-tH0MDWzixcooi2gLbA6Klc,605 +torch/include/ATen/ops/hardshrink_meta_dispatch.h,sha256=rzvVtNptBs_NW14dR1Y2efGfTJGzMzanxGYHSM25--s,977 +torch/include/ATen/ops/hardshrink_native.h,sha256=4RBWx3oG1W0nPFOXUDJVOFD3d6EufS0e6RD15gJV5mQ,634 +torch/include/ATen/ops/hardshrink_ops.h,sha256=re3NgmdlL7PmiOlhySkwbIrggNLOqM9NPn3njOXD64s,1794 +torch/include/ATen/ops/hardsigmoid.h,sha256=tyFfHjpjil7HUBcqPn8rXJzmsDg3apf2ZJE5JDtV2mE,1211 +torch/include/ATen/ops/hardsigmoid_backward.h,sha256=BSt85uWTVFU9hnYjtayL_0isRCoXnuymrONMfJs3jfE,1409 
+torch/include/ATen/ops/hardsigmoid_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2mr7MsdmJNhyb9nJ09L7eGAk2AHhQUP2FvUUF0z_LMM,836 +torch/include/ATen/ops/hardsigmoid_backward_cpu_dispatch.h,sha256=f-SY4XsJM2VoT_mdgI-0Y6Lv5Qtxlyah1N61MT0DjZE,1029 +torch/include/ATen/ops/hardsigmoid_backward_cuda_dispatch.h,sha256=V-FVIqG8zbPeVBh4CbZlQQymph4aWIfXTm-Vq0Nx_GQ,1031 +torch/include/ATen/ops/hardsigmoid_backward_meta.h,sha256=qGPm4BRUGHrxqevIyyfaYBNxgYEmmdQIA4VQ2QI3yeo,621 +torch/include/ATen/ops/hardsigmoid_backward_meta_dispatch.h,sha256=bwhCBuN0XZfiladaNyJqcpMITZEtxEk_jaix3YI9vRU,1031 +torch/include/ATen/ops/hardsigmoid_backward_native.h,sha256=o_rnl3M1iIA6RITxQrGeGUiHepLTWChtL_VHIKvfRZs,677 +torch/include/ATen/ops/hardsigmoid_backward_ops.h,sha256=_ovJBRMzNPbfhKk18r3dfn1xeO1Tv09sbs8MgDnB0no,1924 +torch/include/ATen/ops/hardsigmoid_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xhn5O2eYc0MdXm5NeL1kFCAzwmdAuZFSog28gALPnks,851 +torch/include/ATen/ops/hardsigmoid_cpu_dispatch.h,sha256=q53Nkv8F3mXxfv4NiRwzv2UbcI3w4zvBD5vIFSjcFtI,948 +torch/include/ATen/ops/hardsigmoid_cuda_dispatch.h,sha256=NxtvQvKoB4EGWbZChNDnwU49Wk6UZBqTjb-h7ZDC17Q,950 +torch/include/ATen/ops/hardsigmoid_meta.h,sha256=xQHQVsyFR37OLoLBBXBVyQgJC6cKerkHn6rk_URkRiQ,580 +torch/include/ATen/ops/hardsigmoid_meta_dispatch.h,sha256=hH-nL7cRx3mPwN60hn3aPB5HjzCz5Ttn3OfhodRCeEI,950 +torch/include/ATen/ops/hardsigmoid_native.h,sha256=vOOK2_O2ifEIakKltSRwOd_25i7sk6_YOKr9sDoIqRs,781 +torch/include/ATen/ops/hardsigmoid_ops.h,sha256=Q7Lcqp8k1e39Tx2nNrY0lr3luTi73ckXDBXLXYkXxik,2167 +torch/include/ATen/ops/hardswish.h,sha256=Ld7Jlh8g-_MK41bEkNYOdPB4m3qd4x_IhfOnRTYDf1o,1185 +torch/include/ATen/ops/hardswish_backward.h,sha256=iITip6k0oWkgG3Q1CbLTZg1nLHk6SZx9hyURMojOg80,1319 +torch/include/ATen/ops/hardswish_backward_compositeexplicitautograd_dispatch.h,sha256=rTgzA7AqtQGOeR4PK7ZDMgfqW8SDY5gY53CVc3yjoY0,955 
+torch/include/ATen/ops/hardswish_backward_cpu_dispatch.h,sha256=HMty385lv2s0mC6Qj-c4UJAatCRzLrPOKwSV3JjjN2Y,764 +torch/include/ATen/ops/hardswish_backward_cuda_dispatch.h,sha256=PyhZciayHwqkWVRVM-51kiDfHh953Y9wBjCRckH1S_k,766 +torch/include/ATen/ops/hardswish_backward_native.h,sha256=K7WSBh6pxSCsQtAEtfWE_-UOrZ_luvjCmfipJ_Cmruc,642 +torch/include/ATen/ops/hardswish_backward_ops.h,sha256=x4LL9nUk4zn-3vFUJ35kp8xG7yIJRzC8Lpr9ENXlR_c,1870 +torch/include/ATen/ops/hardswish_cpu_dispatch.h,sha256=kBXk20TCrpj5cd2PEP3wqVuQp_fzOZKFo5l5KxtyjSU,940 +torch/include/ATen/ops/hardswish_cuda_dispatch.h,sha256=jhpVRHRjJIK75jVRIdaMuAe40RtUFSC9FJjJHZdca1w,942 +torch/include/ATen/ops/hardswish_meta_dispatch.h,sha256=u0i5X_UvWqpMAbS2rrq8pQ1ZL7nAbaGZc6ZNqpK4eEs,722 +torch/include/ATen/ops/hardswish_native.h,sha256=TsIMuR00g3mAIW2kA-8qX1ZSn6gG5prEUj4eZOVyDIo,614 +torch/include/ATen/ops/hardswish_ops.h,sha256=xX0OSIUyE_WcPaWylVAt_gFoGWuIlisT3Fmux5saIw4,2149 +torch/include/ATen/ops/hardtanh.h,sha256=YvcsH3VOHzz6tR2gylfGCJU-RAAxn7SOc6VxKB_8vV0,1631 +torch/include/ATen/ops/hardtanh_backward.h,sha256=-nAS5DEqQSl67j3cgw8Di0hjZt4n0rO1PRqvi79wvIQ,1697 +torch/include/ATen/ops/hardtanh_backward_cpu_dispatch.h,sha256=55Pc5PGjsQV609SFGDkUWnRUI5IdZdxWz-imeAfqqQo,1188 +torch/include/ATen/ops/hardtanh_backward_cuda_dispatch.h,sha256=cF7u7AY9huzUCIkXagwnALAjDoDGRYQDd0lulmCa1AQ,1190 +torch/include/ATen/ops/hardtanh_backward_native.h,sha256=jMZyo1ikBqnAH9zVRbKYKQxEW2iLyaAIWDg93jFdtRs,759 +torch/include/ATen/ops/hardtanh_backward_ops.h,sha256=NYpcsVaOMf68JeE0LQC8HdGU8yB756FAj1ZtwWhy5dM,2274 +torch/include/ATen/ops/hardtanh_cpu_dispatch.h,sha256=8W-va7CImYRRCHdu0YuvCWFmw1grgFK-KXl_x9BGCtE,1175 +torch/include/ATen/ops/hardtanh_cuda_dispatch.h,sha256=XF5uz9yI7n4I-ynsUPla4agbeJmbbrnqazzLO8PtR3o,1177 +torch/include/ATen/ops/hardtanh_meta_dispatch.h,sha256=k8nJkKne2QxLnQ8MQPKQYC6av_dpoCGzEtzx4331vvA,782 +torch/include/ATen/ops/hardtanh_native.h,sha256=J7oGzMFuyZrNuf3bNQ3oT6mpxR-W5ysY1Lt9TAjG_Fk,1198 
+torch/include/ATen/ops/hardtanh_ops.h,sha256=zwr6I51K7So9OsNqp3PRGeFq9qBeCqqbNK84ksIYOiQ,2707 +torch/include/ATen/ops/heaviside.h,sha256=UvzaoSNU-4tsdkzWbg7djvT1Xy8dX_KfXWkh911IrGQ,1184 +torch/include/ATen/ops/heaviside_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rY-l1c0Jc7NU52e_G7kjORTH1MJviPxztOjWnHlp6J8,901 +torch/include/ATen/ops/heaviside_cpu_dispatch.h,sha256=5E2gw7PJtq3v7vSLkmeWtg0JsWNG0oxrhlbAMaFSKkg,1048 +torch/include/ATen/ops/heaviside_cuda_dispatch.h,sha256=vTspqK5PZ2b0wB2ehC_ykhevPvzkZBqO6bM3mEQWmws,1050 +torch/include/ATen/ops/heaviside_meta.h,sha256=ImPjLxfK4uk86YTLpnie010C5o-MT5x0OD2l0_TcvMA,605 +torch/include/ATen/ops/heaviside_meta_dispatch.h,sha256=VzCDZYncqqHuScrY6tURCAgJMHwBKXnLW-fgjakdeWY,1050 +torch/include/ATen/ops/heaviside_native.h,sha256=FSNm1fZxeP-aDrr8pQHnX0FWhWdtBz0Uia_uHlRA31g,632 +torch/include/ATen/ops/heaviside_ops.h,sha256=w9NuGJLGaXv39W8D9C9RRsAkhbDxRrtLESfmWXGNp48,2416 +torch/include/ATen/ops/hinge_embedding_loss.h,sha256=D0jwK_AkyU73SceD0bMXq1sOr2AlGSXi_IJfVvLA6go,835 +torch/include/ATen/ops/hinge_embedding_loss_compositeimplicitautograd_dispatch.h,sha256=4OceJCjlx2p7D1zTgc_DqrX6lU1epB_nwcQogI_bVAM,863 +torch/include/ATen/ops/hinge_embedding_loss_native.h,sha256=tB2cdpQxqLxJrUC6zNr95EtgZTa1fqnRrbkMHSsLkLk,575 +torch/include/ATen/ops/hinge_embedding_loss_ops.h,sha256=tDiCV1-rvy9wAsxUxtmnhO0whDjHeipb4-PFLfPkPLM,1221 +torch/include/ATen/ops/histc.h,sha256=oTl0pHcyzWEFlCFrYRfcGabfRMzkDvF3FRe3nME9WJ8,1370 +torch/include/ATen/ops/histc_cpu_dispatch.h,sha256=C0qiQXah9GCe-_OjxI_I4qnHg7dwPTIiQ-f4n5f0sws,1076 +torch/include/ATen/ops/histc_cuda_dispatch.h,sha256=J8Sh6wmvB60gW73XplingglyU2cNEbAGXvQ1EnNEgiU,1078 +torch/include/ATen/ops/histc_native.h,sha256=pMWmOaHoNk3tV9hlXIfgTuZqPgkwKKgUSHZaRlNBfP4,978 +torch/include/ATen/ops/histc_ops.h,sha256=WM2ZHpRou6rquRVaHc0fVQpfAstuCg7Y4wshgYFXLfE,2014 +torch/include/ATen/ops/histogram.h,sha256=Ex9jdO2Wjxs5rtV-CFn71quFXZ13nkc0fe-PVp4udns,3493 
+torch/include/ATen/ops/histogram_cpu_dispatch.h,sha256=DLxRcRdtLdIhKQNzU2hPV1OAgPqiGRbSmZ_dTRQH-wY,2038 +torch/include/ATen/ops/histogram_native.h,sha256=-lCv_5gIw_HimtR9gJaEGP9_Xq-3xD6gLpjJo-N6Y-o,1289 +torch/include/ATen/ops/histogram_ops.h,sha256=9Y8s7_Y8yYDTGT3bwvzZ6-dpa9hVwCQNhxXDs61Smx8,4862 +torch/include/ATen/ops/histogramdd.h,sha256=AsAukX6vHqauGBVDyL2ypEd4bFY3Urwr1NEC3SepBpw,1936 +torch/include/ATen/ops/histogramdd_compositeimplicitautograd_dispatch.h,sha256=ZbbAGMbD-m0oMUU_j3fdBXdMwqbsPYfLagfK5KdvDeo,1445 +torch/include/ATen/ops/histogramdd_native.h,sha256=Z9UBTu-xmOhZGjqiIeVOVTL0A59LAsHGhLnlmbOhyYM,1157 +torch/include/ATen/ops/histogramdd_ops.h,sha256=VtFTDvDhcfb4i5G0G1GxVdX1IFlt6VvlaD9QsrLtcSk,3835 +torch/include/ATen/ops/hsplit.h,sha256=ciyYzCNXy8NIuT0SAfw4fmev3JhxOEPn6hmUSMl-RW4,916 +torch/include/ATen/ops/hsplit_compositeimplicitautograd_dispatch.h,sha256=VGu6nNlhjaBO0amvWPtO6gVWRbPoVnDUsA_C5U-BXGI,891 +torch/include/ATen/ops/hsplit_native.h,sha256=xux8e3DfO8BI-gKCpDwYpyq-itY9rxs9VpI1U9F5wDU,603 +torch/include/ATen/ops/hsplit_ops.h,sha256=9zOHA_V6PbTjyq3y72I4axN1onGHnj9RqBvlOP2FzoI,1785 +torch/include/ATen/ops/hspmm.h,sha256=OpChNY1G99q_zsEjHxgQnuf3d0MYT-KN7m9zt8dQnrg,1126 +torch/include/ATen/ops/hspmm_native.h,sha256=EjxYyLGk1-fejkb-3Tmv_bDct-RcroLE6N8yD3ZrCtY,828 +torch/include/ATen/ops/hspmm_ops.h,sha256=9z96VsjXVVeCFDtrcenbgdspZ83G5gAVj_LzMdtvbmE,1750 +torch/include/ATen/ops/hstack.h,sha256=sZkWVR6pBnticTcWCLRO9JIT1wWBzlbo8yp7BKgtiss,1025 +torch/include/ATen/ops/hstack_compositeimplicitautograd_dispatch.h,sha256=Zn5t1UPVI6QLC_chOuedSWl1gKC5V8PpacMcdmMvG4k,918 +torch/include/ATen/ops/hstack_native.h,sha256=cEJaEEYbs1Vuv5qMooozt9WFj9TP1yqh0VDvU1G0aSw,552 +torch/include/ATen/ops/hstack_ops.h,sha256=wSgDh8XOCLWaIAB-ql9KnCXbub5hcbyTGyQQRh22ALg,1588 +torch/include/ATen/ops/huber_loss.h,sha256=kH73OnUMtnpdP-zdu28QGG44GG9PVh9BCJntwve7y0A,1506 +torch/include/ATen/ops/huber_loss_backward.h,sha256=AE1apMoXFXwnbxQV7PKequUX1G0MvMTKs6IlMnvNjnk,1758 
+torch/include/ATen/ops/huber_loss_backward_compositeexplicitautograd_dispatch.h,sha256=oWwfQloaCaNs6W0cTMNX28N8m7s0paHjgD1xj-O45hg,869 +torch/include/ATen/ops/huber_loss_backward_cpu_dispatch.h,sha256=TUPK4NLqHjEZjCwAnV4RIqgmrQh6Dd_KpeYiv75xsDI,1047 +torch/include/ATen/ops/huber_loss_backward_cuda_dispatch.h,sha256=h0wjUErNkhjGs1lOtM8ZTlB4EQaWLXjOTVTutbS8vbs,1049 +torch/include/ATen/ops/huber_loss_backward_native.h,sha256=jbeNQ6qeJscdeZjBsKpv9Mdl0dtdQ6GTJrriPuYNYoI,771 +torch/include/ATen/ops/huber_loss_backward_ops.h,sha256=eRHzUZIa-uxRx5tyR9yCGWCoYgBYzAzsOhZJm3ukOog,2297 +torch/include/ATen/ops/huber_loss_cpu_dispatch.h,sha256=dD7sZyQq99M_2dm3ierMP0aEgJci-9KjsWlXJLxBhh4,1117 +torch/include/ATen/ops/huber_loss_cuda_dispatch.h,sha256=zSvgocqIkQGHSS_gTNWi5VOLpcdP3BLQPV69OQ8nQD4,1119 +torch/include/ATen/ops/huber_loss_native.h,sha256=2PhVIApFl2y0YfCblVyPIVFVR9NoFbs9W-1rYhzL4TA,706 +torch/include/ATen/ops/huber_loss_ops.h,sha256=1rTsrW6m1-Fb_0G3gDYV-Enm0HtZ2jGxUTROUHLBokc,2032 +torch/include/ATen/ops/hypot.h,sha256=Kc1V2ZRdwZ4vazXrLXmfUw5WZoY1AckLadlHH0x6e4k,1135 +torch/include/ATen/ops/hypot_compositeexplicitautogradnonfunctional_dispatch.h,sha256=4mC-9q67le8qKcTvaUL5G0gqCimjGu6tcAR1tet5NoY,891 +torch/include/ATen/ops/hypot_cpu_dispatch.h,sha256=D-Oigt-GgAUHAFbSGynUZBAuWcGbJTDuIcOsMMkCH1o,1028 +torch/include/ATen/ops/hypot_cuda_dispatch.h,sha256=rp74Dn1pEKpjHS9GKzfg4vfIMKZ5h2SzkLG2nD1Q3BQ,1030 +torch/include/ATen/ops/hypot_meta.h,sha256=RmNd9h-TT6VYW0A60549mDX2rkvZRHbVsLsj4kVrfn4,600 +torch/include/ATen/ops/hypot_meta_dispatch.h,sha256=a3w2-mTk6qmemOtv4oLln2txtIRa0vM928tLSmdPySQ,1030 +torch/include/ATen/ops/hypot_native.h,sha256=0auVyge7hcVhHCzgqWRr54D-nQFItiRUUj8PWF_3FdE,619 +torch/include/ATen/ops/hypot_ops.h,sha256=ZXHdP2reacVZgN6lqYsxiU-ejwLLmh_mUNX22OLw8HY,2371 +torch/include/ATen/ops/i0.h,sha256=kO5-n2aMTlFirBo4AfhI4IJHu3e-3OymbFbiSXgkvdw,1094 
+torch/include/ATen/ops/i0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=TQVvSjHcOOwAB94DWCS8hAYR2Kp41H_y4-U1g1ADRjA,833 +torch/include/ATen/ops/i0_cpu_dispatch.h,sha256=6d_rlXT__vLzsO6Au-vcwQ0l7Wwrc8ce34VvENBsPKk,912 +torch/include/ATen/ops/i0_cuda_dispatch.h,sha256=KYkT6YopLjQE5A7YALQxaPV6LhpXvsg9QKVYHUahJU8,914 +torch/include/ATen/ops/i0_meta.h,sha256=E2lzq6KbAOVjGaOQoRycgyIF97sbVFH8MQeuhaGSxOw,571 +torch/include/ATen/ops/i0_meta_dispatch.h,sha256=-99oCMwLHT1cQ90-qkJP6wGMakMhrwaLrlhlttJN1dw,914 +torch/include/ATen/ops/i0_native.h,sha256=el_kVLuJua9SmuRXWmsTeVf0_hfO3lH8dzaU8iAqfwE,584 +torch/include/ATen/ops/i0_ops.h,sha256=ICJNo1uFbXm-1LVvruLsV6pK4J1L_rnWb3nW22vdccE,2086 +torch/include/ATen/ops/igamma.h,sha256=39U_eeXkcKJUkj_VtLjufysVTBAVh_iZitqvMwbLgkM,1145 +torch/include/ATen/ops/igamma_compositeexplicitautogradnonfunctional_dispatch.h,sha256=RXQ2KFbVDx_ojy1P6X5UJe23uev5w89sYC7Co1s4Cmk,893 +torch/include/ATen/ops/igamma_cpu_dispatch.h,sha256=Fb5tOPCCrdyNgaD1UEbVKTFfKz04oFA1LQPI00gQNj4,1032 +torch/include/ATen/ops/igamma_cuda_dispatch.h,sha256=jLdlickXI6czL8mPlP6703L8rswUST7f8ijnM-MfcDM,1034 +torch/include/ATen/ops/igamma_meta.h,sha256=bCI-6IETydmY-Zoy74Kjaufe6-Rd6phF88u5JgDE5rA,601 +torch/include/ATen/ops/igamma_meta_dispatch.h,sha256=6J4N1rvt0ej8pgVYhR6toFZ0xZzw6T9rPN1YxxLeMpo,1034 +torch/include/ATen/ops/igamma_native.h,sha256=NLzzE2MBquNC2d_LrQH-QLXSZTn8UHy8l5kDJpJP_Mc,622 +torch/include/ATen/ops/igamma_ops.h,sha256=Cp_tw9o4drAq0PBvfuGzVF7r3fljdOn-pgvwZYs0L6o,2380 +torch/include/ATen/ops/igammac.h,sha256=UBs7QDe9c3LfqVxTSeZVRV6mrz09LLLX8Yp3FJWemMA,1155 +torch/include/ATen/ops/igammac_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rBlRimdNlBbYDlff1U8EpRMpzxNpxqpRW4F7Emy-I0k,895 +torch/include/ATen/ops/igammac_cpu_dispatch.h,sha256=ntIbXmvldHTviroBpPKuymzsrKL4Xml71CRXEuY1Ehw,1036 +torch/include/ATen/ops/igammac_cuda_dispatch.h,sha256=j92eayh8eWXGNxH_ETekv00BrtA3hKP2ODLhC2Hglvo,1038 
+torch/include/ATen/ops/igammac_meta.h,sha256=v5lPbuxOJIcX1OgCHbsbrRluu9-pXn86BeNCXzpyPTE,602 +torch/include/ATen/ops/igammac_meta_dispatch.h,sha256=rE8v-jskpM9Mwjaxwbl7sWZjMZnxdBUv-AptmJlwU9M,1038 +torch/include/ATen/ops/igammac_native.h,sha256=L_FsPR237g6bso6bT_uUDBOvQGI4IVWLGCkFWIwbXVk,625 +torch/include/ATen/ops/igammac_ops.h,sha256=spGsIBBWFELZzx_lsi3Jhrm0ZI0QWZ2o1xrOpXapxCY,2389 +torch/include/ATen/ops/im2col.h,sha256=cNstsq6ug8DrRKB24QSdD5psACJEGkSifKayzF6EoGI,1640 +torch/include/ATen/ops/im2col_cpu_dispatch.h,sha256=_jvIGHMby1azb2qp6vPKYQpvoWHBe9TwqWy_a0IKDVs,1189 +torch/include/ATen/ops/im2col_cuda_dispatch.h,sha256=hkQedhLXBQWIE7xSjgA6OqvEsg17-6HALcK1kMCPmws,1191 +torch/include/ATen/ops/im2col_native.h,sha256=TXHwTDpTDihYKpxEhc8euvBzZLh01bxwAKE_7UmZ3H8,1120 +torch/include/ATen/ops/im2col_ops.h,sha256=WTwpjFdGVc3XBfrJMh5SbclhsB_L7Wf0xEh7Kd8h3Zg,2278 +torch/include/ATen/ops/imag.h,sha256=rq_52gfUZPoxwYQe-o9VLPX--IipecmZ5HuvM45CWU4,612 +torch/include/ATen/ops/imag_compositeimplicitautograd_dispatch.h,sha256=k6fCdg6zHmwu0bceT5Z6Ojtpvc1Z4JEivk1aSSLa7Fg,762 +torch/include/ATen/ops/imag_native.h,sha256=1wDaJMe_PNiOMrzKyr_eHwSzdOHRpSgJ1sBrF_7QLQE,474 +torch/include/ATen/ops/imag_ops.h,sha256=3Ud3uPJoIpoR5kfAUtVz2Z_S2fvkEa8RaF_uju5hojc,967 +torch/include/ATen/ops/index.h,sha256=1vMRCRcGiYRARvQTmehMpfKYHUqmOwR29d-Lg0Ob_I0,1288 +torch/include/ATen/ops/index_add.h,sha256=Qi9HmxFROs64M5Hy6ddoA1QynMlZhOMyU_3jENlzUbQ,1906 +torch/include/ATen/ops/index_add_compositeexplicitautogradnonfunctional_dispatch.h,sha256=jhkdb2fsCe29JTQ5J4K--HmYuspCmSa2JbZci2xBbxg,1035 +torch/include/ATen/ops/index_add_compositeimplicitautograd_dispatch.h,sha256=nIxcakQ_ZpfTNGdZS1L9dCgNGn6HVznqPm-eaCgBKk4,865 +torch/include/ATen/ops/index_add_cpu_dispatch.h,sha256=6Zm_b36B8uOa8RsK9oXnaz-jhAHU5gifS5XyEY66piU,1314 +torch/include/ATen/ops/index_add_cuda_dispatch.h,sha256=n7CoAI4atRcUGe0SydjiDDZGdiOT4tUyqVNkIdj9unE,1316 
+torch/include/ATen/ops/index_add_meta.h,sha256=kOsc-OSXEkiOTyQehNitRuUcNSxz4h-gTV1r1AS9FeI,1144 +torch/include/ATen/ops/index_add_meta_dispatch.h,sha256=t5CO3gH4JaqR1iQSTHNvy88tA-r2yP6D_LvyaK-wNIM,1316 +torch/include/ATen/ops/index_add_native.h,sha256=lcraRQKCC52bi5FAtOXHkk24PKGAt8gW2881-6Jt25Q,1100 +torch/include/ATen/ops/index_add_ops.h,sha256=CfW0biWusdOtMCMD8KF_gRmkcu05AS7CWJBJ5KeovXw,3967 +torch/include/ATen/ops/index_compositeexplicitautogradnonfunctional_dispatch.h,sha256=mQjGM6Q3urjQ-RrwwcwNLHbNICvz4KmxM_jWGgo1rBY,845 +torch/include/ATen/ops/index_copy.h,sha256=LvO4HDqvSaM0ABejiVIYYXSBK9DBxl4mniLyVAZNqQM,1711 +torch/include/ATen/ops/index_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ISW5J3en26MzCOfMFHTZe_N86uljfTObV34QLSBnVqo,981 +torch/include/ATen/ops/index_copy_compositeimplicitautograd_dispatch.h,sha256=qjT-iZD3PVfjtbgtGiYU_k-jkH8vsLT0Kp1sw1be1Uk,963 +torch/include/ATen/ops/index_copy_cpu_dispatch.h,sha256=CTfro6ukWFyG7DDYZFC2uA3pMzxr5A_asVDws8Y5-Bs,1208 +torch/include/ATen/ops/index_copy_cuda_dispatch.h,sha256=IigJtGqwuYS8OiEzXKcWQa-MjtqX8z7pm_jXGH6_RlM,1210 +torch/include/ATen/ops/index_copy_meta.h,sha256=1f0agi6DBG2WMnyH7uR5kfWX2dAfXz6wsgeC5RwkwkA,1119 +torch/include/ATen/ops/index_copy_meta_dispatch.h,sha256=YGC5re08ZphuUm9unU6INs8CPExCTxRf8ADh8OLBMwo,1210 +torch/include/ATen/ops/index_copy_native.h,sha256=AqeGdOQksBgNR46txS5NJ5UTm-t_mzKenX_m0wQ1t90,927 +torch/include/ATen/ops/index_copy_ops.h,sha256=xnlIDBkwgSfaIrYYUSrLs48RpaZHHde9lTjuQmstxhA,4420 +torch/include/ATen/ops/index_cpu_dispatch.h,sha256=78FfP3VEGw9uTMoUejBcGTHWbICtoaPuGnRKJUgWVxw,1042 +torch/include/ATen/ops/index_cuda_dispatch.h,sha256=iH4qfSFzG75HnZBkvQyOKDla39oiYZjL_nHIjtsbfSQ,1044 +torch/include/ATen/ops/index_fill.h,sha256=YyYOd-Ea_flhVj0Bx2YWUPO3rADb1gi9sZ76TJ69E24,3075 +torch/include/ATen/ops/index_fill_compositeexplicitautograd_dispatch.h,sha256=NblXRgpfk8q9MDoExCKsyZqDFE2AnHDq8sIDuldcOvo,1546 
+torch/include/ATen/ops/index_fill_compositeimplicitautograd_dispatch.h,sha256=Sn3-N4_ga-20Tz3pLU2xtbQ1Qwr4AabBL_YTp4XqWmk,1212 +torch/include/ATen/ops/index_fill_cpu_dispatch.h,sha256=h5A-lEM80B5lglKP_IZeGcdBvk_IcN0-I06A3ea6aaI,906 +torch/include/ATen/ops/index_fill_cuda_dispatch.h,sha256=jGuEpQzxFEbpk2b_U0L2Z0OSxx6W5iJ490S-gWbK12w,908 +torch/include/ATen/ops/index_fill_meta_dispatch.h,sha256=EZVJfy7fgCF9nUsh4e6rzOx6yfDQ9vX_aNnkwInYTOw,908 +torch/include/ATen/ops/index_fill_native.h,sha256=_z5gD1ZTnbdYMYTFdgZfAYb2gbqtFNbQbxDIxlQlgSY,1726 +torch/include/ATen/ops/index_fill_ops.h,sha256=yByjn0cRk3xp9vitTfGmD2VbGGzJ83zqZcXiW5zZyOQ,8654 +torch/include/ATen/ops/index_meta.h,sha256=M4QMP3ixaZgMkO55x8L23dZzQRdvjKydWlo4m-_nglI,1526 +torch/include/ATen/ops/index_meta_dispatch.h,sha256=P-8TWTTmftdxVFMe0pJuHpHuncixF-jZs1tk3xljEes,1044 +torch/include/ATen/ops/index_native.h,sha256=jFxdrBxSZU-sVntjfO43uSHTMTuLQU5ViTWrQ5ina2E,763 +torch/include/ATen/ops/index_ops.h,sha256=HItb8WTjpEJnTgNAwC-tYEs9Ujh2ws4BppgHSrXJxvQ,1983 +torch/include/ATen/ops/index_put.h,sha256=0pm_FPwdjTLVP_jw_LMrUirMTwT933A_9-LmwS9nHuc,1947 +torch/include/ATen/ops/index_put_compositeexplicitautograd_dispatch.h,sha256=tSGeYQR41rQPY64Gpj8m2zDnBjBzXQqnxv1roV42Vew,1402 +torch/include/ATen/ops/index_put_native.h,sha256=o6cVhGU6qNyeDIob2JZskGs_TOj4eQjlLKUlkcxH9R4,926 +torch/include/ATen/ops/index_put_ops.h,sha256=1Rri83jApq10YnXDmgYOeoNKv3wim9HnYS8U-9kXWNo,3142 +torch/include/ATen/ops/index_reduce.h,sha256=_7YO2JIK46O_LHWw39u8nSmlWCsx_oWfeJFCGJ3SAV8,1755 +torch/include/ATen/ops/index_reduce_compositeexplicitautogradnonfunctional_dispatch.h,sha256=R6N_xWY56S2n5BDeYZ4fTsl-Rj418YWb1lzPhnsH2kg,1083 +torch/include/ATen/ops/index_reduce_cpu_dispatch.h,sha256=kRvYVrYZRoeG2YrCNUe00SmO8GcqiAvPuLYl8tMZJcE,1407 +torch/include/ATen/ops/index_reduce_cuda_dispatch.h,sha256=NfFethrIFpZiwBOceL7Ek0PsMGFSXH4o-5fGa4Kvd4w,1409 +torch/include/ATen/ops/index_reduce_meta.h,sha256=BsHjAQSq18NsIYeJ0Jp7G7MiSacX3yQ2Y9xrDM2v6js,1165 
+torch/include/ATen/ops/index_reduce_meta_dispatch.h,sha256=Rzw9n8_oqGUMr0r2kJhQeoxk3cg0lL9o1K3lGD6O8wo,1409 +torch/include/ATen/ops/index_reduce_native.h,sha256=BCw2y1qAJQfyevitD7ff_3OG1qyD5S8ebxDg0pwrNbk,996 +torch/include/ATen/ops/index_reduce_ops.h,sha256=gbWY4RLWuvE0rqUnnp7dWYOS3Yg1oOOK_-YXNh8Goys,3283 +torch/include/ATen/ops/index_select.h,sha256=S92iUng3rQ1TVtuTlgn4EqNPeCl45IN5RroJewyFlwM,2159 +torch/include/ATen/ops/index_select_backward.h,sha256=j6DqeG-wMN8idOKolTtGJPo3ochWg_zP3DRldQafRnM,1835 +torch/include/ATen/ops/index_select_backward_compositeimplicitautograd_dispatch.h,sha256=o9hT6T-ge8JSAEMeGj4p5NzH8U4zMpm5oyH7eG1Lhd0,993 +torch/include/ATen/ops/index_select_backward_native.h,sha256=uWLSvxxXrYnWVC5kpC_CvUoGAZjNcXe4aEFVtr8lqzk,569 +torch/include/ATen/ops/index_select_backward_ops.h,sha256=8OpwCOw_gqct9SchZ5R4YD_sUxWARkWfvBkq2J7uFyU,1248 +torch/include/ATen/ops/index_select_compositeimplicitautograd_dispatch.h,sha256=8f4FLKtwe-5DE8GYDoaNPjVtCl6od4KdpapNNtch7Jk,1068 +torch/include/ATen/ops/index_select_cpu_dispatch.h,sha256=1MbvSC-blT9bwOJEXpzUiFT73qsz5N-NBWlJDG1gMDI,1012 +torch/include/ATen/ops/index_select_cuda_dispatch.h,sha256=sCuNKKjpbY8aplDqQ9DyM1b-tdg-eGCjN4iTYyJLWzI,1014 +torch/include/ATen/ops/index_select_native.h,sha256=bCbKpA1fJ9HFeLH4-g4BnJmojXWl_q6u2lyGUaGX-T0,1565 +torch/include/ATen/ops/index_select_ops.h,sha256=eJNuV_CGbj-AwqjrGurpfI6YinkLsMfMcuQdpGQxdic,3417 +torch/include/ATen/ops/indices.h,sha256=IiJyTLPcehA_AXBMv3TQSqU-tiz8gbF6XgPK2HoCV88,481 +torch/include/ATen/ops/indices_compositeexplicitautograd_dispatch.h,sha256=NtptpbmpDMRQhc9OKENcOkIerT-wV-uREe6T_8m-dfI,765 +torch/include/ATen/ops/indices_copy.h,sha256=18aArGBEljo38Sywr5bgwXNuuFCF9O52Fx0G2MhvMd4,1064 +torch/include/ATen/ops/indices_copy_compositeexplicitautograd_dispatch.h,sha256=C1pzb113Xh0GV7J5_wlGtrScbU25Y_pohFE5ynQBWsI,879 +torch/include/ATen/ops/indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=VIAEOpaWRV3GAQSCzjzommNlXXMWShHj9-z7J3okeJM,796 
+torch/include/ATen/ops/indices_copy_native.h,sha256=Ncjh26wGgJhZDuZ60R3uSm1eCesvYFE_FsRXb3SWJFM,566 +torch/include/ATen/ops/indices_copy_ops.h,sha256=ANJAk6WdDSGIvLlvmpnvN-halWJks2M4FIicxNFQddE,1626 +torch/include/ATen/ops/indices_native.h,sha256=Z_GcO__Lqajh8Wl69dtIaQxEgOATAb66-vWDEv4SqSw,547 +torch/include/ATen/ops/indices_ops.h,sha256=lJS2_JNDMvfCXJuSN78FBLOORIMGcvUDinX29Ei8azE,976 +torch/include/ATen/ops/infinitely_differentiable_gelu_backward.h,sha256=a8v9wtGPA_cyw2B3E3LawrREz4SSDmwnJfS3Wra54-A,790 +torch/include/ATen/ops/infinitely_differentiable_gelu_backward_compositeimplicitautograd_dispatch.h,sha256=wm77Mxn6ul99VKoUWmCB3rkiqW7zMOv2KJyfSfOKyNc,822 +torch/include/ATen/ops/infinitely_differentiable_gelu_backward_native.h,sha256=M3l_a9br_ZqGjBI5kzdh-ETQxFf114MQce9nSzh92Hw,534 +torch/include/ATen/ops/infinitely_differentiable_gelu_backward_ops.h,sha256=mZbajlTnGhoVc3sSYSdPLHPPa7PhYoA4Amsn1Yd-XJY,1149 +torch/include/ATen/ops/inner.h,sha256=tWAFby8H-gki6CQ3qsd23mKWTsUx0_MTD47mzfWGKs0,1135 +torch/include/ATen/ops/inner_compositeimplicitautograd_dispatch.h,sha256=rGTQcl54_ubMF-wFN4RU1uZoxnBlVVOG1lG0Pl5lgFk,996 +torch/include/ATen/ops/inner_native.h,sha256=VvUrTrl1pAOevmhQy1LTpOSw1PoOAvA4EAR6M2fPV58,604 +torch/include/ATen/ops/inner_ops.h,sha256=ZJxXvVhOttMXPIx4U5b0TKEkOsu3RzDzewact0PVmd4,1756 +torch/include/ATen/ops/instance_norm.h,sha256=HfObZgevMAohlOUbuZibvfvukQoYEpRNy-KfHaDuGNk,1131 +torch/include/ATen/ops/instance_norm_compositeimplicitautograd_dispatch.h,sha256=AX3uUTv2oAtq4DDZUalpehV5KNC4XzjP03cuWiMYD8g,1028 +torch/include/ATen/ops/instance_norm_native.h,sha256=eCSJWZPn-f6UFmMzawfvMm9lzUTvcUhtAc1ljS7HzEM,740 +torch/include/ATen/ops/instance_norm_ops.h,sha256=B1YSPOTifoQBL0gsHpzZxRu1UX77qWxa5N1_GMIZgQQ,1821 +torch/include/ATen/ops/int_repr.h,sha256=HDOuDU8Sa1XECDv7LHd4fc3kmlWu94CNDV2Dv11mFNI,1024 +torch/include/ATen/ops/int_repr_compositeexplicitautograd_dispatch.h,sha256=N3eNdxRpL51j-SjJYR1v-_CqBZm5jKDtxN97k-xRFPs,871 
+torch/include/ATen/ops/int_repr_native.h,sha256=ExwN4wXUH1q5nhbNj1_Cxf1ykRXNBQ1hDLaV_vAS-ZE,643 +torch/include/ATen/ops/int_repr_ops.h,sha256=bTs-WKFKly1AyI8zfmmLi-wm77pds4g3UmCXU6uO1iU,1602 +torch/include/ATen/ops/inverse.h,sha256=6EAb0fZ6FMAMzjx2i5sKUm0o69O9kD_mcBdrmpTkM4M,1014 +torch/include/ATen/ops/inverse_compositeimplicitautograd_dispatch.h,sha256=3hZZTApRJteFgetCKmFy3PcEGPweUE4OCTHURXkKLOw,924 +torch/include/ATen/ops/inverse_native.h,sha256=uj-CT2rMzP6OIIGgh9vkmUnYFZ0Sk6l_dl6Yo3kPqSI,556 +torch/include/ATen/ops/inverse_ops.h,sha256=NBMK2F6D1sXeYhN9uX5vn08mmb-NAfY26x04sRgziEM,1596 +torch/include/ATen/ops/is_coalesced.h,sha256=sspvuByXW5td632ShkbilkMOfXpwBXwHBg8l65OiCvk,486 +torch/include/ATen/ops/is_coalesced_compositeexplicitautograd_dispatch.h,sha256=yC3pLDFtapo3PUsZdMjxINh25AJ9Oz0vSb1vAp0A_TM,764 +torch/include/ATen/ops/is_coalesced_native.h,sha256=S_KYU284-mDch-OFftlGekfeBQ9SUoDi1Gf0V75PwWk,545 +torch/include/ATen/ops/is_coalesced_ops.h,sha256=aD-TB4D0oes9jyEeASkl7asj5ud0NWztAdLgc9uuHDs,965 +torch/include/ATen/ops/is_complex.h,sha256=Bws69GjisHPWbF_f2epO41M5d9Qmhf9OVBKZUnwG-os,633 +torch/include/ATen/ops/is_complex_compositeimplicitautograd_dispatch.h,sha256=PM7uB6Z8ulJJUs-Q9Hkl7Ji_kWrQZb_PJr-8YDSieZw,762 +torch/include/ATen/ops/is_complex_native.h,sha256=FgK0ub3CivIpWtMNq6duqQZgwoO8TCEAiHzdEeD1MxQ,474 +torch/include/ATen/ops/is_complex_ops.h,sha256=ujLm7lyRUosIzAAtacPYmvOTM37pa4o1RJHH3kMughQ,959 +torch/include/ATen/ops/is_conj.h,sha256=CK-GwhNqJWo-cANYSGxYbqNN-nX3N_qmK9CGmv-XhDo,621 +torch/include/ATen/ops/is_conj_compositeimplicitautograd_dispatch.h,sha256=6OF4Lw27SN7l5Gm1nWaZLYM32fjNnV1sgzw1ECfHe4s,759 +torch/include/ATen/ops/is_conj_native.h,sha256=4sSsceUPkvme-_Tc0vB0m2_CG9GT-egDnyC2dv_D0wg,471 +torch/include/ATen/ops/is_conj_ops.h,sha256=tKDoZ6nd7mjhmNcLb4gz283MYr6pUB0MurYZGFfsMJM,950 +torch/include/ATen/ops/is_distributed.h,sha256=fCBBqV5APno8oeSEF12FEm_2YVGYsTaCyos-QoIx3UY,638 
+torch/include/ATen/ops/is_distributed_compositeimplicitautograd_dispatch.h,sha256=OPotyIqyIDnxX7pfFj1_Jd0iAajLgscjZNtEkAWNvNA,766 +torch/include/ATen/ops/is_distributed_native.h,sha256=qcLBeN_58hG0gi8iHqrpW3Gs-MI5kgVsMsKh3gjVAnE,478 +torch/include/ATen/ops/is_distributed_ops.h,sha256=cwKWAUNHt5Yh3dUEbFa8U9u4QPemwUzc97-7BuohCr0,971 +torch/include/ATen/ops/is_floating_point.h,sha256=G66pd1LRp_A7pKRyr1DViDA9Kz5-bSYS53b2Itk1oBI,661 +torch/include/ATen/ops/is_floating_point_compositeimplicitautograd_dispatch.h,sha256=A06RbWpiRJ4J4i5_SmDn8j8mWdPdGioKZSH1QNYj2fA,769 +torch/include/ATen/ops/is_floating_point_native.h,sha256=3uoBelvwlH_8G1GLDBLQ9-Wu2D0AoOejwO1j7kXJfyE,481 +torch/include/ATen/ops/is_floating_point_ops.h,sha256=zXJYb69e6U5kF5UTexO_xdRazgbw9UvWyNeVcxi8_6w,980 +torch/include/ATen/ops/is_inference.h,sha256=c0GD_IEPFqERKOpE8DxJGsNvNLyembXuqQx6ukemhcw,641 +torch/include/ATen/ops/is_inference_compositeimplicitautograd_dispatch.h,sha256=ec1er2INXV2_SN0Toc4IbcmTida63Ef-Z9l20OaopPU,764 +torch/include/ATen/ops/is_inference_native.h,sha256=Hb3BKZ61m4CzjjfzKLYO91AdloGDaJgS355Ahie_aR0,476 +torch/include/ATen/ops/is_inference_ops.h,sha256=7YvphPoFxkffFMcDG6v7b2Q9oxVUYJRVeIiLjf6sLwE,965 +torch/include/ATen/ops/is_leaf.h,sha256=Y_Le4XqGIo2Ul9ESA7a47o4Gk-fPvkcn5WynTQ__K0Q,481 +torch/include/ATen/ops/is_leaf_compositeimplicitautograd_dispatch.h,sha256=8YSwdYkXAlqtNyy6vzoHCZrf9kg3igxbHG8bKoTQOPE,759 +torch/include/ATen/ops/is_leaf_native.h,sha256=keMKE5Ho0rmkJLpF9MdkmhiumTI46031J1ctUnfcYsY,471 +torch/include/ATen/ops/is_leaf_ops.h,sha256=BipxhqWZzNhxup7JKX45vzkDoMJX5lIl1VybDWI1Egg,950 +torch/include/ATen/ops/is_neg.h,sha256=GW3PTKWci25dwcyKXTymUQ6qvi11lnTEdNc06qFZ_4I,617 +torch/include/ATen/ops/is_neg_compositeimplicitautograd_dispatch.h,sha256=e5vmOK96ljDoaD1ZEQPEQ8zxrLyJ-8JTIeMgKSSJI4w,758 +torch/include/ATen/ops/is_neg_native.h,sha256=OfJinkQDDHJ5cpAaWYVX-sZGFBT_9UF6CA1VMqfAiKc,470 +torch/include/ATen/ops/is_neg_ops.h,sha256=vRHRHduzeGHFRhP4doAsY4Dn7lvSC_rYcUYdfRlcLSM,947 
+torch/include/ATen/ops/is_nonzero.h,sha256=_vR7mUOH-f9LGLBAg84EHZnMC-tY6EBl0lvBTcA0HMk,622 +torch/include/ATen/ops/is_nonzero_compositeimplicitautograd_dispatch.h,sha256=OQaAY2in-cvFGggANfAfZKV3BDAtaQCHqIyGSbD5vlE,762 +torch/include/ATen/ops/is_nonzero_native.h,sha256=ZXbNssowgL6LZm_1Q8kSMbcRmodh3_MIujIEaEdlowM,474 +torch/include/ATen/ops/is_nonzero_ops.h,sha256=fAQ2qCOyJj-hNSLQFrY0CDLCVxupRoBfRCbuKUPeDd4,959 +torch/include/ATen/ops/is_pinned.h,sha256=fLdI4w8DzJmpGUNGCDd1ogRa9mdEK05EI_Rmyn549UM,483 +torch/include/ATen/ops/is_pinned_compositeexplicitautograd_dispatch.h,sha256=XvPDS86TxmonV3zEt7RgIVOSEJuxTJS9I8jagjfEJ3E,812 +torch/include/ATen/ops/is_pinned_native.h,sha256=MgWMN67Dk_AEsuebDxE0TiHxrpkF2vZWrxEmSFzlufw,757 +torch/include/ATen/ops/is_pinned_ops.h,sha256=e9tRA5hqD_kma3wLIf7OGK9kwnv6PS3pMJPBCv-7MJs,1078 +torch/include/ATen/ops/is_same_size.h,sha256=Bjx-zSrW-4QiLouiqUF__GKHtmbhuiSRt4TspoHqaXQ,677 +torch/include/ATen/ops/is_same_size_compositeexplicitautograd_dispatch.h,sha256=wAeTAaogkdseFll4Dul7AntZUVKfJbPlw09vsWq51A8,790 +torch/include/ATen/ops/is_same_size_native.h,sha256=IDOre-ZdaLgmLxNkvNsJPf282NmCS7u-z_qpW67bTto,589 +torch/include/ATen/ops/is_same_size_ops.h,sha256=s2fL7HmsXz4FGSeOm47ngHotFzoLhGW6s7_G8MvkFhQ,1051 +torch/include/ATen/ops/is_set_to.h,sha256=nxMYSABPhUkWpikazeJGCnE9xq2RsEKcEIzr7gkjp8U,483 +torch/include/ATen/ops/is_set_to_cpu_dispatch.h,sha256=C44peVDc8STgF5TlW3naz32pjtVhxvXWuSwDqg_Yge0,744 +torch/include/ATen/ops/is_set_to_cuda_dispatch.h,sha256=7uE-ZcNeaYWMlZ21ayYc144zs83lk03eFWl0wrh-kCw,746 +torch/include/ATen/ops/is_set_to_native.h,sha256=8L1lG1QLtF4tZ6OSHChd1lclrzmnldlwWMpuuMUXe_s,500 +torch/include/ATen/ops/is_set_to_ops.h,sha256=7lrPVDJna5KMr3bM8NUbbrysfe5dqgf-1P1LFDceEBM,1045 +torch/include/ATen/ops/is_signed.h,sha256=67JWR0HYbtFXOxEmeM57IKm2sXfotmg79IN5wUukvL8,629 +torch/include/ATen/ops/is_signed_compositeimplicitautograd_dispatch.h,sha256=u884jU4_OTpQKNfWKRugrlpuLGpQacdZSKNz7saFGHQ,761 
+torch/include/ATen/ops/is_signed_native.h,sha256=9O8Iu7h4jNUnniXUDwgWWxPm-QSdJAZe_zRYUVFkNlg,473 +torch/include/ATen/ops/is_signed_ops.h,sha256=HSULAHgCggTv4luaAD5bZShR_iKDxADMLYLgSgnc4JA,956 +torch/include/ATen/ops/is_vulkan_available.h,sha256=pI05CKxwwgBARSZlyt2VuJxXX5_WVlExw6lWGHtoUCM,620 +torch/include/ATen/ops/is_vulkan_available_compositeimplicitautograd_dispatch.h,sha256=HHFOPI52jZvU2ZZz2YXIVJucZIy6qadOfvDePH-_jgI,748 +torch/include/ATen/ops/is_vulkan_available_native.h,sha256=oHgVZ1IISzBKIF1S_DygV-HdCm-Ay2lXGYrZv11Hxug,460 +torch/include/ATen/ops/is_vulkan_available_ops.h,sha256=pmwy91MGQmC4edHZPQ8LZg8Xm6XxFUkA8Cilal0c3IU,909 +torch/include/ATen/ops/isclose.h,sha256=AZlYYf-CE9mfyPtG2omtrL3SRsO6IiChNu3Wrpvj1QI,806 +torch/include/ATen/ops/isclose_compositeimplicitautograd_dispatch.h,sha256=2sPLXHrcWuqvEkhLXP_wSFANpRyDt6VpH5IVoYDmyFU,851 +torch/include/ATen/ops/isclose_native.h,sha256=KAdmtzsoh3-m5sLMXNvOjqT53Dfc_ZNaM44LNF-mgkU,563 +torch/include/ATen/ops/isclose_ops.h,sha256=VwCkK9U3O1iITrdxaLca5wez0fBmCtWUE72lAAepFEk,1220 +torch/include/ATen/ops/isfinite.h,sha256=jm_5pdUbDMKDWkCTAefEUOcChSCFcs30xz5tUz5d5V0,622 +torch/include/ATen/ops/isfinite_compositeimplicitautograd_dispatch.h,sha256=6gIF-qr537lG4VF_zaUN8eX6t1JmilAHpTwSK2etg_Q,766 +torch/include/ATen/ops/isfinite_native.h,sha256=sBZTyfxXW_CrWd0bv_NbdDuZK5bXsWfYV7Hz6kdOCts,478 +torch/include/ATen/ops/isfinite_ops.h,sha256=bYX8MffWS3zGN1sfD-e7rVSX4OeYPgSuWA2fRlmmqBo,973 +torch/include/ATen/ops/isin.h,sha256=Zqs_yRXDXQ_QsUtJ4InrsnvruxiLEKg4dWPOCQEqs-U,3969 +torch/include/ATen/ops/isin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=DbQSEtmQVYZYcTzdmbToOsTaJkczCnWxrgAOR1Y54kg,1139 +torch/include/ATen/ops/isin_cpu_dispatch.h,sha256=PIGEL0_12S73-xp3zUZjWt87LY15lu5yHfma-r8-E14,1986 +torch/include/ATen/ops/isin_cuda_dispatch.h,sha256=lnkeCo5IIHaxVBka7jb0rG0N96Z7hfYd4Y9na0VPrHA,1988 +torch/include/ATen/ops/isin_meta.h,sha256=-srUjGjvODHWvvBqMu_n_d6oq2106oVHep2abXzsMbo,1058 
+torch/include/ATen/ops/isin_meta_dispatch.h,sha256=r9pisiDzRKbOg2rHU-hd5srMZDR_hU4cfq6-RtDn5gc,1988 +torch/include/ATen/ops/isin_native.h,sha256=prJlb44u-Q7Ofnz4EQDi9yjgXnuxqAgrvmkqh874F28,1159 +torch/include/ATen/ops/isin_ops.h,sha256=MSs1WGYIsmyYnYsU9gRfG2gw-DzFLiLwAz_Wph5cH8s,5582 +torch/include/ATen/ops/isinf.h,sha256=gfW3uss2EVl5tzcqWMD7Ntn1-Hw4Am2j4-3KOK5sLpc,994 +torch/include/ATen/ops/isinf_compositeexplicitautograd_dispatch.h,sha256=2vxqvNiqAMknXD2tgc0XI0XPhKho-_3QdmaQ9uz9zDg,918 +torch/include/ATen/ops/isinf_native.h,sha256=V9kSlY13tt6Twf1UCPQ-SlRvVQ4ZMOBh_suFkoBnmiM,741 +torch/include/ATen/ops/isinf_ops.h,sha256=y1mrl42kXgHRiK-gNFYv-Rti4SPHNaIEtA6PTM1qMVU,1584 +torch/include/ATen/ops/isnan.h,sha256=Jclv8BD6q7WPULH61ZgGe2PPo_vSJN3wkX7TnmGZAbE,994 +torch/include/ATen/ops/isnan_compositeexplicitautograd_dispatch.h,sha256=0rerY-imE2tu_5ntwS1WYzpWFc_nfMmllp7BPHauBuE,865 +torch/include/ATen/ops/isnan_cpu_dispatch.h,sha256=RvGBhKcczUpWPCaNlnHs4yLGfy-9uJCZqPB_YMx5pkU,719 +torch/include/ATen/ops/isnan_cuda_dispatch.h,sha256=yeJ84r1Mf9u_6eB7k1NxWC--OHmdv1MGg9_Liiw3jfc,721 +torch/include/ATen/ops/isnan_native.h,sha256=WEJ_J-p8za3Unmvm-oBC8w2tekCcnjTlKjGeRb_L1x8,676 +torch/include/ATen/ops/isnan_ops.h,sha256=Fsfl-GYZBdY1iLEwqY9PbsGu1KW_lvwLciR2O8pqTe8,1584 +torch/include/ATen/ops/isneginf.h,sha256=j07osp3rcWxVZSTGXZVibdC-i4DOeF5h9RpbX35ksoM,1024 +torch/include/ATen/ops/isneginf_compositeexplicitautogradnonfunctional_dispatch.h,sha256=na5G02vfqv_qZ06ZhunIX4FVd2OzbGBEJ8EKgcRYnx4,792 +torch/include/ATen/ops/isneginf_cpu_dispatch.h,sha256=_6-kZGLygWlMHzQNvQtJFcSDe4TsEr7fxpTgoW53y60,883 +torch/include/ATen/ops/isneginf_cuda_dispatch.h,sha256=-kgtLYxzmPSPTNUXJR7bX0iXrO9iSxmhJL-vEDidgzo,885 +torch/include/ATen/ops/isneginf_meta.h,sha256=DEnNtLEBrp0-207S_vBD1eL3hsO-e9UGze89uuV6V7Q,577 +torch/include/ATen/ops/isneginf_meta_dispatch.h,sha256=hApmHsMa8d_yZiCwzPRYeuqB_6XSISXrAAVguQ7_Lcg,885 
+torch/include/ATen/ops/isneginf_native.h,sha256=bfM-BNsfEnPzK1soCPGyagvL538YJGJzZW3c_svP0F0,910 +torch/include/ATen/ops/isneginf_ops.h,sha256=1t_cOZEy9aJjuj1DezuqdsEls8sL4l-qQ7AJ2snqGjA,1602 +torch/include/ATen/ops/isposinf.h,sha256=Hq5-wy-7UOyMeEC3drrJlMO3oQrkacPGY3eEsUGr3AQ,1024 +torch/include/ATen/ops/isposinf_compositeexplicitautogradnonfunctional_dispatch.h,sha256=bkhRb_WOB7xTExZM9-UtXhWQ4GxJMVXHphjl119uNBI,792 +torch/include/ATen/ops/isposinf_cpu_dispatch.h,sha256=qwFtIg51JGhSsvkKHaVUSaocwRxmwEmVTlkygP8yfWk,883 +torch/include/ATen/ops/isposinf_cuda_dispatch.h,sha256=dUqldPPiINjCj26On1xOF9Xh-xfK5EekKc0zSFYzmSs,885 +torch/include/ATen/ops/isposinf_meta.h,sha256=lwh8BMuPg1TyukfPxvQ8QD8LKpxS0GUuWXz7ps36Ct4,577 +torch/include/ATen/ops/isposinf_meta_dispatch.h,sha256=AwDhbHfTlqRygBVDMdrZn6dQ3likrfN9GnG27sEYFn0,885 +torch/include/ATen/ops/isposinf_native.h,sha256=Z3hsKxBUk7hFkPiWtuPQEGU7DjP2ll_AvCVCfXFVqOU,910 +torch/include/ATen/ops/isposinf_ops.h,sha256=imsB-x0YEpksrLSzL2ZY02fQCSA1CzWDB1BERIfibrY,1602 +torch/include/ATen/ops/isreal.h,sha256=-L06uUbKlioiS57aGCvwfVztPXf-wDFz5KkUYquBTjU,614 +torch/include/ATen/ops/isreal_compositeimplicitautograd_dispatch.h,sha256=wssOt2_7Jl_wPKpeF8gBgvZigg7_2r8N1YK5V9G5Xyc,764 +torch/include/ATen/ops/isreal_native.h,sha256=UdiOQDEO09MYGCA8lAoWcRcsBD8uVDeZ5QhXVnIQtLU,476 +torch/include/ATen/ops/isreal_ops.h,sha256=xd8nb3gOx5lStloL-l7xyPZmHAPf9VQCDk4UOLMOYHE,967 +torch/include/ATen/ops/istft.h,sha256=i_pAcLSzvL7Y5Wu8MFnbhR0RcXEzI2QIqDbiumfsuNM,1215 +torch/include/ATen/ops/istft_compositeimplicitautograd_dispatch.h,sha256=gSuhyig2fA3hGgNr2KjRz4n5d63lxAQxKQh1nye-2NA,1092 +torch/include/ATen/ops/istft_native.h,sha256=65Lkmz1hJRghdVlIG0pUmegxgDdY3j1w0_tp-dPRglQ,804 +torch/include/ATen/ops/istft_ops.h,sha256=g2EMU8l5DMgKt9is0kgOypqUqqWXxF-0Txn80WyV7R8,1810 +torch/include/ATen/ops/item.h,sha256=RIIWwkqik_yb7R4naJI37Qke1VcepVjLhMA84IbY46E,478 
+torch/include/ATen/ops/item_compositeimplicitautograd_dispatch.h,sha256=ZlQsPmxfYxn7eSvY3f1D_S6fYZrRSSveSxKGU6OrXA0,762 +torch/include/ATen/ops/item_native.h,sha256=7j1VOEcaUymj1VrWDkmsdDPLJfmIxyfR6QjoL0ys0dA,474 +torch/include/ATen/ops/item_ops.h,sha256=58ljbr2AU3IXw3QxjTycL2fZr80hPTwC8N9n5n9nL3g,961 +torch/include/ATen/ops/kaiser_window.h,sha256=g4mJPh1CxbiUfaQDlPKY2XLbEhtoneJTzA9zXy1MIQM,4981 +torch/include/ATen/ops/kaiser_window_compositeexplicitautograd_dispatch.h,sha256=QeHNOMuoZ6hVMVzgYYNVpIXVvwVbRah825cHrGUVXGo,2262 +torch/include/ATen/ops/kaiser_window_native.h,sha256=dzn1ZsjZG-lVmV8uMhO73egSI4IPHMDms2FNmBABBbw,1419 +torch/include/ATen/ops/kaiser_window_ops.h,sha256=dhdqt2xdGKdmjiUZ_LT0yyJNux2t8zWWkG-U7iLAH_s,5840 +torch/include/ATen/ops/kl_div.h,sha256=Xq0vZ98KsweER1H82oEpOvyNTt3-QJNmQBO8t2rvGo0,795 +torch/include/ATen/ops/kl_div_compositeimplicitautograd_dispatch.h,sha256=cEFMesvwyJaiJr3hpFfTrd231Z6ZnUXKVb5vbOXWXNU,853 +torch/include/ATen/ops/kl_div_native.h,sha256=NzDYWTSXalneCpgjKPZbe6nS2cXbciCtYHJ5tw5mQmI,565 +torch/include/ATen/ops/kl_div_ops.h,sha256=YhE6RDkgFtmmudDi4R5t52NWSL7XEXl6xUK4hPl1n1U,1189 +torch/include/ATen/ops/kron.h,sha256=YI0XGNbtciKCY06gYCZoSF9XZMOIv5PPrYkoQQI6PAw,1125 +torch/include/ATen/ops/kron_compositeimplicitautograd_dispatch.h,sha256=M4rrYdqvzM-H0qSdsO59O-lkY57No4I6ENSI9Mkm-L4,993 +torch/include/ATen/ops/kron_native.h,sha256=uOOKoiEY6RBb71Mrw7CqRux1bLQ5D3crRKQXOX8cEVs,602 +torch/include/ATen/ops/kron_ops.h,sha256=fTTMYcrNXDfUT27iNi9LU4YlN0BClLg0DNZy7JjQbFw,1750 +torch/include/ATen/ops/kthvalue.h,sha256=uLtKs-OgtAmy7Tmk2VkGGxurpqhZWGFZ-_AhzYgqCbg,2800 +torch/include/ATen/ops/kthvalue_compositeexplicitautograd_dispatch.h,sha256=IMuzI1cKCxbkJ_JSpH1XfTSrU8L55kuen2Knwbg3YhE,838 +torch/include/ATen/ops/kthvalue_compositeimplicitautograd_dispatch.h,sha256=7eSi75LeTWUpcsSWQ0inYryRG1oURls7RcHp-4_4Y8w,1194 +torch/include/ATen/ops/kthvalue_cpu_dispatch.h,sha256=NiFRB46RuO_2NaGAbSZVzzcYxKOPPgR0kJaN_17OTEg,1016 
+torch/include/ATen/ops/kthvalue_cuda_dispatch.h,sha256=LxDzYMfSWFkv9FVF4e6y1tn7h4LpJ60a2KMFxiCwUdk,1018 +torch/include/ATen/ops/kthvalue_native.h,sha256=_V6C_gEuxAoOhD3lI0-gpOr2OsgLhESorAYKzjs6VYA,1202 +torch/include/ATen/ops/kthvalue_ops.h,sha256=LABuJlUEtsijE4g6C4SAPEYzFYg9V0TRniXUhz9lyNc,4002 +torch/include/ATen/ops/l1_loss.h,sha256=I7thiAyAS2ljox3L_LWo2oHAwAe55_EtMx13yipCWFo,738 +torch/include/ATen/ops/l1_loss_compositeimplicitautograd_dispatch.h,sha256=DdwctjgHfcBJpq3IbK6ZSJlfoCeId2T4SJoq0F_Fs0U,831 +torch/include/ATen/ops/l1_loss_native.h,sha256=rfKHS3NQxglkbQMO4vsW1sWDdkxYyFc131ozVExI3pI,543 +torch/include/ATen/ops/l1_loss_ops.h,sha256=t0gDnTTg4VP7Emu45tcv_kjjGn7wYak8TDa9te95kWc,1126 +torch/include/ATen/ops/layer_norm.h,sha256=j4NTNFUBAi097Ll5uGc6SbAy5tWu-WTkF-3jc4n7a-A,2364 +torch/include/ATen/ops/layer_norm_compositeimplicitautograd_dispatch.h,sha256=lkM8SgUdPpTiXp73D1WHyPqG2xr4Eh8mcbT_y9zCbLI,1175 +torch/include/ATen/ops/layer_norm_native.h,sha256=2SH5DK7VzizB0iRor-HQobZFbdUAGNR8tVpyopbdPNk,660 +torch/include/ATen/ops/layer_norm_ops.h,sha256=MygUN5ypIX9mdXBgW7fQ9SLYkySHFvBJzhTQneXYMP0,1509 +torch/include/ATen/ops/lcm.h,sha256=m3ULAXf07eqD0bMhxNpk47Lz8cyOEgOzWI7hmUZFUMA,1295 +torch/include/ATen/ops/lcm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=cAGOr3tQDesg_kq6DxPStJVQjiQ_JZ1LnoT2_XLXxt8,887 +torch/include/ATen/ops/lcm_cpu_dispatch.h,sha256=EIqGnY-65IygmauP0_oPS_0Noeo7DtQE-qpapsaV-cw,1020 +torch/include/ATen/ops/lcm_cuda_dispatch.h,sha256=mH-0ksfTKxqtai8mGEwjGto_Z0weRSjShnJilzbFEyU,1022 +torch/include/ATen/ops/lcm_meta.h,sha256=_GQAfJlA7yLhYqCM-OXNrdFLEvaDcaTurlf5G1S6IpY,598 +torch/include/ATen/ops/lcm_meta_dispatch.h,sha256=WZ8--OWURo4XcQEyY_OT0uBGK3s1AKvgYO_aRDcx1Ac,1022 +torch/include/ATen/ops/lcm_native.h,sha256=5xgIhIq-199A0Rrl4v6etOwMKqW7v-8Zq1srD_PlsH8,613 +torch/include/ATen/ops/lcm_ops.h,sha256=6FVe64z4a9eJa4KTJVwgiif4_tkL11X5_t1wpxbE5Z0,2353 +torch/include/ATen/ops/ldexp.h,sha256=vxeDY8pn-iocfLLeMWWNWycNTyBnDEN4mVLzljmPU8E,1335 
+torch/include/ATen/ops/ldexp_compositeimplicitautograd_dispatch.h,sha256=aAt401aajRcGOgUSHa2dWY4Y4UHJ2IJ_8k89TR0TuaY,1072 +torch/include/ATen/ops/ldexp_native.h,sha256=Atk8nvM_FP7Usxs5W_z2F31bai0qdRi12-k3NOSLMRg,680 +torch/include/ATen/ops/ldexp_ops.h,sha256=m2M-LWql2APRm7CZ3oFj97SUCKG_rqP-Pn1RnhXdqAg,2391 +torch/include/ATen/ops/le.h,sha256=vzJtDLeauSNAkOQ99-9G7yjqzh4Q0dhucHPRFH4IaXw,1819 +torch/include/ATen/ops/le_compositeexplicitautogradnonfunctional_dispatch.h,sha256=nAFDacOx2DNEjeHaEtA44QHZSX5W4MlwF1YcJgvWt-c,1034 +torch/include/ATen/ops/le_cpu_dispatch.h,sha256=uhlqk131ERYt8VmsnuqP2p_91C3jOVHkcijSPAqQIKA,1366 +torch/include/ATen/ops/le_cuda_dispatch.h,sha256=fV_zhFJUL2NzCHJgbyLnKKPOiG3SPXxnyqTRCwl16Y8,1368 +torch/include/ATen/ops/le_meta.h,sha256=BKG7bISM4rxs9TraIVy8SZI4YlamzsPpeiRVSKasVFk,751 +torch/include/ATen/ops/le_meta_dispatch.h,sha256=9ZuWvTOL5uQ9PO0A1yj_64Bh0uf3tVFGJrX6gWq2JFs,1368 +torch/include/ATen/ops/le_native.h,sha256=9EezCmoSne3tljQBNG7a24lfdI6diXWGoC06dwLyoOQ,1205 +torch/include/ATen/ops/le_ops.h,sha256=X1HCyn6xXonUOGGfQvzp7K-aaQNcPxk4AZ0KRXAEcTQ,4376 +torch/include/ATen/ops/leaky_relu.h,sha256=nS_nhUK8fQE9nt3a9B4B4zixYXi9xOKk5TYrx8tr0Uc,1529 +torch/include/ATen/ops/leaky_relu_backward.h,sha256=2Hwsbk0N4N4-OkGDOnsLUYzFVYSFEENrhemtkUSqkic,1795 +torch/include/ATen/ops/leaky_relu_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=JlDYL89-QIouDMoca884wLPQYT8XApBSRAbQ4ry3z6c,891 +torch/include/ATen/ops/leaky_relu_backward_cpu_dispatch.h,sha256=qnwO7C80nvREYGs3FpBFFyfRPOSTZMBXZ_faT7ZajB8,1194 +torch/include/ATen/ops/leaky_relu_backward_cuda_dispatch.h,sha256=7nXYkqzamWEWSL25DLwqZjClrHSdBAJAm950XI4ILwo,1196 +torch/include/ATen/ops/leaky_relu_backward_meta.h,sha256=sZEvUDUbxmwoMLcj4wM_Lgf4b-2xDGNuhq8c7-90Ppw,676 +torch/include/ATen/ops/leaky_relu_backward_meta_dispatch.h,sha256=DTbDlJU99_xlks6iXwUEBc6RDnhOMwwz8XxeZvD7ZL0,1196 +torch/include/ATen/ops/leaky_relu_backward_native.h,sha256=ubQ6NmKM2XLlS_n2XyCX_h4cIGGLKS9PU4SqkeuEDaA,730 
+torch/include/ATen/ops/leaky_relu_backward_ops.h,sha256=heuAzp6cEPownUHWC52GdRRrVQj03gF2CGpXSB22CNg,2282 +torch/include/ATen/ops/leaky_relu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=45bX3czHACelwUcKfixLNRmJ2GZ7gaBLWUHTgduZ3Wk,929 +torch/include/ATen/ops/leaky_relu_cpu_dispatch.h,sha256=MNnw9gneQFVevSNjYrd-zPmFWcW_64NhwWUAAVvHIP4,1099 +torch/include/ATen/ops/leaky_relu_cuda_dispatch.h,sha256=YpnBgBpwRy_wRWLjQaA3NcCz8DX-7oP3J57iqJksCEY,1101 +torch/include/ATen/ops/leaky_relu_meta.h,sha256=lfysxBcVwyrrV5qdSeRNuHeJ7NM45ss79vS29Vg-EBk,614 +torch/include/ATen/ops/leaky_relu_meta_dispatch.h,sha256=UgSbPrZcazh0-M1-FTSRT6tstp6kmhSzck7iGWx-ulE,1101 +torch/include/ATen/ops/leaky_relu_native.h,sha256=St_oHeOkHGfdMpYda3R-tt8NP7t5aLWnfsFOT7F0PTg,995 +torch/include/ATen/ops/leaky_relu_ops.h,sha256=J6xZJuU5N5niXGOeSt6tMQ0UzAFFVBNrC0SdHxsAdLI,2512 +torch/include/ATen/ops/lerp.h,sha256=nSspAKYFigVP3djrVb1JiAb4cdgQN9io0503-srtaF8,2121 +torch/include/ATen/ops/lerp_compositeexplicitautogradnonfunctional_dispatch.h,sha256=k8mmxOxkIwdeJXsVaq8Ch5J7nps_YfZLW0UfXivqMC0,1142 +torch/include/ATen/ops/lerp_cpu_dispatch.h,sha256=cGDE7_7aMW-bmQ50UQnPdxFlmcmu19N2FvcdSU6oY-U,1582 +torch/include/ATen/ops/lerp_cuda_dispatch.h,sha256=46PgRppUorrhLx3oOHc3HLEg_f5fA4jkIw_wrMLTJ50,1584 +torch/include/ATen/ops/lerp_meta.h,sha256=GtLFrFDxZcp_tiDjdzscJ5VVJdFl5pJt-CTLhoOeDB4,805 +torch/include/ATen/ops/lerp_meta_dispatch.h,sha256=OAvSXM5utS3BzjkCyCRIj3OZMS0im-iHoX2YVs29Gpg,1584 +torch/include/ATen/ops/lerp_native.h,sha256=q3EQcU4vwYnsX660D43m4kLBg1W3T4AqQ2JiNwsOce4,849 +torch/include/ATen/ops/lerp_ops.h,sha256=qr9urGJU5te0s_cOIVv0Lvq_PSyOb38Ax6mqg8UiJkQ,4910 +torch/include/ATen/ops/less.h,sha256=37LNviyafSplu_Mfu3jPg9gyg50UgPGdKLjRc3Z_eVo,1857 +torch/include/ATen/ops/less_compositeimplicitautograd_dispatch.h,sha256=h8_iLv2NZ_vWcXEJ2kE0oqxPktH5kRQxzaLZb5u7lAg,1426 +torch/include/ATen/ops/less_equal.h,sha256=sSmcCFzl08J7Ts8QM3TMkjdW4FhFESrTs_SNpyBfUjc,1971 
+torch/include/ATen/ops/less_equal_compositeimplicitautograd_dispatch.h,sha256=HldrZ7I4qFiLblsbZpNUp09lYq00e5bQXDatHI6DhKc,1474 +torch/include/ATen/ops/less_equal_native.h,sha256=LiO8Pyw0gmK2mS_FkwR9YloVrV526EzUQdHR4-w5Isw,968 +torch/include/ATen/ops/less_equal_ops.h,sha256=_MOb2B-FrN5uscp4UDKyvGnW_KBwlHnhyH8SaolaOAM,4520 +torch/include/ATen/ops/less_native.h,sha256=MS4m7F39M06IpFZW5Qvqi3gIxDXBYAQl-rUgrWilSCg,932 +torch/include/ATen/ops/less_ops.h,sha256=2mvp0zDOTz6k7UCRhBCOT_lLK-8-uNl1Pb51-0yhEpY,4412 +torch/include/ATen/ops/lgamma.h,sha256=pilVbzx-vxqECvLiRFdTvVW3E5LNczOfAkxKqF8mCao,1004 +torch/include/ATen/ops/lgamma_compositeexplicitautogradnonfunctional_dispatch.h,sha256=rZqhFXhcU-nmbZ2wOuD-u8KHNdgYMpRYQt-BzoxS8mw,841 +torch/include/ATen/ops/lgamma_cpu_dispatch.h,sha256=T8tQq41Ao_PKjQaKTYXKQJ03d9wMAtr9NEFUsgpJpS8,928 +torch/include/ATen/ops/lgamma_cuda_dispatch.h,sha256=MtY18bz8r_yeUjUFbl03hUIQGT8lhbqYbcBxrZu_FAU,930 +torch/include/ATen/ops/lgamma_meta.h,sha256=M84ZmtBtjw7jJgS3s9QueRKpeyrt9ORTz_Twl914maA,575 +torch/include/ATen/ops/lgamma_meta_dispatch.h,sha256=URa2AhwDKi5HIkPzGTb8jyjdf3PbplyHKUY8boVYgio,930 +torch/include/ATen/ops/lgamma_native.h,sha256=hLRCWbsa2SLWaxY5YRO5m9CHFQ235J9GiJT-3XLkv2c,596 +torch/include/ATen/ops/lgamma_ops.h,sha256=JnyEbAdY_gMMukeYq-Wfz_M6NZ7cPgoLklKZBOG80Ww,2122 +torch/include/ATen/ops/lift.h,sha256=MYwfXbsRMU5WLQ6NFSSpIDR9XXh9LFE5LhrEsYc2odI,984 +torch/include/ATen/ops/lift_compositeexplicitautograd_dispatch.h,sha256=s_qMNtOHoiiLcIa0ORiGWFYuUtN-sI25ELheo3nlNMU,915 +torch/include/ATen/ops/lift_fresh.h,sha256=XaYY7S8iAx_DJGTmGrCIBYejY8oNoIXKpzTlB7zCe10,636 +torch/include/ATen/ops/lift_fresh_compositeexplicitautograd_dispatch.h,sha256=Epp3eIkqwBXIoyRvlyOC37GqqputV8dEDUxfUlePUl4,768 +torch/include/ATen/ops/lift_fresh_copy.h,sha256=WFazkossY8jI1CyjT5q8HCdmA3l8hxoT5C7cYOY9JNM,1094 +torch/include/ATen/ops/lift_fresh_copy_compositeexplicitautograd_dispatch.h,sha256=V638bvtxhLG_tfXr2hDynzA1nlm3F38RiERbWUGz8d4,885 
+torch/include/ATen/ops/lift_fresh_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=mUq97k2g47V7UMj1bwth1mj96g3dmFpk93xGVY_hMXI,799 +torch/include/ATen/ops/lift_fresh_copy_native.h,sha256=nBB0tuDlWUF01phli5-TJ9sQfpWqP8cUGNCgeQGrwO4,572 +torch/include/ATen/ops/lift_fresh_copy_ops.h,sha256=SLKg74l0RanMmQjUZ2-gZ0Ki78qDr4RsdpnApdcd4Mg,1644 +torch/include/ATen/ops/lift_fresh_native.h,sha256=uq5XG5BqR28UCHIoh7RyPjotPfwsDedZWkziXmPMBfs,480 +torch/include/ATen/ops/lift_fresh_ops.h,sha256=yzbH_ttGKv-PghCPfNJM5F1e5wz9lolD8lYFkwbbwdo,985 +torch/include/ATen/ops/lift_native.h,sha256=gG0soRJv4Sv1PUVlpX6QOUzxmfd_lTz9jX6iqOjWd5o,550 +torch/include/ATen/ops/lift_ops.h,sha256=aTxz5F-HjseLvQs3Y19U3NUmU1Pun6xm58tZDaUlAw0,1578 +torch/include/ATen/ops/linalg_cholesky.h,sha256=vksK1VRg_y1iaFw3N4xq8VQYuQuVk9xGUZH4SRRIdCo,1220 +torch/include/ATen/ops/linalg_cholesky_compositeimplicitautograd_dispatch.h,sha256=AeQTce46XEZ1XMKLAvuoyta2I_xPueqUgkVSqn0ALUY,996 +torch/include/ATen/ops/linalg_cholesky_ex.h,sha256=LmDurcBvm8xTdxEIyAdoBWq1V-Ls1PiVYxXU1kZzea0,1638 +torch/include/ATen/ops/linalg_cholesky_ex_compositeexplicitautogradnonfunctional_dispatch.h,sha256=DmQ8sjVgsv-xvdPtLZ2jc6zlXH9nRqvTD7_is42Egx4,870 +torch/include/ATen/ops/linalg_cholesky_ex_cpu_dispatch.h,sha256=jp6nL0xHjHECvBbG-05mp0ZZH4oqU5bZ-NczqNyoWc0,1143 +torch/include/ATen/ops/linalg_cholesky_ex_cuda_dispatch.h,sha256=9KQ1cc9zfJyNExBdNjbpc5qfajZfbm9bW3uCRf6e_EY,1145 +torch/include/ATen/ops/linalg_cholesky_ex_meta.h,sha256=QvHBFj439eHou2Dhy9IEgIjhyaNKQn3vYY6uPPFlZHI,618 +torch/include/ATen/ops/linalg_cholesky_ex_meta_dispatch.h,sha256=oZzyKuoZClkeng2J_RTrQGjLFgm_uAOvVnV9BU4twnk,1145 +torch/include/ATen/ops/linalg_cholesky_ex_native.h,sha256=9k2FWLquQXG8q6GP_dXjZXcFeco4xog55UzbxTQj9_A,686 +torch/include/ATen/ops/linalg_cholesky_ex_ops.h,sha256=vjew4Dg7kPUdmkuV_QTSjvff-eko8G2Hnlcnp-e7cAA,2150 +torch/include/ATen/ops/linalg_cholesky_native.h,sha256=zXoS7W1ed8lkHjJDrUrg-OU2eO9KQcZaJJXq39o-u_w,602 
+torch/include/ATen/ops/linalg_cholesky_ops.h,sha256=_srkPW1y_WvhJcXRnmX0SAZ2rydqb-Zt3AsKlXqXs4I,1743 +torch/include/ATen/ops/linalg_cond.h,sha256=6aoWYbSq1e5vIpoT-_1Mgl1MKBQt536DYk-Br7w5hY8,1954 +torch/include/ATen/ops/linalg_cond_compositeimplicitautograd_dispatch.h,sha256=hKTqwcicJYz5QNC_tUvGyCfjJ4cikQSG1pXWe0uITQ4,1369 +torch/include/ATen/ops/linalg_cond_native.h,sha256=gzNJzz-o5W3bqDDXOXu3L634GilYbTwUvLee8I3BeFw,839 +torch/include/ATen/ops/linalg_cond_ops.h,sha256=7bvZgenfp7nT2aUIjWlq93eAAEwD9CR55cZWuwqL2io,3233 +torch/include/ATen/ops/linalg_cross.h,sha256=RVlT067DHEa-OZZjn_3mruFI8-hjuunrrL247dK4PmA,1304 +torch/include/ATen/ops/linalg_cross_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WxdqUrIeN0dnAB1Dhsrvo4heiigJ1foqgQbNPkIrs-I,838 +torch/include/ATen/ops/linalg_cross_cpu_dispatch.h,sha256=GbUiLkfRAo3bBs_hP60FNHqbZtBvvu7k9t0HO5Uda1s,1018 +torch/include/ATen/ops/linalg_cross_cuda_dispatch.h,sha256=nJYsij1yAWkz-u6h9zD9C18rW1t6gYRcN_9KOlgo4dw,1020 +torch/include/ATen/ops/linalg_cross_meta.h,sha256=Kyav97EyYtM9PLX2oapq_1tD95CM0DwYImfRVhTKMUA,620 +torch/include/ATen/ops/linalg_cross_meta_dispatch.h,sha256=llqEj0BPdym1Z4fNBLfVb4gdzOSVsbO2LeFRLPa_C48,1020 +torch/include/ATen/ops/linalg_cross_native.h,sha256=Z43iT3i-6P018UhGhJnph4IIeTjUKFLwD5-GdvaDLUA,766 +torch/include/ATen/ops/linalg_cross_ops.h,sha256=QD-YQ_8gXimbahQ8Geh2xvN-TNX_OTVF1xNxhvMnGJ8,1895 +torch/include/ATen/ops/linalg_det.h,sha256=S9G4bimOBDkBnw6ieF_uk6qKSg2qHZU7FQSsKTiHeYA,1017 +torch/include/ATen/ops/linalg_det_compositeimplicitautograd_dispatch.h,sha256=dYCsBZYJzGdz__EwNHNbA4a0nbCgxN9B8p-1OA_6j8c,924 +torch/include/ATen/ops/linalg_det_native.h,sha256=YtoogQor_b16lp_wjmKw2FxpmqwUP2XcONU4WQq5rdI,556 +torch/include/ATen/ops/linalg_det_ops.h,sha256=gSreHedyKUuGrkFd-IxpnQiXVJlw5TJH9dWct3rF0HQ,1596 +torch/include/ATen/ops/linalg_diagonal.h,sha256=8b8lh5OSHdOt0NQlVuPFM-omjF3EmiGGxwk47D8Dkyw,762 
+torch/include/ATen/ops/linalg_diagonal_compositeimplicitautograd_dispatch.h,sha256=PUqGbsFh5qRWriLNSflGHNEKFKsbKxCyFTz2dw2w7KA,822 +torch/include/ATen/ops/linalg_diagonal_native.h,sha256=cEuC_Z0H3BOnkaFavZKagq16uNJJjjRob1xbq5xwpLo,534 +torch/include/ATen/ops/linalg_diagonal_ops.h,sha256=OFL7gXgY8v0hzrhYi7KZ-ml2oX-jSPANiXT3t4rxHnQ,1149 +torch/include/ATen/ops/linalg_eig.h,sha256=Xxjg-14mX5Hc7QJPrL5uo0t4qphbeUadjCxt_ZwEkwY,1416 +torch/include/ATen/ops/linalg_eig_cpu_dispatch.h,sha256=IXX_aHDbu9PYzx7yxtEuljgMXaDewPUWb827O5NRo7U,1038 +torch/include/ATen/ops/linalg_eig_cuda_dispatch.h,sha256=H7rrfuazDX9pi6DtBjvZxco3-IS89JNEiz9yleGGRes,1040 +torch/include/ATen/ops/linalg_eig_native.h,sha256=Uph94TNerqK8Ud801qs-17psg9RKd3r0VlOBfPrRbX8,649 +torch/include/ATen/ops/linalg_eig_ops.h,sha256=C3T6wXAk_qo3jZgxsCeYYmXUt4XYEmDtTkBcWHL8_T4,1961 +torch/include/ATen/ops/linalg_eigh.h,sha256=4Hu0wL2_FvYDav6VqSraEhUqP3fwTTtnOisN-vIdLQs,1525 +torch/include/ATen/ops/linalg_eigh_compositeimplicitautograd_dispatch.h,sha256=M7U0cEFF-8l5MHaH_NY4_w0D7R0qnDXUKdXLNk1yf88,1144 +torch/include/ATen/ops/linalg_eigh_native.h,sha256=RChyzFwn7T_Hx07VwOyh43qhvzyMszGlNLgJ0cXmTeY,692 +torch/include/ATen/ops/linalg_eigh_ops.h,sha256=bVgmoeCbaUw06ZUiABDLlkBXoCOKZESXrNbUqbYVu7I,2112 +torch/include/ATen/ops/linalg_eigvals.h,sha256=d90Gmf2_t_JEVJDRrVqEBUsmj82MBbtCKLOO4mQmE88,1084 +torch/include/ATen/ops/linalg_eigvals_compositeimplicitautograd_dispatch.h,sha256=WLOpJ_8pVIPUeTJWWrvLYYSr9UFVwQwftehDlxUduXg,772 +torch/include/ATen/ops/linalg_eigvals_cpu_dispatch.h,sha256=sf0tsRZxx9i3EWBCWwv9kJNwRUJujhz3yljugOM4C98,839 +torch/include/ATen/ops/linalg_eigvals_cuda_dispatch.h,sha256=avcvg216S9vIWDoq5XlgHHUQ2IsbkvsEEfie49qs_tg,841 +torch/include/ATen/ops/linalg_eigvals_native.h,sha256=I4_EhghHQ3UzU0heTawT6MEsMR5kPOlHVG-dBcTbtlo,570 +torch/include/ATen/ops/linalg_eigvals_ops.h,sha256=xj0uHNLHALAmcmfuO2BQqveQrgthnyKiCP_aSwbxgsY,1638 
+torch/include/ATen/ops/linalg_eigvalsh.h,sha256=-iH-6vNdMZFw8LZW6gniEU4-TOjqThFjLINmsnqnxzA,1231 +torch/include/ATen/ops/linalg_eigvalsh_compositeimplicitautograd_dispatch.h,sha256=d8dIOnhLKOFUXrncNdEQGrvs3_d_aQTvmNjzXnShNIU,1025 +torch/include/ATen/ops/linalg_eigvalsh_native.h,sha256=XBnQZILWEN3IqhriH3JySAyd_UIvooYMfLNdaG0Hwog,622 +torch/include/ATen/ops/linalg_eigvalsh_ops.h,sha256=7cboAcJ6c7SLg_lj06upzr9x16PGbufuF9gRCbQGbH8,1804 +torch/include/ATen/ops/linalg_householder_product.h,sha256=vvTMeD0W28sBlWq2lAx6NNWHB66FrUbYJQaWUEcySJw,1336 +torch/include/ATen/ops/linalg_householder_product_cpu_dispatch.h,sha256=bvrmTo0qKelasHOFD-6sMqk_utquKXUvu-emGLkJcYA,1012 +torch/include/ATen/ops/linalg_householder_product_cuda_dispatch.h,sha256=CZZJthOjlqWBINhAfbgeMrSa1fkNtH-e_mFTByXT_Qs,1014 +torch/include/ATen/ops/linalg_householder_product_native.h,sha256=zonwzwPCl5Cs8euK4ydWbmpcWyb9cIF50SnyQ58PJJM,644 +torch/include/ATen/ops/linalg_householder_product_ops.h,sha256=KAysmwuzUgqKIRm3PB7ODJuQ2OKZZUyfPyp5ZZx4fj0,1876 +torch/include/ATen/ops/linalg_inv.h,sha256=z0LXC8OjNZ7kKwhoH2O74NXE1BdsPpWBFU6WxADuBmI,1017 +torch/include/ATen/ops/linalg_inv_compositeimplicitautograd_dispatch.h,sha256=xifdp404nbGnsKcgjlYZH3C5vYXt2AI5T3hJDBp0EXQ,924 +torch/include/ATen/ops/linalg_inv_ex.h,sha256=jEjsfrFinbPkoFokXMXFP4f1sZnEK3kqCoqkcJYyTfk,1516 +torch/include/ATen/ops/linalg_inv_ex_compositeexplicitautogradnonfunctional_dispatch.h,sha256=y8IGNs_EPJG_OStM1lJshdO_oWINpDBaVu9FaO4449E,844 +torch/include/ATen/ops/linalg_inv_ex_cpu_dispatch.h,sha256=kGYXUMJSGuIF-4TZQFTx5K3oTE8O_5Ap0nm_-_fPspM,1083 +torch/include/ATen/ops/linalg_inv_ex_cuda_dispatch.h,sha256=8DyzJ7eKuZoJ9erFCn5zX7ogTCRIrXqGv3_U_TWym78,1085 +torch/include/ATen/ops/linalg_inv_ex_meta.h,sha256=IIMOpHQrrZLWaLE0qkp42ouJnt0esBRPZnAj_qcKX2Q,598 +torch/include/ATen/ops/linalg_inv_ex_meta_dispatch.h,sha256=cZ-MXYBoSiIhJ3mO2lR0A8b53lMirMGsvigO95H8zBY,1085 
+torch/include/ATen/ops/linalg_inv_ex_native.h,sha256=u2LdsZMyqqN0m2pVxEwJqTe3vet7FRuiJudjcXvoN54,662 +torch/include/ATen/ops/linalg_inv_ex_ops.h,sha256=ysBMW6FhZqG6WmIsZBKlceBvReHnZtKyK4IKEyPAQ54,2054 +torch/include/ATen/ops/linalg_inv_native.h,sha256=DSyeeAWevZzyNKiI5J-WdFOmSa6olTkOwhyGUgVt-z0,556 +torch/include/ATen/ops/linalg_inv_ops.h,sha256=dXULkP5n2swsupP1KlcJ-B9cMZdS2bCLLCoxc4vgD5A,1596 +torch/include/ATen/ops/linalg_ldl_factor.h,sha256=oLbldOCd5y37vTjfWprm5UKcPIZFe1h5vDqVneUcS1Y,1513 +torch/include/ATen/ops/linalg_ldl_factor_compositeimplicitautograd_dispatch.h,sha256=rLPIQKmR-hvDbhsNfxeamYsk9sp6OSehHs0I_5Wz4fs,1133 +torch/include/ATen/ops/linalg_ldl_factor_ex.h,sha256=WEZUOR8Qqq9HGaCOGeI4dCYvfBA8x5nTfEMgk7_uudA,1897 +torch/include/ATen/ops/linalg_ldl_factor_ex_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XoLjJjg2Xgbe19pZplpYNkg7BzrQI08E0xGPlAInbb4,887 +torch/include/ATen/ops/linalg_ldl_factor_ex_cpu_dispatch.h,sha256=m8MTePggsQs0Nhw3JyHWCw4Ybsy8JxhVUhNG8ieg2n8,1242 +torch/include/ATen/ops/linalg_ldl_factor_ex_cuda_dispatch.h,sha256=EKIdD96tRVvOfjDzeBpt9PMsuS5gA43GrAKUeK94dtQ,1244 +torch/include/ATen/ops/linalg_ldl_factor_ex_meta.h,sha256=YLhWXR_E8rkZHy_jz4N8jldNvs-RbGmYUDEba-6BYiI,624 +torch/include/ATen/ops/linalg_ldl_factor_ex_meta_dispatch.h,sha256=OKxqYiYpXg2OHeW9Cn4NN57TeBXFaKjjuBIxqCFl_0E,1244 +torch/include/ATen/ops/linalg_ldl_factor_ex_native.h,sha256=AWMHC2DFvSUAQBSnKyOIrUJmEqb8ix6Ikmz_HU6Pn2Q,724 +torch/include/ATen/ops/linalg_ldl_factor_ex_ops.h,sha256=VHR_uxZBq8H1YbNA_un2UUP6a0bjRJjyWocpaA7Tz6Q,2378 +torch/include/ATen/ops/linalg_ldl_factor_native.h,sha256=e9rKQ69CtiJKN4OVbyCuXajYdQHMNo48bjCTTJLmpwA,686 +torch/include/ATen/ops/linalg_ldl_factor_ops.h,sha256=tx2b2-ddI5UbgmO-xs6ma-2h75I9qMYu9dZWZHdQg4Q,2051 +torch/include/ATen/ops/linalg_ldl_solve.h,sha256=iwpbKPBCoPLpTLRyd3exa5AcFcjPPwbyWNeqKxqEZ3Y,1503 
+torch/include/ATen/ops/linalg_ldl_solve_compositeexplicitautogradnonfunctional_dispatch.h,sha256=CV3VnzneqLbwlhVjHM7O2zDjR9HbSE7mR4X6Pby-8zw,869 +torch/include/ATen/ops/linalg_ldl_solve_cpu_dispatch.h,sha256=nLU_qlUPQbsrNp3mo2Mzn-ibSdThflElNnDKYxfaHI0,1108 +torch/include/ATen/ops/linalg_ldl_solve_cuda_dispatch.h,sha256=PWy4L7ZdrnQ_IAKEz4LlX0e_-JsjKtZCxSwaKusPsR4,1110 +torch/include/ATen/ops/linalg_ldl_solve_meta.h,sha256=gEcYbBH4cBSmGD1AvT-oysSUvoV4dHcMZiIpZhg7Vvk,648 +torch/include/ATen/ops/linalg_ldl_solve_meta_dispatch.h,sha256=tzqJiqVOMN5qcLSUIplJXZTaI2UslRcNgKcY80ANcmY,1110 +torch/include/ATen/ops/linalg_ldl_solve_native.h,sha256=47zC8XubtB4sQXag7t2H2ThD5xctv4pqSVeSeP3Q3wM,689 +torch/include/ATen/ops/linalg_ldl_solve_ops.h,sha256=IeSC4dJZUdxWMpbdl8srmGcKpi4_ud90EDuNx83Ldyk,2087 +torch/include/ATen/ops/linalg_lstsq.h,sha256=2RJxEjolYEQUOlcd1fdK2YyfjQ_HzfsA_6Ixmqnqiyw,2360 +torch/include/ATen/ops/linalg_lstsq_compositeexplicitautograd_dispatch.h,sha256=bU02tR0xyyVCynVnACWH7W13pthrtPVjq9R9obniNME,942 +torch/include/ATen/ops/linalg_lstsq_cpu_dispatch.h,sha256=7rMFmahk_QQYXOiHQvqMMgMhMU1SlDqiNvA0kDtRMzk,1317 +torch/include/ATen/ops/linalg_lstsq_cuda_dispatch.h,sha256=Ywup6yUkZpbIkX6Cjw53YtxXYntesVRwdNKkLy3U76A,1319 +torch/include/ATen/ops/linalg_lstsq_native.h,sha256=jocXDRFM8mKURkFnMBwjlbc2o5qq3EZkF4YVbGyte3U,964 +torch/include/ATen/ops/linalg_lstsq_ops.h,sha256=3OT7KZQhLni54fdw3r8kSn8W5Am-999kKZLdO6YTMF0,2977 +torch/include/ATen/ops/linalg_lu.h,sha256=ArTYd0hi4-uWubHPbPCJgf8fiPRl1qUztOTyfaozEL4,1452 +torch/include/ATen/ops/linalg_lu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=smtmNY4C8YljZ1ykgna3nAc41ErMW43WWQg6y47dWtE,843 +torch/include/ATen/ops/linalg_lu_cpu_dispatch.h,sha256=Xp7_D4L1il7ip5dwwtKO_Pqz3OWh6N0JhFNzzjfoVIY,1099 +torch/include/ATen/ops/linalg_lu_cuda_dispatch.h,sha256=uiYoc1rh5c-9GtJj34CAPP-11HZIBg5k4eDnzkM0JtE,1101 +torch/include/ATen/ops/linalg_lu_factor.h,sha256=B1vQnzaGrSxL-guYD70hiNE5C1nhKklIl8aY-i0PLMM,1435 
+torch/include/ATen/ops/linalg_lu_factor_compositeimplicitautograd_dispatch.h,sha256=8vUJQ8riaNtEXmkP4XvXh5qDoZu45IrXYH88MHa1EiU,1107 +torch/include/ATen/ops/linalg_lu_factor_ex.h,sha256=kX5Nvl9Kq8TEOkxCwFIFxz9WvHAJmVTWYQcGqhmeQ2s,1819 +torch/include/ATen/ops/linalg_lu_factor_ex_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AuCmBCTJMmmgAt5ZTS_I4yS8dWyK9zES31pa99w-FFM,878 +torch/include/ATen/ops/linalg_lu_factor_ex_cpu_dispatch.h,sha256=R1P9J-kB3kP7sqH5Lc5ZT5odnFN9JVd10j4YCIm4t1M,1216 +torch/include/ATen/ops/linalg_lu_factor_ex_cuda_dispatch.h,sha256=Mn2CQlZLkBe1FhWH7rMyuCrAp9NzLyay3e4ZgDDw4BQ,1218 +torch/include/ATen/ops/linalg_lu_factor_ex_meta.h,sha256=5mRtzbEcP3AKPzHqG4kobttclf24VufmSTRfgIheP-k,616 +torch/include/ATen/ops/linalg_lu_factor_ex_meta_dispatch.h,sha256=eXbcXiFGM_Ikqgela1zN1EglIZilZxdeyYAHwZUwU14,1218 +torch/include/ATen/ops/linalg_lu_factor_ex_native.h,sha256=sXr_bY4BkYxF6mydelT9rOIYG6N7ImxLYb3gn5gVjfo,714 +torch/include/ATen/ops/linalg_lu_factor_ex_ops.h,sha256=fpoh8YmqAjndsG2cjfWM4uknJvlnqo9xF2yWxbWgTJ8,2328 +torch/include/ATen/ops/linalg_lu_factor_native.h,sha256=nFScBn0_GaGrcOL5dYOn5G_cDuezA5ditCg13m1a7Kg,669 +torch/include/ATen/ops/linalg_lu_factor_ops.h,sha256=MSl99HgWh5Ilqr3_WVyuwnIm9loAnE1bPtd3C6-gkvw,2001 +torch/include/ATen/ops/linalg_lu_meta.h,sha256=lxqWNB9K9eXAI6CZaThyFi4ZVG4-vY68bZ7aM23Y6GA,587 +torch/include/ATen/ops/linalg_lu_meta_dispatch.h,sha256=B8teXxNTtx2YftlFlWnvYJz8Nw3QJVRb22GZmskEWno,1101 +torch/include/ATen/ops/linalg_lu_native.h,sha256=3u7iw-jc2b7jJ6zd6QaN9K-akf23Pr-5VP-3hf08eZo,656 +torch/include/ATen/ops/linalg_lu_ops.h,sha256=i00jI5Pi1dikFvdf-nsAxUbQerGTzoDLT8YmfPcal98,2085 +torch/include/ATen/ops/linalg_lu_solve.h,sha256=DfWHCd6as5-JLuwCI4ZeUgQfw2KE14Km-sQDuKNvmJw,1584 +torch/include/ATen/ops/linalg_lu_solve_compositeexplicitautogradnonfunctional_dispatch.h,sha256=yDxFDDhx0T8KutdXHAJhMf9fnn7Iu4JOymmVwAUVA9w,882 
+torch/include/ATen/ops/linalg_lu_solve_cpu_dispatch.h,sha256=Pogx7Ji1bVUqqADhPj-IyHf44YPpzm6VLgLM-h6K91g,1142 +torch/include/ATen/ops/linalg_lu_solve_cuda_dispatch.h,sha256=kYq9fGi2__Qzd9GDG9Klte-ryXKZDcl5vbaTSozunG0,1144 +torch/include/ATen/ops/linalg_lu_solve_meta.h,sha256=num3XyJPR9fXzNuWzgqTb1SYZ3fL1ai1VYfsEUvK8Dg,656 +torch/include/ATen/ops/linalg_lu_solve_meta_dispatch.h,sha256=nw642a0aRVtBLFvBXa5E_80BBLR7sT621A0pDE3JanE,1144 +torch/include/ATen/ops/linalg_lu_solve_native.h,sha256=f5Ylqdyo6g9W7SyNHCinlM-2fHOJlE8ZHH23EakDHo0,695 +torch/include/ATen/ops/linalg_lu_solve_ops.h,sha256=L0cYgXWYT_Dqsl-VyL__Pq6I9pt67ZPAFA0jrJHraxY,2157 +torch/include/ATen/ops/linalg_matmul.h,sha256=pkC4LmEhHqgBBtwaj7rhLwMCOCSukWZbX3ai1gKpjCE,1215 +torch/include/ATen/ops/linalg_matmul_compositeimplicitautograd_dispatch.h,sha256=Q3uz6RawtS4rq_yETtDzm23bP4v35zDesmU8KjkBj8I,1020 +torch/include/ATen/ops/linalg_matmul_native.h,sha256=n0S5DH5lLZYf6xyZSpJ0s_eNKV9P0iSryN11rm91MvA,620 +torch/include/ATen/ops/linalg_matmul_ops.h,sha256=SWkk1sA2-nikkuUw_VQD42qY4bQEbozCojGZ5-Q4u1M,1804 +torch/include/ATen/ops/linalg_matrix_exp.h,sha256=gf0zAlHi1ZCfzaCMxCK41-wUEHyhdSkkQYcjC3HLXbk,1114 +torch/include/ATen/ops/linalg_matrix_exp_compositeexplicitautograd_dispatch.h,sha256=m7KVL5YQSOj0jmGsp6hX59dYJ14RQgYKiy-_kL3PsyQ,889 +torch/include/ATen/ops/linalg_matrix_exp_cpu_dispatch.h,sha256=brPS4ishlrgIM0GQYTkpMfzy5NC14JfUImajpcjnSX0,731 +torch/include/ATen/ops/linalg_matrix_exp_cuda_dispatch.h,sha256=skDxjmJFuCVge-yFNbu8ZZzu9Z3vxQnRtMwD7T3h2MQ,733 +torch/include/ATen/ops/linalg_matrix_exp_native.h,sha256=rkqhIfaXqR2WGXNvpQv9Hn2u1gVCiMer_L7Bk555JMA,576 +torch/include/ATen/ops/linalg_matrix_exp_ops.h,sha256=Ku2u8WPenxtN8mgUXDBCBfrRFhVSzEyptmQ69be8AAw,1656 +torch/include/ATen/ops/linalg_matrix_norm.h,sha256=fGcFkl4UYNXfGA0I_zJ2IKGw-dlhzEaXYCb2RzOC11s,3136 +torch/include/ATen/ops/linalg_matrix_norm_compositeimplicitautograd_dispatch.h,sha256=1trsOTo3butPXz_b-lWyFVctmC9AFx7btx53npcPdJ0,1914 
+torch/include/ATen/ops/linalg_matrix_norm_native.h,sha256=ArcJ--ZjtOpwHsBVxu2x3libZepqiIDQVQlUsSY0Rvg,1186 +torch/include/ATen/ops/linalg_matrix_norm_ops.h,sha256=YofL19H01RdQegwM_HoE8f5HHpGb8A7IrgeZ1Aa3q1o,4325 +torch/include/ATen/ops/linalg_matrix_power.h,sha256=QOEBbkS12pM6GYuUmogXd8ECu5gYKXghJOuBJC-L_2g,1197 +torch/include/ATen/ops/linalg_matrix_power_compositeimplicitautograd_dispatch.h,sha256=3sHPktmIURdB8o_H5ehBCYGQBKRUcb3j3XyhXxILems,993 +torch/include/ATen/ops/linalg_matrix_power_native.h,sha256=oiril8Aply293UCVBdY-5zdGhA3471awNWivA7oqrXs,602 +torch/include/ATen/ops/linalg_matrix_power_ops.h,sha256=L-jE8IZ7kO4L3bvlg-ebebMv-kew8YTHc_SoMnaOUx0,1744 +torch/include/ATen/ops/linalg_matrix_rank.h,sha256=-kiYqVlccU9FrdIIBdDlbGzAMP0nirw8T7wGOnXtSkI,4892 +torch/include/ATen/ops/linalg_matrix_rank_compositeimplicitautograd_dispatch.h,sha256=cEyVGQUFRcVLxP75EE_-eXCROMl4MxmdgLrss34GvKw,2496 +torch/include/ATen/ops/linalg_matrix_rank_native.h,sha256=9UH4DnHzhiEs1PWGnjLIhNxIhHAmIYLgxZGiMf1yAoY,1598 +torch/include/ATen/ops/linalg_matrix_rank_ops.h,sha256=B5h5qWqa-GfioXh3y6paHOj-81U-h236evU3JmzFM4I,7323 +torch/include/ATen/ops/linalg_multi_dot.h,sha256=NCdq3pP5ydD_4gNUH4MVDoE-IqJ4TtXgARk4fJeWsrw,1125 +torch/include/ATen/ops/linalg_multi_dot_compositeimplicitautograd_dispatch.h,sha256=bXaWoHpaT67VqY7UPfhALyd4AzXT1xFQUYeTIp5XiiY,948 +torch/include/ATen/ops/linalg_multi_dot_native.h,sha256=vlyyieGUdiunWUiV_dxLGF5MGJ_DrRxxLvv4SQi0vWQ,572 +torch/include/ATen/ops/linalg_multi_dot_ops.h,sha256=DprcDcANn2iVicW_KOBVsHhmjPV4bhwCG8qbqjlsoyE,1648 +torch/include/ATen/ops/linalg_norm.h,sha256=BqCE9NNJAuVs0wVtIBHJB_hp_7XSMHIx3Pp390HpSQ0,3142 +torch/include/ATen/ops/linalg_norm_compositeimplicitautograd_dispatch.h,sha256=owa4Qufyld8ARQq-jhdgEvJ6wEts_4eZy33EVl5yn2U,2017 +torch/include/ATen/ops/linalg_norm_native.h,sha256=E7ojaYXyIVhjRseKGQZiVbutRr0LM77n-DovyxVqIpw,1247 +torch/include/ATen/ops/linalg_norm_ops.h,sha256=mNl9gRlwBshR5rnazJ90gYOjFsHdFJYiKIgIM0U1mFI,4435 
+torch/include/ATen/ops/linalg_pinv.h,sha256=HPkuUKGatyV2SIfMLeSbiGl1BIcszC4Jer7lnaDdMo8,4663 +torch/include/ATen/ops/linalg_pinv_compositeexplicitautograd_dispatch.h,sha256=g5Oo7lDsticDXlctTZJ00YxqvqGS1b2o1vJRGYCeWm0,1089 +torch/include/ATen/ops/linalg_pinv_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XHXDU1STjIrDjPVsG2W2dMJsk-7rkjatyYZFAL03i5s,907 +torch/include/ATen/ops/linalg_pinv_compositeimplicitautograd_dispatch.h,sha256=MpN03GIu_UC98yBieVcu80CIbyP-qVnwNFk9Ra5ZUrU,1868 +torch/include/ATen/ops/linalg_pinv_native.h,sha256=0ycvuc0e-zo8BTU8XO0dpOt5XC8WwG6rsX2QGFrRRQw,1546 +torch/include/ATen/ops/linalg_pinv_ops.h,sha256=kiTpxevrPPCTn5isgUKjcq9NuQpEeZJEbpoLAi1ZbpA,7179 +torch/include/ATen/ops/linalg_qr.h,sha256=glFcsfMypxRh0FG2vhWxhb8GiKRzTv91SDz2TSCMp6E,1357 +torch/include/ATen/ops/linalg_qr_compositeexplicitautogradnonfunctional_dispatch.h,sha256=FCJ9MjgtDChbM30XPAGtxzi58IARZx8Pix-jSwWHvYk,848 +torch/include/ATen/ops/linalg_qr_cpu_dispatch.h,sha256=euAJ4D_Rp71zYwdcpiG9s4ytOrpyg5ZVY2qlu6fzxOw,1073 +torch/include/ATen/ops/linalg_qr_cuda_dispatch.h,sha256=zb2_m3CL3xSR6-_W538KkXt04STnT-K4IAoEZSUK3nU,1075 +torch/include/ATen/ops/linalg_qr_meta.h,sha256=2E67zUOzOH2OUAAP_Br9QqY4aWnyck6sb9IcPzjm2sQ,598 +torch/include/ATen/ops/linalg_qr_meta_dispatch.h,sha256=xf8OjPcSwRXrluAzKXJerNZIyVrDsQE-bQpsDwyYLGo,1075 +torch/include/ATen/ops/linalg_qr_native.h,sha256=s5Sj4U2HQu3MmLShfnOUbGGubG4rsa5HfDkIGOuE1k4,645 +torch/include/ATen/ops/linalg_qr_ops.h,sha256=Pz8ZPaLJeU1ZvwQPDQqdNQ7KwSPK6g0AhijqPsisu7E,2000 +torch/include/ATen/ops/linalg_slogdet.h,sha256=ASevwYm65pNmBwzBDzgA-oAqgCvhpMDll5tq88CV4EM,1339 +torch/include/ATen/ops/linalg_slogdet_compositeimplicitautograd_dispatch.h,sha256=di5jHc0VrIubufgKqQBOH1etWRrc2u9r9uc_CokyQsc,1065 +torch/include/ATen/ops/linalg_slogdet_native.h,sha256=L-dkXH2iyhOmiycwmU3sR41V0NZ5V6ZnqVVQ0acSr7o,641 +torch/include/ATen/ops/linalg_slogdet_ops.h,sha256=ssLd9_Dj7QIRarPwmxmGmW0LnIUgc6KqxIj9GN06W0c,1917 
+torch/include/ATen/ops/linalg_solve.h,sha256=DhA2II8BCpCbZ_u9mRK5Sb-uEftger9w1EWfX80aokE,1254 +torch/include/ATen/ops/linalg_solve_compositeimplicitautograd_dispatch.h,sha256=8cY-0PMm0sj75iR6eBQOs1kYVQj-KBEnWBDhHw1FuNM,1039 +torch/include/ATen/ops/linalg_solve_ex.h,sha256=f_-5tPJvgaTi2e-gxLiP4AY0RegEQR-4NIcf78X-7-4,1725 +torch/include/ATen/ops/linalg_solve_ex_compositeimplicitautograd_dispatch.h,sha256=HZUPTWcwao_GoW3OhxMFul8gRH0OHZBZ1C3eANupxFY,1240 +torch/include/ATen/ops/linalg_solve_ex_native.h,sha256=chpW9FdLE2omBoybjznQycG5tZZcYuC609WuVeRgvTI,755 +torch/include/ATen/ops/linalg_solve_ex_ops.h,sha256=AYrRef9dAF-nW-zWSSAFT_RJ4GgraPgbnk9JpSUIRb4,2285 +torch/include/ATen/ops/linalg_solve_native.h,sha256=NOpZbmUSrQwCOhDEsie8lp8H5pLkHTY8toNkKKpNhbk,631 +torch/include/ATen/ops/linalg_solve_ops.h,sha256=M6zaCNGtEadidg0twpSLUMZgwcBI8C9I11N3rN3ta8o,1847 +torch/include/ATen/ops/linalg_solve_triangular.h,sha256=j8gCcuusmA7An2Xzy6kg1bBT3CaMaZEwnn-8e-ol8JY,1679 +torch/include/ATen/ops/linalg_solve_triangular_cpu_dispatch.h,sha256=4U0K_-QB9weJUUAgCgnwIcCnvo1fDkGLZFqoYyjmcHQ,1145 +torch/include/ATen/ops/linalg_solve_triangular_cuda_dispatch.h,sha256=CQUuMO8zIXOdehSoQQaf1XJpppDNtM9HQ8dmk-yZjIU,1147 +torch/include/ATen/ops/linalg_solve_triangular_native.h,sha256=qa7zSZHUORfmhJEjQTssMvxysbwU4lyRXGYFsVJg_wg,729 +torch/include/ATen/ops/linalg_solve_triangular_ops.h,sha256=WGGezErIhDBwXffyvtNXzDspq_G2dsel8nMTQcDDY-4,2159 +torch/include/ATen/ops/linalg_svd.h,sha256=AZZIp_Z3sJIhlUmpV-mtLt_sL2BrRhHmtwFlO-WlsDA,1769 +torch/include/ATen/ops/linalg_svd_compositeimplicitautograd_dispatch.h,sha256=ClT8lxwaMZLogmilMHvBALfnzgAgXN1wkKPJTle0Xhg,1328 +torch/include/ATen/ops/linalg_svd_native.h,sha256=SpFKaUieXIO7IdiW7SyreWkKnTizDKWg48LmahBHeAI,807 +torch/include/ATen/ops/linalg_svd_ops.h,sha256=hJY5fp1pVi2m5BBXSHVqVjTuSgKkJAXaMkIyosNWe0g,2412 +torch/include/ATen/ops/linalg_svdvals.h,sha256=NsUhpqEujwg3YyZfThrzOxWfWO58lTpFp-ADok-BE3k,1294 
+torch/include/ATen/ops/linalg_svdvals_compositeimplicitautograd_dispatch.h,sha256=mkSpLo7FHRSmsiTf1ekCnJIrodJl-fMZsHSxbd0bZ_U,1092 +torch/include/ATen/ops/linalg_svdvals_native.h,sha256=mlNmowXp6Ae08ekHJ3mKMa9JBdgl3Ly_lXTkMyjn7eU,663 +torch/include/ATen/ops/linalg_svdvals_ops.h,sha256=u7QkSscMCB12wqtyDT04veOo610ISQs5GFoJK5my4yI,1897 +torch/include/ATen/ops/linalg_tensorinv.h,sha256=5pkamU3VTO7Gg-IEWukNeGLrirT2Xou3FdEKCijVmD8,1195 +torch/include/ATen/ops/linalg_tensorinv_compositeimplicitautograd_dispatch.h,sha256=oBf9t0vCRaKFN8v36An5TaBsmM7utTyFWiKoNQjrhYc,994 +torch/include/ATen/ops/linalg_tensorinv_native.h,sha256=AgCcbB2Qr0xZDTC2KOErVgZHh3UNUfxtbdo4tMC9ebo,602 +torch/include/ATen/ops/linalg_tensorinv_ops.h,sha256=BQ1dhISC-eY9Ho08wW_Efnv844x3lESkfj7NeCIMzwY,1742 +torch/include/ATen/ops/linalg_tensorsolve.h,sha256=MTp8Q4BVcd4o8RBDXYWs18xa_eCZuBMShsXxXQay6vM,1457 +torch/include/ATen/ops/linalg_tensorsolve_compositeimplicitautograd_dispatch.h,sha256=00A-B2Ulj7kv4kg65hBQgRKlTBB0F6xvL6gbkr8XMNY,1155 +torch/include/ATen/ops/linalg_tensorsolve_native.h,sha256=WkffWVTHq0bnZ03LE5NXnZp84k9a74sQMpXI5emVU5o,705 +torch/include/ATen/ops/linalg_tensorsolve_ops.h,sha256=4GuQ8YpSIEPnw_ejgUvgkI0r2l3neM6yuHGTykYOV3I,2040 +torch/include/ATen/ops/linalg_vander.h,sha256=BKZMuo5Spi2e2OALxOtSXy48z4xfxziFwzSZDZsLFHQ,1573 +torch/include/ATen/ops/linalg_vander_compositeimplicitautograd_dispatch.h,sha256=TUMOCoy3Ya9COq_GAMAO6ESIPv5GwIGPH9Z15bJeokY,923 +torch/include/ATen/ops/linalg_vander_native.h,sha256=xPhMS7REgIxuD9pqXt4slvdSPMh3kPDrhFv_dEUHNh0,534 +torch/include/ATen/ops/linalg_vander_ops.h,sha256=Flwcck8X91VkrzWBywDu_3QlGGSXdMB8KczBijBXMko,1092 +torch/include/ATen/ops/linalg_vecdot.h,sha256=E3OtVe7cB01Kz8H3sENYKCmNCw7BYpXpguEq_bkn8zw,1251 +torch/include/ATen/ops/linalg_vecdot_compositeimplicitautograd_dispatch.h,sha256=JNyVgc0Uml-umeeX2Jh2-odMb3qOTkShubw1fYz8N8o,1044 +torch/include/ATen/ops/linalg_vecdot_native.h,sha256=cxdsGXFjeDcPcW-WXciChbbg3Xfz-HCXlcAQjC2mDbY,635 
+torch/include/ATen/ops/linalg_vecdot_ops.h,sha256=_gvg7DndsvX8iVnvnA_B_uq8M4_13YpbjKodYrkRQ2Q,1859 +torch/include/ATen/ops/linalg_vector_norm.h,sha256=lR1lJ6OqIen_pIi8oaehrQrXPNT00ey5KyH6qOR-POo,1827 +torch/include/ATen/ops/linalg_vector_norm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sWu91oIuE9anCXLQRj1zcK724Pmfwg8sMFaTfHdwMJw,946 +torch/include/ATen/ops/linalg_vector_norm_cpu_dispatch.h,sha256=QcVIXFhWRp4awoRfHKbJslH-siDlJV9HkC0DFSJ3qaI,1307 +torch/include/ATen/ops/linalg_vector_norm_cuda_dispatch.h,sha256=MP0y4yybRUcAKL1Knkemc52ZPMp1bOTL5pr-xp2hhNc,1309 +torch/include/ATen/ops/linalg_vector_norm_meta.h,sha256=JVfdDdKQ97CI3RqTJPS8Trp2netcOjmcUslZlBj6egM,693 +torch/include/ATen/ops/linalg_vector_norm_meta_dispatch.h,sha256=d40xc2O19KxySDs-prhXfJqwB6YacTvKQMIRkgJIz5E,1309 +torch/include/ATen/ops/linalg_vector_norm_native.h,sha256=lbMCy3Adm3BP_SEWG90bMougED4tV1kz6qKvBcsnl44,738 +torch/include/ATen/ops/linalg_vector_norm_ops.h,sha256=PrQhOfSa_BB2D0TMdHjwfg2-bovPRZmBHOzUYPxtuow,2409 +torch/include/ATen/ops/linear.h,sha256=Okup1v_almiDDxsW2f1YiJfuRnE2Le9yxlo-eO_KcWk,1370 +torch/include/ATen/ops/linear_backward.h,sha256=qspGFIU2aHdtNALxfcwOjPt43NYanbhASznY6AoH_Uk,2003 +torch/include/ATen/ops/linear_backward_compositeexplicitautograd_dispatch.h,sha256=x8aT0z87AANsyzfWu6-B6cBrKo3cAe4XCTM7UVWAslw,1229 +torch/include/ATen/ops/linear_backward_native.h,sha256=Q6nmae4kZM3NoU38r2mpQchgPqmnF5m_O8DJqMMbW28,880 +torch/include/ATen/ops/linear_backward_ops.h,sha256=Uf9cBBI3rVZQHtKDDhqxoVjeZIpJb9yJukglMLgFQOU,2665 +torch/include/ATen/ops/linear_compositeexplicitautograd_dispatch.h,sha256=OYqpLbi_YuCq-40FAWLopipnPGe059Izwj3kFO53pt8,1010 +torch/include/ATen/ops/linear_compositeimplicitautograd_dispatch.h,sha256=_vxI_7noemSVxhonHddPBDWraV8YA9X4QzW82Ve0M3g,837 +torch/include/ATen/ops/linear_native.h,sha256=zojCU40JDShK30BNJvpNlR7dHCmM_K6pyRGNOR0xBv4,831 +torch/include/ATen/ops/linear_ops.h,sha256=0y0UsX9zk6qjgOSyG0CnijZ35bOpNA0Y8ac4L62Ad_o,2054 
+torch/include/ATen/ops/linspace.h,sha256=_6Dbziohl6e7HsXlBsZJN1KwbpKXI00XuLgFmkudej8,6853 +torch/include/ATen/ops/linspace_compositeexplicitautograd_dispatch.h,sha256=KYjQUjdc29e5ohCBYXk5Us3lvNj9oUNcFk0Rchrk3TE,2901 +torch/include/ATen/ops/linspace_cpu_dispatch.h,sha256=ulTqqc-sXxiBRDzc6AL_W6IcXoCnwS2Ysmy8aBGb67c,907 +torch/include/ATen/ops/linspace_cuda_dispatch.h,sha256=JMvkDEkCk-8XNIPY2d6NP0bTBfTkZS1V9I-YyLKp30k,909 +torch/include/ATen/ops/linspace_meta_dispatch.h,sha256=tkxveHfnIO5T9J0loBOcuyOII39B9qDRa1I2w0rD0BM,909 +torch/include/ATen/ops/linspace_native.h,sha256=0Ry7Ehlz5Y-PYDcwXuKYyWHunIc2UltEhg6F-emXAsg,2039 +torch/include/ATen/ops/linspace_ops.h,sha256=v0ryP2QRURVvpMyhv5CEqzXu49cv5u5kn-FlLLUVOiQ,8403 +torch/include/ATen/ops/log.h,sha256=nl3RHS53V10QVbNzCG6bPVwSxVAgaubT7jg9WxaQ0xA,1107 +torch/include/ATen/ops/log10.h,sha256=KsIIBpliw79O7ZlzEmkyW6kkZqFcW3zjJFPU4Z9ZI5g,1133 +torch/include/ATen/ops/log10_compositeexplicitautogradnonfunctional_dispatch.h,sha256=NP-hwD4L2nLYQH-POFuf7M7aaDxO37MIm9Eos7DE0Yo,839 +torch/include/ATen/ops/log10_cpu_dispatch.h,sha256=C5AAstlgh3CRIbh2CyQuPjf7cx1LOfzS1wOPuSD8Lek,924 +torch/include/ATen/ops/log10_cuda_dispatch.h,sha256=Zr1dHQslm1PiUXxdBtCE5i7egd5NXEYllVcqm1mgXu4,926 +torch/include/ATen/ops/log10_meta.h,sha256=PUOcGFdCa9DonUChZHiJFpIUq7DCQkHPUrkMLgPzzQM,574 +torch/include/ATen/ops/log10_meta_dispatch.h,sha256=dY9_HA9tiH6FFYJRqxKOJFtclnp_fCOM6vSbkXxTh5A,926 +torch/include/ATen/ops/log10_native.h,sha256=8PAFZ0ZJ4RQ_IaZdEwZMYSCOMNx0ZyYHWzjU-RraO_w,593 +torch/include/ATen/ops/log10_ops.h,sha256=VMWMazkB92iwpPZm8BM33tbi_HLcYV3F2w-SnNQGxxE,2113 +torch/include/ATen/ops/log1p.h,sha256=2tWRzs1okxoVqRVQoi57QSCAWwXfejMgPtkRzoOGMHA,1133 +torch/include/ATen/ops/log1p_compositeexplicitautogradnonfunctional_dispatch.h,sha256=KqXkNFECg8JrjY62r8R2-r5pliijNanXqkSeXu61U2o,839 +torch/include/ATen/ops/log1p_cpu_dispatch.h,sha256=T5rxVVCiaBTfbTh_lTDDb3SwWXyu6zE805tKIyneuUA,924 
+torch/include/ATen/ops/log1p_cuda_dispatch.h,sha256=pFopz_KuXJjjuKIH-9vQ1bYt-i0F7h8CSgmy9QIiHzI,926 +torch/include/ATen/ops/log1p_meta.h,sha256=L1AtB7Oz9QZjhZmjO-0s4vZ0Os_ho86yBJlmXX7gT3g,574 +torch/include/ATen/ops/log1p_meta_dispatch.h,sha256=8vlGzc0v3JC6MpahpYFTVy4Dax2egP7eeMxu7gAcuHw,926 +torch/include/ATen/ops/log1p_native.h,sha256=174NgUtuIjQu_Fru5EsVv1BWx4gJfP2V_0h-bqPsOr8,1007 +torch/include/ATen/ops/log1p_ops.h,sha256=-gjTT5OwDnkl5GjwOYHhCiICrzkR3YI0AuAGZXa8wtQ,2113 +torch/include/ATen/ops/log2.h,sha256=73FXmvo7KYZwz-s23z4WONhHn5zC_r0NrTuJ2rugcr0,1120 +torch/include/ATen/ops/log2_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AUm0c7_tnZT8LUY2y0keVJiAqRKPfJJuEXDByhYW1Vg,837 +torch/include/ATen/ops/log2_cpu_dispatch.h,sha256=e3oy2Cug8UiLUkCC-bpv96mszAlcL_Ad4oMMAecPrn4,920 +torch/include/ATen/ops/log2_cuda_dispatch.h,sha256=sHoqPDKA3sLwRv2NWahdofJXAqBmmYM-st74vd6gaCs,922 +torch/include/ATen/ops/log2_meta.h,sha256=pRmPPzcvNxSkmoU7tJr-RgO5Yw5NlgbsmNhvBUfGJas,573 +torch/include/ATen/ops/log2_meta_dispatch.h,sha256=K6J2HMuMjNMaLtK_6oDLaiv9OaivrMKw_Iy-wr7otKI,922 +torch/include/ATen/ops/log2_native.h,sha256=emafEE8TJtGkEyn0GxIIPuKIePEbGUiEq_qONoMINgk,590 +torch/include/ATen/ops/log2_ops.h,sha256=NbrDDBjdbqX-DJE5D0M5ht9lCuAESM09f3DTeLO3G88,2104 +torch/include/ATen/ops/log_compositeexplicitautogradnonfunctional_dispatch.h,sha256=R0qUyXpi3CT-cR2n2n21s5JHMkySnZD19veEAMxnHVE,835 +torch/include/ATen/ops/log_cpu_dispatch.h,sha256=1scbJNlTj9Svm9bpk8dzdf6qnQaybTuz1FXKK4aM6uQ,916 +torch/include/ATen/ops/log_cuda_dispatch.h,sha256=wVeI5GNaoZ-qVyJot-7teBov9ZniPQ5qRydcuixamRg,918 +torch/include/ATen/ops/log_meta.h,sha256=IV7NbNMuHbabrc9klIer4z1SuSEAXZZZiro799uKKn0,572 +torch/include/ATen/ops/log_meta_dispatch.h,sha256=auLInGknpTv08vFk7YujovQ7UrjDGGKQ7u_2kvcPOs0,918 +torch/include/ATen/ops/log_native.h,sha256=5zaAnttLgeQSnbdeeISEvqYt1j3J5mf9fUakVdE9Y9g,587 +torch/include/ATen/ops/log_normal.h,sha256=W6U__siOkwZFY2IXbqErDyvoh64pGtrFaEU9z98PIxI,1514 
+torch/include/ATen/ops/log_normal_compositeexplicitautograd_dispatch.h,sha256=3HCVdTzfQDzi35NnUvPv6RUtTcEjMsc8CEmWfKP4WXY,1172 +torch/include/ATen/ops/log_normal_cpu_dispatch.h,sha256=WWCZkvmygCYCiD4OJ2b9xUM23C9Sx9WUzWQaOhL8w14,807 +torch/include/ATen/ops/log_normal_cuda_dispatch.h,sha256=1wuglPjrB1bPkrdzPfOtkhr4tNsUUpwYAy8GQ87Av4I,809 +torch/include/ATen/ops/log_normal_meta_dispatch.h,sha256=16WMRLsYjc1ChbL_p7x0mbnJIewLYWdLnSGYOJ0usRY,809 +torch/include/ATen/ops/log_normal_native.h,sha256=pXBIvGcsZAeNCH5zMkxDt5uIuPCmgUTXiE0KfU36ZHE,856 +torch/include/ATen/ops/log_normal_ops.h,sha256=7crhbnfi4c9fwxApkArT4pfeinLyWnlzoypV-A2BaHk,2872 +torch/include/ATen/ops/log_ops.h,sha256=4bX1rkb-mNpEyZu0LlCwMIlOptxvsJHHrRM8QQCzO9c,2095 +torch/include/ATen/ops/log_sigmoid.h,sha256=-7O8B89Gj9M3CZFgYIMAatObhUkVMhsJPxjrw9-WehQ,1054 +torch/include/ATen/ops/log_sigmoid_backward.h,sha256=Oo24wEqjj5JkRbOwpk307g46oJqeV-S1Bg2GhG-jzdI,1559 +torch/include/ATen/ops/log_sigmoid_backward_cpu_dispatch.h,sha256=VYlouOZtPurG_oFO4ZTpUFIdUdmSissSXnHwwUbb1dk,1110 +torch/include/ATen/ops/log_sigmoid_backward_cuda_dispatch.h,sha256=3XkRbmt3bdVwdEph9u9rM2UL0uXLmp-9Z5DnSkA0D6A,1112 +torch/include/ATen/ops/log_sigmoid_backward_native.h,sha256=9MeG71SXXL_1_UyEr0KGHgZB5vvKF-bEOm70uFimTIY,1010 +torch/include/ATen/ops/log_sigmoid_backward_ops.h,sha256=waImE7ReUiakqCmnV-jsoihC19P9TkO0i0u4Q1SRM-E,2102 +torch/include/ATen/ops/log_sigmoid_compositeimplicitautograd_dispatch.h,sha256=Ok7h2c47DYJL1q7NUjbtUav2BOUAybPnMr2b1VKNW_4,936 +torch/include/ATen/ops/log_sigmoid_forward.h,sha256=s7PXyso8nXP5Bn9_i3NNB1oVedDZr-YVN-aU1JKi_0k,1391 +torch/include/ATen/ops/log_sigmoid_forward_cpu_dispatch.h,sha256=5OgHsH3a5Z-kQMtzEfBi98VvNN8wm5IMMbBytL1IB0Q,1043 +torch/include/ATen/ops/log_sigmoid_forward_cuda_dispatch.h,sha256=-rD7OmAWyX1Wcaax8-makfMsekD_LD_9HWmldJPKt9I,1045 +torch/include/ATen/ops/log_sigmoid_forward_native.h,sha256=FrsCHAOmJkE2PBcwValrMciyIOs7-JV7RqG_wadXLXs,908 
+torch/include/ATen/ops/log_sigmoid_forward_ops.h,sha256=UFCcOMPJ6_o6pcsE5LXkPXQne6XGW0wmU-yxdXKbklg,1955 +torch/include/ATen/ops/log_sigmoid_native.h,sha256=K6f3LwPRVNYNj9DZgc2-GC440jn7gmmdKPtIPaGIyiY,564 +torch/include/ATen/ops/log_sigmoid_ops.h,sha256=iBhPLgLkybondELseJbJb2J4PmyIeqMH-IPV12Gcouc,1620 +torch/include/ATen/ops/log_softmax.h,sha256=ylVlh1RQrDnKHI5RHVu5msI38TbAdRwwlPy-aCUi3vY,1688 +torch/include/ATen/ops/log_softmax_compositeexplicitautograd_dispatch.h,sha256=YHCTHKUcE7mCTAfidCKXQP2-LgmV2be2s69fJVYrPIw,996 +torch/include/ATen/ops/log_softmax_compositeimplicitautograd_dispatch.h,sha256=vtZzi3QTLqRqRB59R92KG30nDdifqH_-bBtvx6JKeDk,966 +torch/include/ATen/ops/log_softmax_native.h,sha256=vHNhBlcCU00o3AQ7qH1RyQjspy3itUqJnbxeNoV-54A,813 +torch/include/ATen/ops/log_softmax_ops.h,sha256=L50g2M4_jJj09J7EDXLwwW2VOqQl_DO95eb1xfrhCKg,2770 +torch/include/ATen/ops/logaddexp.h,sha256=cctgUWSmgIelkyn1FPQ7qk1FBD5rVb_M3ck5v__5pHw,1175 +torch/include/ATen/ops/logaddexp2.h,sha256=SCzacNDanfUKyCFfutxOvuAa23lM6j62ACwHv-rZaRg,1185 +torch/include/ATen/ops/logaddexp2_compositeexplicitautogradnonfunctional_dispatch.h,sha256=7I_XgYDS5ZZvdVRH0t4O-L0t9usQninkZbDmQ2R1Yg8,820 +torch/include/ATen/ops/logaddexp2_cpu_dispatch.h,sha256=u2Ny-PRgGqX-rACBKejXub6FbUsFj_1grlY29le0LYE,967 +torch/include/ATen/ops/logaddexp2_cuda_dispatch.h,sha256=N53TT8UTTQs_6_fj2r66JFVKnGSzdK9OMvYPakUoq-I,969 +torch/include/ATen/ops/logaddexp2_meta.h,sha256=l___dCAG9C_2lx7hc3Z2e4G4jrqhWRNWqBn1s0mSeQI,605 +torch/include/ATen/ops/logaddexp2_meta_dispatch.h,sha256=KSp3Tv1k2J3YO9vIcmk8U3V3IY7Qn_sLk2a-aLTHEbU,969 +torch/include/ATen/ops/logaddexp2_native.h,sha256=jz7fnRWk5Bx7g2oXksUbxlYup6yrnB3PMvWnbQqU7QU,634 +torch/include/ATen/ops/logaddexp2_ops.h,sha256=AmJtikJ02iO56bYqwUVRPfWwJJqQVQa5Zx2LlKINW8g,1786 +torch/include/ATen/ops/logaddexp_compositeexplicitautogradnonfunctional_dispatch.h,sha256=bc27hqaHJuF1-PlT74ga5CIv1PLV0HoH68lFFA3cGhQ,819 
+torch/include/ATen/ops/logaddexp_cpu_dispatch.h,sha256=2ugkj6JTjpTaG1GFYnO6iTJdnakkXWesKbUEard7kSc,964 +torch/include/ATen/ops/logaddexp_cuda_dispatch.h,sha256=nkHjcXhFtrSZH2JCgkqFTVcLWhYDxd6WqQB6txJomh0,966 +torch/include/ATen/ops/logaddexp_meta.h,sha256=WMU2fl9JLxNDVuNBQbKfXKDnNLW_DDAzYwehmzZDEwQ,604 +torch/include/ATen/ops/logaddexp_meta_dispatch.h,sha256=DLlzaAJCcqQCfWMoYf-woFqFHzGT9OWtdlJgrDssoys,966 +torch/include/ATen/ops/logaddexp_native.h,sha256=ofalyf8ua3OJmxmqIK1jO71vkAIVHgkNKKXinIyM1Js,631 +torch/include/ATen/ops/logaddexp_ops.h,sha256=vYe9kVx_sbwcauvdwIgdN_03OPclBAngglDcsHDaa5I,1780 +torch/include/ATen/ops/logcumsumexp.h,sha256=MQsK3Ywm3XGhbQdJFZlcK3Frl6axn3-wb28GCGk0SzQ,1877 +torch/include/ATen/ops/logcumsumexp_compositeexplicitautograd_dispatch.h,sha256=ZdaEVXzPSlWPdldhhjL0zBH0UhjHyuJRqoIoGr6E4CY,978 +torch/include/ATen/ops/logcumsumexp_compositeimplicitautograd_dispatch.h,sha256=maRoxGYE-a9toJGDnFV1WTVX1tVbIG8zT8D5giqhCrk,990 +torch/include/ATen/ops/logcumsumexp_native.h,sha256=WkjkIfvb62-ru8yGVa_txRDlbMDQq5YFKazsw3uL_04,770 +torch/include/ATen/ops/logcumsumexp_ops.h,sha256=URmMrvevQ9gWQR8DF7LyIQRccbtIXC7647KXu8UGOP0,3073 +torch/include/ATen/ops/logdet.h,sha256=mVJJjRAUuyUqCYWUQ-lsp4Qgq-KIGfGbXpvuiTmJuf4,614 +torch/include/ATen/ops/logdet_compositeimplicitautograd_dispatch.h,sha256=sakW73DeFiy3EJoOI6TnZOja4Ux7KfqfyAJlzBDeRMM,764 +torch/include/ATen/ops/logdet_native.h,sha256=dYf3TDdISZw6iJKRzeFeTdRjvaOdc3lfUGUVW4QDBF0,476 +torch/include/ATen/ops/logdet_ops.h,sha256=9XCtUP-QmX6Dw4_RRUiCMzx6r8XvR-apvYQbD0J7VFo,967 +torch/include/ATen/ops/logical_and.h,sha256=XLI1NkrIY-v3vjWAr_98tvlSuXtLQgzB7lZ-Rp4l32Y,1195 +torch/include/ATen/ops/logical_and_compositeexplicitautograd_dispatch.h,sha256=B7nBUHUsDDcnUtb1jr4pi59wEiKtks43m2-cXR9IkrE,877 +torch/include/ATen/ops/logical_and_cpu_dispatch.h,sha256=dzXGNFHfpMDWfxKtEa08QXJNUBqzHveh55Bk8v7_1wA,885 +torch/include/ATen/ops/logical_and_cuda_dispatch.h,sha256=kwNsiuoj3AuhHyE9u5oh69fg8T64zXEoBzWy7gRZo0U,887 
+torch/include/ATen/ops/logical_and_native.h,sha256=tgN2tN8NXxkwX3c8XVwIOZfK2YUD6hPo46vKyHeke_w,698 +torch/include/ATen/ops/logical_and_ops.h,sha256=9WEpX5w7qUbDJq_k29h3wQu68Yim_lLcXIkeOu3oFm8,2425 +torch/include/ATen/ops/logical_not.h,sha256=_-zA_xa_IutnuqoKANsFVvktQgytKV1XZHggWrx-0zE,1054 +torch/include/ATen/ops/logical_not_compositeexplicitautograd_dispatch.h,sha256=c0_UHtPG0hnPwvNGu2xPlvh_qD7--bZ642QUCSnJgTM,825 +torch/include/ATen/ops/logical_not_cpu_dispatch.h,sha256=tob0XD-izc3K5yvLxOcasfCwB-kPs4OxhPT3Gs00oK0,833 +torch/include/ATen/ops/logical_not_cuda_dispatch.h,sha256=W9vTjbo5wcHui6al3CEyERTrkmEWS9zjyYAM4XzUvZ8,835 +torch/include/ATen/ops/logical_not_native.h,sha256=t_vao3UBSMuWsjNTPirCAUCXIPN39O4knVoelTf-p7o,761 +torch/include/ATen/ops/logical_not_ops.h,sha256=KNfZuS8HKrXNVSRUoYtxMWY1nCELGYng_ZDv2Wh9toY,2167 +torch/include/ATen/ops/logical_or.h,sha256=xmRzKd7rm6G5VnmQdB63ByIUw7J7nKB51MJSXtSA4jQ,1185 +torch/include/ATen/ops/logical_or_compositeexplicitautograd_dispatch.h,sha256=xBax8bjDlC3gCLLjnSdnVH2g7hNdMvoWPKL4n1nryRI,875 +torch/include/ATen/ops/logical_or_cpu_dispatch.h,sha256=mxXGaNamr-nRzho_VXH3HaQyZocaHNaY-0kTBD1tjCQ,883 +torch/include/ATen/ops/logical_or_cuda_dispatch.h,sha256=2Rp4qKQPdSifWBc6nViCNeEK_D_O8jR07tAsFtpII6M,885 +torch/include/ATen/ops/logical_or_native.h,sha256=eQTT_EZKa9XfgCWHgeF2RTvPctix3KtrOuxQWP4EFSA,695 +torch/include/ATen/ops/logical_or_ops.h,sha256=pEL60h3bX3FG_kHjMR8BIgHrtOK5dkB2zh8FSPqjgtE,2416 +torch/include/ATen/ops/logical_xor.h,sha256=HJ2ZaK8V7k6W0L2LHQzG7yV8nCYCM_2qkaDYCH2TXEk,1195 +torch/include/ATen/ops/logical_xor_compositeexplicitautograd_dispatch.h,sha256=byN_XRa6285A-DveuuYDr9Fh_2JvCXR3hQM3mM2qsec,877 +torch/include/ATen/ops/logical_xor_cpu_dispatch.h,sha256=MNYc3FYQRAZSXG-HbtrtX05RycNuWN4wniBkT3WKNXw,885 +torch/include/ATen/ops/logical_xor_cuda_dispatch.h,sha256=HlO2iVUTQ37xwSimEDWQrjpVQla4z9umbNEljEpeeTc,887 +torch/include/ATen/ops/logical_xor_native.h,sha256=C5NxiCMd1fwSwHCM_SFrjNVsZiGj6XaN12bKWdgBVcU,698 
+torch/include/ATen/ops/logical_xor_ops.h,sha256=dSQPCiXaY6LFMq5QZTzON4B-mOI4TyMs8d6r_zBobGM,2425 +torch/include/ATen/ops/logit.h,sha256=-WVWwBPf9aAfKLnkPTKmFkytKdSmVV2SiRGDj0E1Si4,1382 +torch/include/ATen/ops/logit_backward.h,sha256=UU_73M_Inp0KW95t8ppPXaeehzgg-hqgQYqlUlfdaKg,1532 +torch/include/ATen/ops/logit_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sCzoa2HrPqdbQbvgMhgvwsJVxj3k9deS105PUPZGiUs,874 +torch/include/ATen/ops/logit_backward_cpu_dispatch.h,sha256=878iRlstGB4_OPbvlyQthgSSR4enw4SE5Tz8fGJazl4,1128 +torch/include/ATen/ops/logit_backward_cuda_dispatch.h,sha256=0O5umbwKdcyHs3V6V8BHMqDCFmmrDoikpbZdFrstH8w,1130 +torch/include/ATen/ops/logit_backward_meta.h,sha256=J4sVjyHmEQC8frmTxPjmkDI2rFnRKlnR9W2NZ1AXXY8,644 +torch/include/ATen/ops/logit_backward_meta_dispatch.h,sha256=j1VGfXmFP4UKlvvmisQcxRsbVuuLWAHKBAUNzmhuPIw,1130 +torch/include/ATen/ops/logit_backward_native.h,sha256=HlvKwPs134HiZ0eEa5TbraQXSoxG7TUh3uH_ErQO_7s,688 +torch/include/ATen/ops/logit_backward_ops.h,sha256=AQmo9yKO-Lwv7IXUDcBzvr8SLDRcD2Dnz4pgA3pFdyw,2088 +torch/include/ATen/ops/logit_cpu_dispatch.h,sha256=TLBjPucO0h-mFBFLTkGAvtwX6RpqbGzhMqCGk0304iU,1085 +torch/include/ATen/ops/logit_cuda_dispatch.h,sha256=S8T6fufN5G3AeoEI6piLi8AbuXyDawzOeObQzGW7ecI,1087 +torch/include/ATen/ops/logit_meta_dispatch.h,sha256=M7LC9p0ZXrWdLB35ZtKLKoK5J8_1IgXkSLyTzZzxOks,762 +torch/include/ATen/ops/logit_native.h,sha256=pzgh1isGXTRjxHP-SI9WgM_xQ9Kq89pLUbPPAfRRiwU,719 +torch/include/ATen/ops/logit_ops.h,sha256=A6kgU899tYxpJm5UULYMkHKidGDxnucXQHJh6o35kYk,2413 +torch/include/ATen/ops/logspace.h,sha256=8PmVniuvFIOqf4FY9L3yEhbtGyi9bvAhQKO_eoovfLQ,7469 +torch/include/ATen/ops/logspace_compositeexplicitautograd_dispatch.h,sha256=WJMmhXCoZc0O7Itz5Ko6NEWIXO5R6ePtQQMuaj6Nh3M,3118 +torch/include/ATen/ops/logspace_cpu_dispatch.h,sha256=lIbC92l2z6YYc1i69eBuzc9N3mxFJLCF6E2Qvhs5-lY,938 +torch/include/ATen/ops/logspace_cuda_dispatch.h,sha256=Xi-FnMUCpNarqwhTiE7UEzDtf4sU79WyplN-hg-5C4c,940 
+torch/include/ATen/ops/logspace_meta_dispatch.h,sha256=KLZEzq5BpvGTkBIU9xk5s4ksGuoETrCxMAhMLoM8Dwc,940 +torch/include/ATen/ops/logspace_native.h,sha256=7Hgb_BC5giT9hjvR3FluzmdQ0qjizW08uEMqkZfjTV0,2176 +torch/include/ATen/ops/logspace_ops.h,sha256=9ctvRpkmZfq_0wvVnaEbAx_jvLKq46VI_BP86a-ZZ4M,8811 +torch/include/ATen/ops/logsumexp.h,sha256=pY8k-aUUiZV2N1HftHa23pDI8CYBDaVCrRhLLrpsnIw,2144 +torch/include/ATen/ops/logsumexp_compositeexplicitautograd_dispatch.h,sha256=UrwIhA6det4vpkAUASP5QCkB5lxt9mfTIe6sfKJ1o8A,808 +torch/include/ATen/ops/logsumexp_compositeexplicitautogradnonfunctional_dispatch.h,sha256=IcoFrqLTlhRccV0UBYg8WcUR8cDbWP8qUUs5DB5Sb2Y,975 +torch/include/ATen/ops/logsumexp_compositeimplicitautograd_dispatch.h,sha256=zK_0YBaabZeNgognSpNYm0UZxRn6RSwfzgBucfiUh8o,1047 +torch/include/ATen/ops/logsumexp_native.h,sha256=3OHVRNEiRgDCburMeZ_En21ReZompNHxfX5OLBxAUVY,850 +torch/include/ATen/ops/logsumexp_ops.h,sha256=fm1fNrD_Jf9uxmIqGDvZfx5tIfOHFSrOk5irIx82sZY,3325 +torch/include/ATen/ops/lshift.h,sha256=wnMB8D495nUz4hFNow2AZAE1qTunoiAqJTuF9PFN5A4,1967 +torch/include/ATen/ops/lshift_compositeexplicitautograd_dispatch.h,sha256=x4myy8Ylu7bzDpBNtcllaXLY81TSMLY0EPrn0eahh7M,1144 +torch/include/ATen/ops/lshift_cpu_dispatch.h,sha256=ImbVJ1OFP-DKtWTFKCfEkjMU-BE_IXHa9yro9xn7fJQ,996 +torch/include/ATen/ops/lshift_cuda_dispatch.h,sha256=Mfch3l_jbK5XQNfaLhXp6Ocvo5HUKhGNsQzHcfLPGw0,998 +torch/include/ATen/ops/lshift_meta_dispatch.h,sha256=8BQni9LNubFUj5L9wWrzOIIUCYoG5w_N5p0wEXKFP0U,830 +torch/include/ATen/ops/lshift_native.h,sha256=LXcpHvTIgrbd1ukjzSChq5Zor4db70pMmGyncfwPARM,982 +torch/include/ATen/ops/lshift_ops.h,sha256=lMAbUXO9G11jNkW4T1b8Xa78MjXkS029WDzU3eemjqg,4520 +torch/include/ATen/ops/lstm.h,sha256=kAc8D6S9prPOk1SPOYm8V_3zZ5kYVe8gM4sAxvYJMwI,1590 +torch/include/ATen/ops/lstm_cell.h,sha256=56tyiQKHh-JuhAyCBbpsr5Mn3stOCUiqlYFFdFzF8ik,928 +torch/include/ATen/ops/lstm_cell_compositeimplicitautograd_dispatch.h,sha256=3ENSc9qQQ_satAImfT98ee0-1Ylz0JYTdMFb8JBvHzg,952 
+torch/include/ATen/ops/lstm_cell_native.h,sha256=ChAWJuSCwjUYC0o-nyR9ptBlMqCHotdhcUfnuRWfk2o,664 +torch/include/ATen/ops/lstm_cell_ops.h,sha256=smG0o6elQeiHfXhIsRsKGQn0lIyrUpMU74PN9ut_G7M,1577 +torch/include/ATen/ops/lstm_compositeimplicitautograd_dispatch.h,sha256=fkyqlCWp8UACGxzhsW-_yZrLasxjQYICWWrzftAIlYQ,1191 +torch/include/ATen/ops/lstm_mps_backward.h,sha256=bXVe3s9HZ3HhEl7vmg5vZ1htnvsAxxzFiLKiX6-OGsQ,3652 +torch/include/ATen/ops/lstm_mps_backward_compositeexplicitautograd_dispatch.h,sha256=IIdaMF-wEAlREwa7OeMrS5Br8yszFgPQix_4Wkv-xro,1713 +torch/include/ATen/ops/lstm_mps_backward_native.h,sha256=weHyoiJWjmeV6Y4uZrJCmejM7Dyh9Wc3fKjMKT4phOQ,923 +torch/include/ATen/ops/lstm_mps_backward_ops.h,sha256=6hIz1p0X8I6bGMzOiyUoBSnDtH0XFIBHrAeMuH6laFA,4477 +torch/include/ATen/ops/lstm_native.h,sha256=s6nlnbSKE3Huk4kkP5-Pchd0lESemlIyz83IlpUOtNs,903 +torch/include/ATen/ops/lstm_ops.h,sha256=4uGYzXkv_YzOjtwsnac3ARqpnng6ZLBX3V2lcg6ydeA,2800 +torch/include/ATen/ops/lt.h,sha256=mWpKp4pWBiuG8_s0BAtEmECX_kTXImjr2PHNVoTou_E,1819 +torch/include/ATen/ops/lt_compositeexplicitautogradnonfunctional_dispatch.h,sha256=SArQJmYLRzdjhA9J0YtsRqanWbpkkk5zgBBStLxzXOM,1034 +torch/include/ATen/ops/lt_cpu_dispatch.h,sha256=8cQPcA5Xp1iDmRxCcxKx4l1mRAAiZXSIDRF1vZCSMBo,1366 +torch/include/ATen/ops/lt_cuda_dispatch.h,sha256=b8TooVTHOkBITOLJYs3wsDVv86xvYXtXSpy_Idg7Bd0,1368 +torch/include/ATen/ops/lt_meta.h,sha256=beD9CFVKOefjzKW244huDTtOoVP-sYCi3wHkUqbQxfY,751 +torch/include/ATen/ops/lt_meta_dispatch.h,sha256=axFyNImwwx6g7Nw3KlcTs7sVCnQ3pVMj7fjo8FFv8DU,1368 +torch/include/ATen/ops/lt_native.h,sha256=d_7JRShnwlOPXgJ5HmC6dett00vQSDI1b3xg7hCj7kY,1205 +torch/include/ATen/ops/lt_ops.h,sha256=oSUkOkhlcuK9H2yWjI7nyQ8XBVvh6uGez0EZpWzJK-w,4376 +torch/include/ATen/ops/lu_solve.h,sha256=ctFY0n3CLcsoFx3Uw-odVBPZ-fyMlHlF08AIqUytIdQ,1360 +torch/include/ATen/ops/lu_solve_compositeimplicitautograd_dispatch.h,sha256=6-UqzkNBV-lzs9QzSQEJGtdwXCHupVq2c8AGODa6REs,1101 
+torch/include/ATen/ops/lu_solve_native.h,sha256=aV-XKFRGHTWFQQshQ7PHF4CuKL-3t3Rxxyx2i_BAMRY,674 +torch/include/ATen/ops/lu_solve_ops.h,sha256=Gp6XD8HHV1TqgNhX6OjiSj-zBPjBwrZAjIDVB4lIwxo,1982 +torch/include/ATen/ops/lu_unpack.h,sha256=CAB7RSJdeQK1YAUXk4bmRyRq_km_eiAKifB2mz90m-k,1924 +torch/include/ATen/ops/lu_unpack_compositeexplicitautogradnonfunctional_dispatch.h,sha256=uknmKso8TMw5EhozlajBM-DkV9dXFO0WTr71SVUfMfU,910 +torch/include/ATen/ops/lu_unpack_cpu_dispatch.h,sha256=8V3hXMYZBq1UiRSMlyFWdNLUz8mgALSplAnvjrzu408,1295 +torch/include/ATen/ops/lu_unpack_cuda_dispatch.h,sha256=_n5AavfVhIkZe7F2ysigMDGt09FL2nPHkOJufdPE4l8,1297 +torch/include/ATen/ops/lu_unpack_meta.h,sha256=07Vq_-LcWnzikjDBmAxymBLaMxTe1fXJleOLbSWNGHA,649 +torch/include/ATen/ops/lu_unpack_meta_dispatch.h,sha256=bjfhj2g0TjCgAjc4SIZir8fBgrR3otbysU-tIn5YHc4,1297 +torch/include/ATen/ops/lu_unpack_native.h,sha256=e9DtaFznwLiRkY-_je4LxT98QrHY2I1PpF2t15Eg29Q,718 +torch/include/ATen/ops/lu_unpack_ops.h,sha256=C0sL3Zutx3J54O7HJNi8qfL-I1ImkUkQf1apWuhkBfI,2492 +torch/include/ATen/ops/mH.h,sha256=9NbaODR2b-B2kA_0LJHwDlF0itMS-VLfRTcBsARBb7I,476 +torch/include/ATen/ops/mH_compositeimplicitautograd_dispatch.h,sha256=Qgn_B4neSq-pHvFFlgIrYUmjDfzDkDXeFqWYTX2atNk,760 +torch/include/ATen/ops/mH_native.h,sha256=HGFt_fvzBnqDNQ7S3wApiP9vjuwDXyc8SiLyoO8nUDk,472 +torch/include/ATen/ops/mH_ops.h,sha256=cLNQa865PZ3SoxKiOVwpx8UZALX9yifKivX_Vi7I8do,961 +torch/include/ATen/ops/mT.h,sha256=QLNvmcyEIYp64Q-65IxmJcJYbWNWgtImJTSz27TeD28,476 +torch/include/ATen/ops/mT_compositeimplicitautograd_dispatch.h,sha256=9XrU66nNz5i8TeYoRIaxRsuVZhBXBB4Plz9Vz_Tztp8,760 +torch/include/ATen/ops/mT_native.h,sha256=OcSSdCpIvuFp23cks8T6N6BVdodWDM0JgxXtRUWAYSY,472 +torch/include/ATen/ops/mT_ops.h,sha256=p5lsPyFQN_IhodPzLGgdMkUdLuWkUsDlLWBM4XQAdLU,961 +torch/include/ATen/ops/margin_ranking_loss.h,sha256=AvhSjAXEOEhnEk_w6e0srW05e4wCVIHbSFw_WKbttwQ,887 
+torch/include/ATen/ops/margin_ranking_loss_compositeimplicitautograd_dispatch.h,sha256=bdfxYFL03AA-RibFkW4dSxsGa5Qi9yamzvUIEnD98LE,891 +torch/include/ATen/ops/margin_ranking_loss_native.h,sha256=gLnQbu7ORAh7-bOjV06Z47j_Mfk2l6RT979hGCVcMhc,603 +torch/include/ATen/ops/margin_ranking_loss_ops.h,sha256=yQDKUukOnXBwGxy4FY429Z3w1-VDXOoIE4d7-5PO9Ig,1313 +torch/include/ATen/ops/masked_fill.h,sha256=pLM4C0C730DTJ18GlmG2rxGfGWJ_G60goCEekvJNV-k,2254 +torch/include/ATen/ops/masked_fill_compositeexplicitautograd_dispatch.h,sha256=GtC3FtJQ3kwkxuAo642-kJdb_qckd2ZzpS-iULidfzY,1468 +torch/include/ATen/ops/masked_fill_cpu_dispatch.h,sha256=DWObWOVWsfS7hpSg7kn_0utMHCfake_YmwAgAUT8JCg,880 +torch/include/ATen/ops/masked_fill_cuda_dispatch.h,sha256=WahwAlL1cI8vj-6jdBQ2mJQsYIOAlGJVTtm7MCjXWAs,882 +torch/include/ATen/ops/masked_fill_meta_dispatch.h,sha256=YXkifJG75xG23gj1o9EmvemHPRbvyhztqSN-knCvk4k,882 +torch/include/ATen/ops/masked_fill_native.h,sha256=tTFnejNZ-MQ42-lalFdWyVTwXSvm9o6KGEXbORL5_dA,1979 +torch/include/ATen/ops/masked_fill_ops.h,sha256=ecc3eDn5qb0kjoTR_5a96wBezNpWdDV_FueAIkly9DI,5036 +torch/include/ATen/ops/masked_scatter.h,sha256=jl_fNwfIFjwDMTqmg98OnqhOUtDPXnJG_oVSNXKv_aw,1366 +torch/include/ATen/ops/masked_scatter_backward.h,sha256=s_yHyKMKXVmKT-RvrXxhUyglosuhGSBwCLf5DrLGllg,1777 +torch/include/ATen/ops/masked_scatter_backward_compositeexplicitautograd_dispatch.h,sha256=Fq6xvGYz_QvL5SMKHBwzjsQAZxOLgwe9Ze7A_6sdvTo,973 +torch/include/ATen/ops/masked_scatter_backward_native.h,sha256=JoKJmknM3JHgJyebSeFvqaPEZmqZhVegVQP2EwLjdfo,559 +torch/include/ATen/ops/masked_scatter_backward_ops.h,sha256=3lvuKm4FiYWit5BaIAQ_xi2hlUooZXh5W0f9OZ3zhw0,1213 +torch/include/ATen/ops/masked_scatter_compositeexplicitautograd_dispatch.h,sha256=HMpnqJA8Prp_D--kCluZ7KhvCIW3rQAies1AhDq2unU,1101 +torch/include/ATen/ops/masked_scatter_cpu_dispatch.h,sha256=_Leg0ox_AEk2TWF1pr2Sa3HZk5ue1ydwvDImeokiFno,777 
+torch/include/ATen/ops/masked_scatter_cuda_dispatch.h,sha256=sNnC6beSVZc45z4K7MmAehRQ73m-qmhgkZ1VJC0Fi6w,779 +torch/include/ATen/ops/masked_scatter_meta_dispatch.h,sha256=8XCK4lE9U9k7q_CCD857a5Xs38lcEOa7MDWa1D154SE,779 +torch/include/ATen/ops/masked_scatter_native.h,sha256=Esdks64DIbKZ4PBKGKNIsqRpm2XHiYq3acMrFzEExNg,905 +torch/include/ATen/ops/masked_scatter_ops.h,sha256=a7uV62SrSgGX6lRzgNtfHrJy1ze9WQTKXmx_1dWEigs,2710 +torch/include/ATen/ops/masked_select.h,sha256=qMjxfvVe7UDs595arrFpy2RYd7Ph0k0FhrsT4AZrLqI,1206 +torch/include/ATen/ops/masked_select_backward.h,sha256=t6MwC0Efjb8U-p2rBzZTV9f3oWU8uVbbWAWqvpJCLgk,769 +torch/include/ATen/ops/masked_select_backward_compositeimplicitautograd_dispatch.h,sha256=2RFDp1nA2LuekX6SdqsjouB2lyEE4l2YKeK0dx9fJG4,831 +torch/include/ATen/ops/masked_select_backward_native.h,sha256=-G_oBmNAkJIhsh7v18w8Fcp0hjpIMliGWsdNPI4Zg0w,543 +torch/include/ATen/ops/masked_select_backward_ops.h,sha256=hDyHqsvk_q8EGODw7GJr4ohBI3O2z-AyR-ddVxNWqM8,1184 +torch/include/ATen/ops/masked_select_cpu_dispatch.h,sha256=8Yt4cYmNe_XhIorOZp6C2YdCxP9qXjyAtqgfTeLourM,973 +torch/include/ATen/ops/masked_select_cuda_dispatch.h,sha256=flnYJp7xyQbIouWT8T3ojlp61gnxOuX3fRaylYutPjA,975 +torch/include/ATen/ops/masked_select_native.h,sha256=GJl_tk504-45WASRgDv5L9SDcsdjtsZRUjJGukZyqxc,832 +torch/include/ATen/ops/masked_select_ops.h,sha256=olTZ87FOp1uTJUha7l6tzv8E9lJw8bpQoz8cXkYlCj0,1798 +torch/include/ATen/ops/matmul.h,sha256=bwtOTJx7_kA1hkai8rZ7aGhag-iFMeUKejhdic_gwnw,1145 +torch/include/ATen/ops/matmul_backward.h,sha256=OqOiO41yvUDgcYKwipFgCrtW-JFs-oEl_U8gvfhNC9k,1715 +torch/include/ATen/ops/matmul_backward_compositeexplicitautograd_dispatch.h,sha256=IfZAwsZXBjNcNtWoYyxLNmLAP-8-3M9v3CrPt_M6FD4,1135 +torch/include/ATen/ops/matmul_backward_native.h,sha256=XfodV32bVP_EBgb4rzTEp3OK3spHOjVd-dE4E5-Dy6M,807 +torch/include/ATen/ops/matmul_backward_ops.h,sha256=bo-yyzhkdhIQf6SdoUtq6Aq5kFgd3_Z7uGX447LbnDc,2414 
+torch/include/ATen/ops/matmul_compositeimplicitautograd_dispatch.h,sha256=QJtM3u5qAYC6-faSHGw1n0_QJhQXCGkOInYiOQrSZaA,999 +torch/include/ATen/ops/matmul_native.h,sha256=gVzxli9XCbic3FdJV6u2uvg6kybPelEiDa74fJ-MEbc,804 +torch/include/ATen/ops/matmul_ops.h,sha256=-d5ZUx3JeMDLE8LWeJrK_xjVhpzjQ1j5t24u0ISjrzs,1762 +torch/include/ATen/ops/matrix_H.h,sha256=t03dbF01e6hqS5UBH7VYP9OjkOSA3HKhul9PRUAuf9M,482 +torch/include/ATen/ops/matrix_H_compositeimplicitautograd_dispatch.h,sha256=-ajflMamPGTvhRi4uy3-skucUUufMNlXVi6AqqP6Z3M,766 +torch/include/ATen/ops/matrix_H_native.h,sha256=g-R9BwHKF4fi2M0LDeoBCiJdO2vcxmo67Phsns5tRnE,478 +torch/include/ATen/ops/matrix_H_ops.h,sha256=88FqmS-ruJsbTZggReLMHxEWZCtxdiN74HLrZp-js6Y,979 +torch/include/ATen/ops/matrix_exp.h,sha256=Dl6UcTox03nsMgKSAysdlIOvvgXpg14buFgKwNdXI90,630 +torch/include/ATen/ops/matrix_exp_backward.h,sha256=R8c2xnA0hpkwdl2ducYAdRp8LVHSs7CdD-NCyCkikmo,710 +torch/include/ATen/ops/matrix_exp_backward_compositeimplicitautograd_dispatch.h,sha256=uUS28D7wPGFYwHYcsXYkmYCd5u92FjNrrjH6fIpR4Zs,802 +torch/include/ATen/ops/matrix_exp_backward_native.h,sha256=RB8s1zAtBu6Wrvky3BeT1IHMZ4NbVQ7ucrmLGJu7NXQ,514 +torch/include/ATen/ops/matrix_exp_backward_ops.h,sha256=HWbsK70bFpXN9TQzq6MY009goecdIRZ9e6xB2a34o3Y,1089 +torch/include/ATen/ops/matrix_exp_compositeimplicitautograd_dispatch.h,sha256=BTFybvwdLa7K-3s4NLMqEih9GfEaV3V7dQcrPTSgBYI,768 +torch/include/ATen/ops/matrix_exp_native.h,sha256=_hPcGP9oF81qWvcRlQThE36LeGp4YXXbd58--OYfoXE,480 +torch/include/ATen/ops/matrix_exp_ops.h,sha256=ERRMvk04r1faHxw4wmZWJnnFlt3lDQZgcAfnRX8B4vc,979 +torch/include/ATen/ops/matrix_power.h,sha256=Y3Ttuzu51sGgSrtRhpgQ9NyhmhKirJcj4dGbwCLEKfo,1127 +torch/include/ATen/ops/matrix_power_compositeimplicitautograd_dispatch.h,sha256=ggBKUWmjgiq4CPpuybMr49VyOwqgVHHpTPKx2UgsQzQ,972 +torch/include/ATen/ops/matrix_power_native.h,sha256=Z83XSZ5M5_Q00QJoLSnAeRRPoITOwzPN461ZF_j7RNI,588 
+torch/include/ATen/ops/matrix_power_ops.h,sha256=BZ2IUjwXXdtgUR9W_qCl1O4BtZL3JANnktrhUFLgaHM,1702 +torch/include/ATen/ops/max.h,sha256=RORprR6GqGRvN_4YVbzcg2PJJIDg950S1uiHPs2uHHQ,3761 +torch/include/ATen/ops/max_compositeexplicitautogradnonfunctional_dispatch.h,sha256=oQgn2RhxmfMGPuFSINQjiah5-NfLl-LS3wWBFMjcf9s,845 +torch/include/ATen/ops/max_compositeimplicitautograd_dispatch.h,sha256=mZb4YTt4QH5ZgOdfKTAruj3yq4RwNUI4vDMBmDizdNE,1426 +torch/include/ATen/ops/max_cpu_dispatch.h,sha256=-8j1oSNDI9PSKEJtpILgrQNqadVeBa0MKmsdWTYdehc,1292 +torch/include/ATen/ops/max_cuda_dispatch.h,sha256=NT0jfu-2O7T2whYD6ZjkJdDH9Xsrr2S6sHs32eI1FjU,1294 +torch/include/ATen/ops/max_meta.h,sha256=CUHul58qii_u2zppHqySFKSrRgfO_TZrGSo632U6oIk,1077 +torch/include/ATen/ops/max_meta_dispatch.h,sha256=3i8JSPJptX5dfA782p5dM_m43XoPtp7qKI7cyPxlrq8,1092 +torch/include/ATen/ops/max_native.h,sha256=Tnl-hfZd9c-pnqgN3jQkePBoXch6f1JEA-gaabOHGEs,1496 +torch/include/ATen/ops/max_ops.h,sha256=R_eePJPATC2z_BZVwW0AuvsH1eiCL-2BJRP5hFcTDUs,6293 +torch/include/ATen/ops/max_pool1d.h,sha256=f5HFww4uAdwWmPc9ZrkWP6gQ7sRq58mJTUhvk1pd_uU,911 +torch/include/ATen/ops/max_pool1d_compositeimplicitautograd_dispatch.h,sha256=DF8sZvX435wLllAp6uvpUTgms25HffE1cgKSdBKBqyI,901 +torch/include/ATen/ops/max_pool1d_native.h,sha256=Khse4-_yfqRMVjUfl7iUmZl2ud3GqAUfUYDfalG8Aoo,613 +torch/include/ATen/ops/max_pool1d_ops.h,sha256=IaVQuU1IJoH79YHPQSh61HMXNZh2p2uIkfun9wNnsjs,1390 +torch/include/ATen/ops/max_pool1d_with_indices.h,sha256=WRSXjjHajMu05wJwRaAyg2pbXlziEsGFPbAcVd0vffg,998 +torch/include/ATen/ops/max_pool1d_with_indices_compositeimplicitautograd_dispatch.h,sha256=C5kjki9wDdSlniaafgHM70uAptXDB_N20LU7ujYF9cw,939 +torch/include/ATen/ops/max_pool1d_with_indices_native.h,sha256=P-wCG7hor9_LqRsZGZOVt48qndPfMRgsijAACtI2G4w,651 +torch/include/ATen/ops/max_pool1d_with_indices_ops.h,sha256=l9lpdn_WGW7-QbnyCBNjgMgks9I3jY3n6-Cem8GI7Gw,1514 +torch/include/ATen/ops/max_pool2d.h,sha256=BbjmlTOziR2u5HgY2yo2NKeqkiHCXPZrGDAqSrA6ESM,911 
+torch/include/ATen/ops/max_pool2d_backward.h,sha256=zYNrWiafgfCqpDMCYglF2tgAD55dwQyEzbAjRQqYDU0,2159 +torch/include/ATen/ops/max_pool2d_backward_compositeexplicitautograd_dispatch.h,sha256=gQ7VUKhB527wOR2LVRrxQM5Y5FcwZ3J2TUYAlEZp1Uk,1210 +torch/include/ATen/ops/max_pool2d_backward_native.h,sha256=_YmBcypg5aRbj2IWIYl-gKXfgKP5mPlx8ap7nL_nIjs,665 +torch/include/ATen/ops/max_pool2d_backward_ops.h,sha256=hRuw2V8glUrERNgIpPruhr7-lTtN38FsSF-1j-skkYA,2698 +torch/include/ATen/ops/max_pool2d_compositeimplicitautograd_dispatch.h,sha256=9Gok7VNwNIHmV2NfC7fxifz_nFTTucD4vMQUNfmQhvM,901 +torch/include/ATen/ops/max_pool2d_native.h,sha256=Ph4XEcXdZ9WHKXsk6Q9nG1ZxJM4RAbL-tNIPptp2KY0,613 +torch/include/ATen/ops/max_pool2d_ops.h,sha256=TJL_UqQ1qY-3PAbqKF7O9O-340YvTUyH1hIydqpGGLA,1390 +torch/include/ATen/ops/max_pool2d_with_indices.h,sha256=Pi7mrpdSGIbxViYDzXsUblh6nS4JAEHN76ElyU9bNMw,2223 +torch/include/ATen/ops/max_pool2d_with_indices_backward.h,sha256=gnKKx1hSJucHYgrZUWEmCqdbwmjM_17MK82gzNU4088,2453 +torch/include/ATen/ops/max_pool2d_with_indices_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=nGoahet0rfcevcKyuG56TOJh6cIRgbh8noXxmSrw-Ek,996 +torch/include/ATen/ops/max_pool2d_with_indices_backward_cpu_dispatch.h,sha256=lUEzB6hWsVCiLjqc2ll4C9r2p1XouZeYqe3v7tMIS48,1509 +torch/include/ATen/ops/max_pool2d_with_indices_backward_cuda_dispatch.h,sha256=HJxfFJ6ALfEI2gXa0su2rFCB8IXMNqi54YGvoAmX94c,1511 +torch/include/ATen/ops/max_pool2d_with_indices_backward_meta.h,sha256=qjKfUxCXhWZWNKZL20dUFF-WUzRPdxFBFWt7Z-f_bWU,781 +torch/include/ATen/ops/max_pool2d_with_indices_backward_meta_dispatch.h,sha256=u7b6dIwWfdxARQwo_3GP-bJxQgDH_MEQWdpsYt-6bu8,1511 +torch/include/ATen/ops/max_pool2d_with_indices_backward_native.h,sha256=Q4eHeBi2-N7kv6PV3TwI5nUtfgnfI85uPhSQGcV64j8,1250 +torch/include/ATen/ops/max_pool2d_with_indices_backward_ops.h,sha256=eGx6YAGwHAlEjZFRIzOVOTuGtkI3mxjRqMQKVROrfRM,2976 
+torch/include/ATen/ops/max_pool2d_with_indices_compositeexplicitautogradnonfunctional_dispatch.h,sha256=T5MFgLSN2CrBL-ec8cVQC8n0L0qV6_6lIL8JHVbLw2w,965 +torch/include/ATen/ops/max_pool2d_with_indices_cpu_dispatch.h,sha256=Ya8HebugKpVAxZwvxzenIVbhwld-JK6gpP5SszUp3ZI,1437 +torch/include/ATen/ops/max_pool2d_with_indices_cuda_dispatch.h,sha256=g86F4IdJFFVnwVQeWiIqaTyjZ8GY4AeMEtII0CHrSHM,1439 +torch/include/ATen/ops/max_pool2d_with_indices_meta.h,sha256=pac1PJpNsPjtA86PFE1WQkE5HpgS9cdZr0rV-0zNfdw,712 +torch/include/ATen/ops/max_pool2d_with_indices_meta_dispatch.h,sha256=HUVhgX_v6sL4iuzPQXLee1LS1dUNlJR2ji6ZRWv6xrk,1439 +torch/include/ATen/ops/max_pool2d_with_indices_native.h,sha256=tujAfN4Jfr6n_M5z32_4StOwHjId4-PWo6Lkxg2oDYc,1127 +torch/include/ATen/ops/max_pool2d_with_indices_ops.h,sha256=ekOCHSnFYYomUdjFGsWjCNSPp9-r6ARbWRbazrkIm5k,2772 +torch/include/ATen/ops/max_pool3d.h,sha256=Q0tofoQKF0C5KaDKh603qc8IvJ3qxYt9gsGAfsCrxAU,911 +torch/include/ATen/ops/max_pool3d_compositeimplicitautograd_dispatch.h,sha256=-59HnY2XbtqxV1pnx0diBuSVITVyeCxnyC7aXE4EdbY,901 +torch/include/ATen/ops/max_pool3d_native.h,sha256=-6URFJakf47e4uAZSOIWr_O6Yf0qPQNIeaEFVVQsn4o,613 +torch/include/ATen/ops/max_pool3d_ops.h,sha256=7POoAsMi3CxO7SBq2_HyYPOlUjTcWaXquonyVeQZWZA,1390 +torch/include/ATen/ops/max_pool3d_with_indices.h,sha256=VEqsXRmcMKPnms2Tn2gJQAhfLAXHAlRP1gEaga99o1Q,2223 +torch/include/ATen/ops/max_pool3d_with_indices_backward.h,sha256=nQubyc-k6kqNbUkpc3ZJNjJ8_u5tzIutT7xRryIjmbY,2453 +torch/include/ATen/ops/max_pool3d_with_indices_backward_cpu_dispatch.h,sha256=g9T7v0y6tUwzCVFeSiL0L_Ywsst0ZrfoUUsI2VGvySs,1509 +torch/include/ATen/ops/max_pool3d_with_indices_backward_cuda_dispatch.h,sha256=1UkC4-ToPZ3ZYtuEBAHRlALZ8H2s5SyqGCBXiGOUhzE,1511 +torch/include/ATen/ops/max_pool3d_with_indices_backward_native.h,sha256=PtaVDy1ptonW9TCorjFbN1ouqFL_rJOoXZXoGTOAyIE,1542 +torch/include/ATen/ops/max_pool3d_with_indices_backward_ops.h,sha256=Q1JrCcH49teFT6dnxRo0fQvDI9SV44BHCk9tjJc3Lp0,2976 
+torch/include/ATen/ops/max_pool3d_with_indices_cpu_dispatch.h,sha256=__GTLWy5DSpJjFbjGsNAPx-mnand2rtIgx15Wtkhb3U,1437 +torch/include/ATen/ops/max_pool3d_with_indices_cuda_dispatch.h,sha256=OJxVUg2NawWQ4mCgbx1rIbonSR8zDFxGhUjMNLj-Mz4,1439 +torch/include/ATen/ops/max_pool3d_with_indices_native.h,sha256=pNWmY28gj6ZuK9NFYT6cxJi7vg5MssK3N2AfRQtirLY,1426 +torch/include/ATen/ops/max_pool3d_with_indices_ops.h,sha256=_DhxaSBbJFcioUiE1cvVTzIcAWY5TAzt_ozxLSTktDg,2772 +torch/include/ATen/ops/max_unpool2d.h,sha256=wU2odQqiB3Wt4t5S4tghWNvkC7b1w9Mo0OJcMkwkEJc,4440 +torch/include/ATen/ops/max_unpool2d_cpu_dispatch.h,sha256=Y5vJM3pG_qThEErm0kGhNUrm8P-Ge6qzeUmcQBSqBPU,1499 +torch/include/ATen/ops/max_unpool2d_cuda_dispatch.h,sha256=6LLpeLQzJQAJZJ5s5AavwziNRro5QpO9tfD6x1vsTfI,1501 +torch/include/ATen/ops/max_unpool2d_native.h,sha256=5J7c6DSPxYG0r_-D3gI0gOseOqVgAaqjZHek0iQojiY,1000 +torch/include/ATen/ops/max_unpool2d_ops.h,sha256=nPR0-D7Re1VsYXg7C8vAjTNabMxMMFGx8tYnk4qlp7w,2030 +torch/include/ATen/ops/max_unpool3d.h,sha256=jnIqMtk04ZQdxS2mNHQqlHUb0rnTwFXu1gCzsJniiTM,5418 +torch/include/ATen/ops/max_unpool3d_cpu_dispatch.h,sha256=11aqKZqsA1BaSqbM3qVgxe9XcL_9vRKaBut5vDSe3BU,1793 +torch/include/ATen/ops/max_unpool3d_cuda_dispatch.h,sha256=2uB5CE8MsqGfIDFNyJJqZbiNF0TCahD8LRMihrfp0ss,1795 +torch/include/ATen/ops/max_unpool3d_native.h,sha256=RIqHp4c6oqydbx1tMNYoKTfLkOLelIO3chZTipF79Jw,1196 +torch/include/ATen/ops/max_unpool3d_ops.h,sha256=NoGj2d00USQl2MmgubCN_gqh-EolJlFVwpJ1mWSeglU,2356 +torch/include/ATen/ops/maximum.h,sha256=N3tFsXDqo58pUKSwuVqRJRmONyypYbAIEyQEG_x7U4Y,1155 +torch/include/ATen/ops/maximum_compositeexplicitautogradnonfunctional_dispatch.h,sha256=KKg9KPUCB80Lh0emHI4KNMDobeFWnyINTReOsZvw2xA,817 +torch/include/ATen/ops/maximum_cpu_dispatch.h,sha256=pghd81gWcT09b45l264XaXdvV3mSfUF84JYwWnaAHxg,958 +torch/include/ATen/ops/maximum_cuda_dispatch.h,sha256=pCVRIvSNume8t8k3N5X5A5M0xNpES1rttjbHt2oRRhE,960 
+torch/include/ATen/ops/maximum_meta.h,sha256=qEzhLxmp0hybzpY5wLMWCfFYFHT6JidWTNdBq8o-PSg,602 +torch/include/ATen/ops/maximum_meta_dispatch.h,sha256=TgoZmCVzVoAgMXUROk3C0u_VY05zZEaKtscRbBKdfuU,960 +torch/include/ATen/ops/maximum_native.h,sha256=T6TnSlYalplUBGjJxX2H0k1sAoRSmpmX1kIN08aIbkA,625 +torch/include/ATen/ops/maximum_ops.h,sha256=x4MX1aRx-EfJg2MnxTS3rHfxAKSIhaaGSCpHJneHjVY,1768 +torch/include/ATen/ops/mean.h,sha256=C4mxmgua9Dz_eJa9UOhIx1tg2ZTV__esmyf8B6ishKE,3352 +torch/include/ATen/ops/mean_compositeexplicitautograd_dispatch.h,sha256=UHze8qfCMmukBNW8uk2eOh0d_sKxonLDFO8MdMPuUec,1062 +torch/include/ATen/ops/mean_compositeexplicitautogradnonfunctional_dispatch.h,sha256=aBpLpmX5708xJwl7qGYgng7t63c6kl4wNvaGN8dbJZA,891 +torch/include/ATen/ops/mean_compositeimplicitautograd_dispatch.h,sha256=ABeBA4kFpoZKvGeYdgPZVVSBVsA-bPMmKefFmUi2rzs,1179 +torch/include/ATen/ops/mean_cpu_dispatch.h,sha256=nTfWuFfm77cbn2Ab2mMn9AEHjYJV0jCSuvsFZ4dhECk,1159 +torch/include/ATen/ops/mean_cuda_dispatch.h,sha256=XPF9oKhY5yDoY0zvm8aWZitgcwu7SFIiZjEZXeQzMtw,1161 +torch/include/ATen/ops/mean_meta.h,sha256=OAZcdlb1EbBYLg7SkcaSrbrYw_iKUyrdYjZKiEYS7wM,659 +torch/include/ATen/ops/mean_meta_dispatch.h,sha256=VWWAmyrXaEdg5Wi6ILslojWoKHuOxy1sF7ZTo_-5fUM,1161 +torch/include/ATen/ops/mean_native.h,sha256=9v9SrGSsskli6ylpiK8v99IfSd9FCxDVJ_-SL0Jwnag,1541 +torch/include/ATen/ops/mean_ops.h,sha256=Uxq2dc92V3unVfv51G9sIS826poYts9rhaBOZY5Nw9Q,5319 +torch/include/ATen/ops/median.h,sha256=_Wgbk1bcH17LqCH8rK8yEklgsgy7MJNOPfGvIAA7lOk,3194 +torch/include/ATen/ops/median_compositeexplicitautograd_dispatch.h,sha256=NI0DRVMls9CQcIbk7ZQ9aIyTQBGyH1tB2oeC1ziWrpM,979 +torch/include/ATen/ops/median_compositeimplicitautograd_dispatch.h,sha256=kbnqBK7nzMdcrHhSDEYuNSbPJYiFqjAP747i5KtJ6yo,1155 +torch/include/ATen/ops/median_cpu_dispatch.h,sha256=L9rNNPcB-6eB5fPltEpOErlmdMbh2-K3aAKRlL5ntY4,1041 +torch/include/ATen/ops/median_cuda_dispatch.h,sha256=0U0DKCxKT3UjzwPSci0N0NA4TsV1FanDeUgidYpIeX8,1043 
+torch/include/ATen/ops/median_native.h,sha256=qjutwhjV47nVSk-8n9JTbF1Ls16OGs5XwCqtAbn2Xf0,1329 +torch/include/ATen/ops/median_ops.h,sha256=zBT7BqRAchc9Ww6N8G10SsmNAa1SukDWepOo3U62-Ok,5020 +torch/include/ATen/ops/meshgrid.h,sha256=kxYMYDTMqoFr1K6WrkTnIW-1uY4xkI6vRPY6WH3J2_k,883 +torch/include/ATen/ops/meshgrid_compositeimplicitautograd_dispatch.h,sha256=ZZdjr_0aAYZNtcE6hrK2wFBtY36i7QDkC9xzT4EwUOs,877 +torch/include/ATen/ops/meshgrid_native.h,sha256=ROlUjTBealDd8Qrdx_NleyjhBaU0i8NGOItlX7cGu48,589 +torch/include/ATen/ops/meshgrid_ops.h,sha256=qYCmBNIlUabpgJl9MHEfsKdYkQ9fw47ZMUidJFDpOYs,1719 +torch/include/ATen/ops/min.h,sha256=qFPAVFhFKYZBQPuv3ZWIxGLeIF-KVXfqME8h0edn9WY,3773 +torch/include/ATen/ops/min_compositeexplicitautogradnonfunctional_dispatch.h,sha256=yZeiTF6DsejCuRMy90M7JEKF6ALrLVcp2Y1ChwFQyeQ,845 +torch/include/ATen/ops/min_compositeimplicitautograd_dispatch.h,sha256=MidFFlx8c5xpQi5Q7NNr9uG5kRFdwZNIxt7Ih6ukiBc,1428 +torch/include/ATen/ops/min_cpu_dispatch.h,sha256=bwBX5JE-M14njzn3lPfKH0JkPMeeGTKK-KRg-dyMA_o,1294 +torch/include/ATen/ops/min_cuda_dispatch.h,sha256=WquZLCzonaMeDRYUoJcgPkBio_4OTVxYA8tvYyCotV4,1296 +torch/include/ATen/ops/min_meta.h,sha256=P7CVel1LnSYW8JJRZVFMWmMQJZUoW-tAcFclf0ylVqY,1077 +torch/include/ATen/ops/min_meta_dispatch.h,sha256=0HbzU5-UhAgkLA2z8SoxdY9Uqfn9hHPCuMTAkPb40Js,1094 +torch/include/ATen/ops/min_native.h,sha256=cs9YfvQ2aiTbgmraCyOtaCpUtQLw867FzNpMfo04bwc,1498 +torch/include/ATen/ops/min_ops.h,sha256=mUO06dj96YHhGsZ8iDSw1JEnC6VY3Eqk6kAio8_XdTg,6299 +torch/include/ATen/ops/minimum.h,sha256=P6Uaj8NRrv3S8uQG74p2IuVAXsUFWYetCBL1WP8udRI,1155 +torch/include/ATen/ops/minimum_compositeexplicitautogradnonfunctional_dispatch.h,sha256=YViqkkU8aBEPsqtU_CIBn-IDUAhDTq46ZjK-zYOUfaU,817 +torch/include/ATen/ops/minimum_cpu_dispatch.h,sha256=YzBtN9UsGh6bYXNsxasIRHfnXj_vLVVGNEktglm5IG8,958 +torch/include/ATen/ops/minimum_cuda_dispatch.h,sha256=wA2E_-db0grMPAZSH-jE9xFzvjMZmIwqYgOTqNZFtYY,960 
+torch/include/ATen/ops/minimum_meta.h,sha256=OejxGbIsvUZjIlN0s6sheV6qafOkHEhqHp9TpD1MX78,602 +torch/include/ATen/ops/minimum_meta_dispatch.h,sha256=ON8t_0TuLWdomudwTRnfRJlsj8fmkQE_Cw93feLpaVc,960 +torch/include/ATen/ops/minimum_native.h,sha256=7Fls4BqxVIOuX4_4CT0ghQphfTOZvkFyMWIBt1f2xNw,625 +torch/include/ATen/ops/minimum_ops.h,sha256=lD8cLnHL4tWVKLBZeYvpQObDM5CKT4oEzrJkRH8fe3M,1768 +torch/include/ATen/ops/miopen_batch_norm.h,sha256=pchJ_aGxRaIxGBVnK3iysxW5UBmRn5jrLLCnenYMCH8,2857 +torch/include/ATen/ops/miopen_batch_norm_backward.h,sha256=SAmO0v44ge3eFsF-MNkezsIZSVuhkEVooDp7oeI3zLU,2998 +torch/include/ATen/ops/miopen_batch_norm_backward_compositeexplicitautograd_dispatch.h,sha256=JxlFLR6Rc8BFPvx9BGXEObWIjcnhrg_FBPecnVl_oWM,1601 +torch/include/ATen/ops/miopen_batch_norm_backward_cuda_dispatch.h,sha256=Rlczt86wXFdYbH7wgF50wPzJ8s-mO_qpbcbhjxU22g8,1046 +torch/include/ATen/ops/miopen_batch_norm_backward_native.h,sha256=WbelUkObdxewWqNk-s_aYjT0GUFgvrMiabvawu3-spU,1245 +torch/include/ATen/ops/miopen_batch_norm_backward_ops.h,sha256=c8joUkb4g4YF3xW9cqOTsjNpNqXVLC4GqEqHq86Q-5w,3849 +torch/include/ATen/ops/miopen_batch_norm_compositeexplicitautograd_dispatch.h,sha256=QOW02ZhPLJhOigLkHEovbOFjTy9OxXyfXoDxwj7s-bA,1517 +torch/include/ATen/ops/miopen_batch_norm_cuda_dispatch.h,sha256=kuuLPvx5GxN6ZeIiLUc5unI7_f9sJ7_TdLl_wJNzMJo,1004 +torch/include/ATen/ops/miopen_batch_norm_native.h,sha256=Y_hqRoS-e5gKlIDsaT30zK63WtOyj_q8zbFw71zOPHI,1161 +torch/include/ATen/ops/miopen_batch_norm_ops.h,sha256=xbFOHr-L3xYGBF0O_Ilo5CqiQfstOZLPRePqccAV6C4,3589 +torch/include/ATen/ops/miopen_convolution.h,sha256=6NmYthrAlAFS1kVBIz6QXCs3-pH_2yjxLUruFA99Yxg,7884 +torch/include/ATen/ops/miopen_convolution_add_relu.h,sha256=dS_KE6Dm-hmCX5S2xoa4DnJLorsRprb6oionHLGN3iY,2959 +torch/include/ATen/ops/miopen_convolution_add_relu_cuda_dispatch.h,sha256=WByDFmsltt6dKiDT1VUL2fFksVCrTST_QcwsFpeieTw,1291 
+torch/include/ATen/ops/miopen_convolution_add_relu_native.h,sha256=tJBEvcn4RqjsoJTvy4QeIaG-q5fvMm2LmaGmS-2GhPI,722 +torch/include/ATen/ops/miopen_convolution_add_relu_ops.h,sha256=WKcg0dgT0MYfsQi80z5gevTgjOJoTcGO_OVGU0fhYa0,1825 +torch/include/ATen/ops/miopen_convolution_compositeexplicitautograd_dispatch.h,sha256=-4S46dpFOTXpePCHBmzbCCq2wq1ghUGYgoyjE_7JoOM,1902 +torch/include/ATen/ops/miopen_convolution_cuda_dispatch.h,sha256=02O5i3A_if7s_Fv78C3ckYm9QPeVbqOuBJh1U2FF-CA,1215 +torch/include/ATen/ops/miopen_convolution_native.h,sha256=N-YaTw-Gj6pb_f8jPEyyWSZhQqP96oQgwthIMitTqGI,993 +torch/include/ATen/ops/miopen_convolution_ops.h,sha256=DdK5CVjeysXz52fYDYH8zI8xM3EObsCeEL72Uf-Ha7s,3068 +torch/include/ATen/ops/miopen_convolution_relu.h,sha256=zwdt0WI0KaNJBaUx6YUMVPrIHITV-hH7E6LHmuwzYFk,2565 +torch/include/ATen/ops/miopen_convolution_relu_cuda_dispatch.h,sha256=FSik3rtgRJ-Me1H_ANYNVwXpqi1Oyr4mrq1kHoUB7eI,1153 +torch/include/ATen/ops/miopen_convolution_relu_native.h,sha256=j5nsC5O5vEH32lhxESgzWahgfAfL8nGuxBUZqLSFcHQ,653 +torch/include/ATen/ops/miopen_convolution_relu_ops.h,sha256=Igi03_gJtilN2E5LLhsSlVXXzr0HgabfSEsmQbTWe54,1601 +torch/include/ATen/ops/miopen_convolution_transpose.h,sha256=X-3bjoZz-y4BHeR755S2cC6PQioh0VPAuIjvdF7a4Kc,9100 +torch/include/ATen/ops/miopen_convolution_transpose_compositeexplicitautograd_dispatch.h,sha256=LX_c1SgDN7RzNScXmKs-IDy6M1mwuUWzhAhgYetauPg,2078 +torch/include/ATen/ops/miopen_convolution_transpose_cuda_dispatch.h,sha256=U8WL0Lz3w0gCD8nRqVeFgQHNf1YGgyhsKqcnSYXUWiM,1303 +torch/include/ATen/ops/miopen_convolution_transpose_native.h,sha256=kHYCoas2lzAbLossdfLu6umEcsGH745CEY0AlwneG98,1081 +torch/include/ATen/ops/miopen_convolution_transpose_ops.h,sha256=SsYg6Oxshi6OsQQOgXrQ8wsuDnvgiB_dfPPVVd_6A38,3364 +torch/include/ATen/ops/miopen_depthwise_convolution.h,sha256=kSD1CaAWfm0tIupkIWj_58FYXi1g2AsxKE966Bl-UlI,8194 
+torch/include/ATen/ops/miopen_depthwise_convolution_compositeexplicitautograd_dispatch.h,sha256=g3ORML6JzFIG_p0SjOUy-NINmoxguxdjw2dxkofV07o,1942 +torch/include/ATen/ops/miopen_depthwise_convolution_cuda_dispatch.h,sha256=fK8uNmj48AMrrDQxIAlZj-ELGxLW7kPjesuSMy-Hij0,1235 +torch/include/ATen/ops/miopen_depthwise_convolution_native.h,sha256=Xy8pgBXADEcQwORxNjn9LB0EuoLQE3ZnI90cZgACUGI,1013 +torch/include/ATen/ops/miopen_depthwise_convolution_ops.h,sha256=E3dawVU-qcSnKqk9R7WvYkEsIQ64ikC8K8X4VZ56HBE,3128 +torch/include/ATen/ops/miopen_rnn.h,sha256=rCpDHizs1dIMA5dYw-_4Yxf0yeT_Y5SOwrn13dynrm0,3687 +torch/include/ATen/ops/miopen_rnn_backward.h,sha256=o63T62VcU8cpDiZP4PGv0qRO3xBHvYmtCmnnCUAS71Q,4809 +torch/include/ATen/ops/miopen_rnn_backward_compositeexplicitautograd_dispatch.h,sha256=DoZX3-aSQa8CHtsoESjLPpYzPO6_irXA_RjSqHmYFaU,2139 +torch/include/ATen/ops/miopen_rnn_backward_cuda_dispatch.h,sha256=FCIJgaXJX-iBcR0tmAHnlsDW03YtbB6mu1JeXOioifo,1368 +torch/include/ATen/ops/miopen_rnn_backward_native.h,sha256=5ljTWIVLmxgA30vyxnlV8_uUTkN9cp-U7Pw27Fxlh-I,1836 +torch/include/ATen/ops/miopen_rnn_backward_ops.h,sha256=eCYGedYsSrtWIx6wdRLZJVr6Uqz60kh4g1SEDVK_L8Q,5790 +torch/include/ATen/ops/miopen_rnn_compositeexplicitautograd_dispatch.h,sha256=AZkOJx4e9F-gnHlwUMX0DjjD52gF-dDixuhXd6J3pQo,1785 +torch/include/ATen/ops/miopen_rnn_cuda_dispatch.h,sha256=gNYIOz6OzeJ7Bk3s8TYVEfGPIiOj8Rbovomg_y3A8lE,1096 +torch/include/ATen/ops/miopen_rnn_native.h,sha256=Z0rMe492I-098oz3cYTwlC7KSPYWCwdZjhDRj18pZco,1387 +torch/include/ATen/ops/miopen_rnn_ops.h,sha256=bhenzqNihiClePY8tQjYXLy-Hhg5-5wz4O9-cLoTJEw,4397 +torch/include/ATen/ops/mish.h,sha256=_hc4KlsyNtmc13wdP7cQJGXf-nw5jZrD4vArLrRCUDo,1120 +torch/include/ATen/ops/mish_backward.h,sha256=7qB0q97nUNXBNq-Rmy-DFnLlVdPSat4YScoOpl5nufE,707 +torch/include/ATen/ops/mish_backward_compositeimplicitautograd_dispatch.h,sha256=sjaTqkPtn8DYRK9BKg9UdQ-Pu9GTC0NgTu0uVNeGBnA,803 
+torch/include/ATen/ops/mish_backward_cpu_dispatch.h,sha256=bVODwoS__2M4DEA_Z5QYO7Jdt2SVPfeb3Xfcv1eA5Mc,759 +torch/include/ATen/ops/mish_backward_cuda_dispatch.h,sha256=Uo5MdmBtQwvTo5hwt7vR75_8sb45kGSDq1zwlu8Bchg,761 +torch/include/ATen/ops/mish_backward_native.h,sha256=OoS-oohBYl0O5rEpYrMDylKSMHLyrRZcdFVKxpVJqlU,613 +torch/include/ATen/ops/mish_backward_ops.h,sha256=CiaJGTnakc88W9OcdMhQt1CSCPmzcq7_Q3yn_vUDy9A,1092 +torch/include/ATen/ops/mish_compositeexplicitautogradnonfunctional_dispatch.h,sha256=-ibYHDLqaY7uQmXRTpdWzBAAxQrERiQrEcAjj9nF2RQ,837 +torch/include/ATen/ops/mish_cpu_dispatch.h,sha256=7BfOMpIf3X27mStmRGBSwHo2RA2XvgeaDWlXbq9pM18,920 +torch/include/ATen/ops/mish_cuda_dispatch.h,sha256=69PawBuyzCIlcHgrkYXJ8MJy9u2tX34s-2t4L2GrKqg,922 +torch/include/ATen/ops/mish_meta.h,sha256=-iYnObjp65zmPkBg3wv9GJA41BCwHDWMYC9Gw2WU6uI,573 +torch/include/ATen/ops/mish_meta_dispatch.h,sha256=rRObNDUNIDNKE-iNn6tbPuUQfKlnY6AzeawySBqpyM0,922 +torch/include/ATen/ops/mish_native.h,sha256=sPw2M7xkY951QQ9PGFASRBlEwUSzljEuGBIcM-Qv3h8,590 +torch/include/ATen/ops/mish_ops.h,sha256=SJxR-qNzaH9nyJcPk91k4BoEwm1hZvHvpfUR636guUg,2104 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d.h,sha256=fkTTrQv3F9wBIiY0y-ED-A6dl9kYLvTwz7ieCv86Kw0,1390 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_backward.h,sha256=bXN_3Pic2Mz-xFn9hddJvVGl9QPI1K-rVhe96nVvFXE,1489 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_backward_compositeexplicitautograd_dispatch.h,sha256=5IQtgzY_LduH3bknIOPNrIMWisl0hzHNYMmEqbl0Ez8,989 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_backward_native.h,sha256=xlddIjfcXgM_jSoB3vwYaZNvLjfgO3IJiaL8t5RWA1E,676 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_backward_ops.h,sha256=BbTbGgk3Cnjx6Ag1Cj2ei9qYrQjB-Gc4cELpv598afA,1972 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_native.h,sha256=jC6jVdunXOE59G6FX65WMphKBQE5SSKt1pjHvZdco68,652 +torch/include/ATen/ops/mkldnn_adaptive_avg_pool2d_ops.h,sha256=LhPVQSJ_EE5Fq4T0I5uMdArBLw6z3XvUaWo4ECYN-Ss,1900 
+torch/include/ATen/ops/mkldnn_convolution.h,sha256=B950aCjewaSpv0U0A6qcL6k3rsn2JLZxMXacipH99TI,6924 +torch/include/ATen/ops/mkldnn_convolution_compositeexplicitautograd_dispatch.h,sha256=piK087UHd6MJ0P7djf_Xt6iQTqzic0eZCfLK5aGEMNc,2233 +torch/include/ATen/ops/mkldnn_convolution_native.h,sha256=vxhPyUc80_5StZmwBEW3hY7tMEz-anni8UPA9qqXUXU,921 +torch/include/ATen/ops/mkldnn_convolution_ops.h,sha256=y_RDRDSf-O94XAGH9PHNJW9rTmvXPA3Lx7pcs21xvBc,2828 +torch/include/ATen/ops/mkldnn_linear.h,sha256=nInUgCnTcfSJi7yRcBDjI41lpmqglER8JmFrDI6s3vM,1431 +torch/include/ATen/ops/mkldnn_linear_backward.h,sha256=jCKRUSXqORnRQWw5udrQO0asbiaLlp_t4BJi-5At8Aw,2073 +torch/include/ATen/ops/mkldnn_linear_backward_compositeexplicitautograd_dispatch.h,sha256=HshatuKdCbXJUkH0Damx9mGbmpzm8aYW3k3CoUYFlfM,1243 +torch/include/ATen/ops/mkldnn_linear_backward_input.h,sha256=qrddl5pCIT06UMD8FjTt3xqfkiyWALyYlagitg5ajS4,1611 +torch/include/ATen/ops/mkldnn_linear_backward_input_compositeexplicitautograd_dispatch.h,sha256=ainUrq6BsTfIX39oQ8UGaPZlfWaK48biV7tPwkaShrU,1035 +torch/include/ATen/ops/mkldnn_linear_backward_input_native.h,sha256=giYwRjs1rk_86aF1hM40LmNzkd4_UkgUiJ1_i-nzzvw,722 +torch/include/ATen/ops/mkldnn_linear_backward_input_ops.h,sha256=3ifjr6daMbmx8EY3bP5H1mt8Hqbi_lRYSRveAogT9VQ,2124 +torch/include/ATen/ops/mkldnn_linear_backward_native.h,sha256=_XGnOnj4W7-09om4I8AjGVKkbGK88Z5qnyE-8Q0RPEM,887 +torch/include/ATen/ops/mkldnn_linear_backward_ops.h,sha256=4Ld0BUmPNHbhaq2xV6UjrNHLHs1hcWFR4Tct49nT390,2707 +torch/include/ATen/ops/mkldnn_linear_backward_weights.h,sha256=_Z6KcYhuc9kDwPgXdnTJGEHLWSLjfP8v7vjB1rnmhDI,1961 +torch/include/ATen/ops/mkldnn_linear_backward_weights_compositeexplicitautograd_dispatch.h,sha256=QM2v61b9EY6_DmLygIrYv-_Z1TA3vtUND8tGSSPNX7g,1167 +torch/include/ATen/ops/mkldnn_linear_backward_weights_native.h,sha256=SCpC121gl3Yg55DucbawpYrIW7Bz6jMbt5RC6YHiRC0,832 +torch/include/ATen/ops/mkldnn_linear_backward_weights_ops.h,sha256=c8pLIlW5bY3erY2oJGyDHKGQca6bzIAlnc2i_aem_Ms,2504 
+torch/include/ATen/ops/mkldnn_linear_compositeexplicitautograd_dispatch.h,sha256=lovBTNun6J6izRmXS05Zi98LB9_iwUTC9G7vG-E4OC0,1022 +torch/include/ATen/ops/mkldnn_linear_native.h,sha256=HH-x21wC2n5XJrO1tcPd37At_zpjnuR6zMGyb9rP8mY,709 +torch/include/ATen/ops/mkldnn_linear_ops.h,sha256=ic9hxwdqEBpeo2zD3rRzT6ODSzDD3RpcVl4lba3daJw,2090 +torch/include/ATen/ops/mkldnn_max_pool2d.h,sha256=C9ykw7F5c1lWtOXzuu1xoKt66nDdf6mf7_rOVgyJ6vM,1944 +torch/include/ATen/ops/mkldnn_max_pool2d_backward.h,sha256=S0mzTLyDyM-CNr7iV6YRPZsy7Yw1bvMcJOnz3gjgYAg,2388 +torch/include/ATen/ops/mkldnn_max_pool2d_backward_compositeexplicitautograd_dispatch.h,sha256=W-GHd_uDPgGgJb61UDO_Nuqhz3Lb7r6bZW3FB1nin5k,1280 +torch/include/ATen/ops/mkldnn_max_pool2d_backward_native.h,sha256=FZLYLFKa1CqRqOEzIY3p_7H_RkXETz3HgcG_ugM33Lk,967 +torch/include/ATen/ops/mkldnn_max_pool2d_backward_ops.h,sha256=ftIlILpJzN8Fiy0iY7LcB_Zqt8zsujm8vaAaviKpyB4,2924 +torch/include/ATen/ops/mkldnn_max_pool2d_compositeexplicitautograd_dispatch.h,sha256=x-gDfK5YQ3KscofGu6LGMvKk14n3qVtujJBva6zMxtA,1142 +torch/include/ATen/ops/mkldnn_max_pool2d_native.h,sha256=ZCKBajm4SJgEbgEGQ2Zd-eJDJMCZHriyPcGCV2NXjTk,829 +torch/include/ATen/ops/mkldnn_max_pool2d_ops.h,sha256=X1Lwu3roEeGxWERJkzZ-0QiIxW0CCaEIv0qdx1nkqcU,2478 +torch/include/ATen/ops/mkldnn_max_pool3d.h,sha256=gTgNjU5fcKqhXF_7dubF1ivle3YgrgR7Ka-9arI2eZY,1944 +torch/include/ATen/ops/mkldnn_max_pool3d_backward.h,sha256=4PajbXyRBnqBn0h1Z1X6yN9yCLUbHFgvIctLcdmQGmc,2388 +torch/include/ATen/ops/mkldnn_max_pool3d_backward_compositeexplicitautograd_dispatch.h,sha256=NOEZKZcef2UHL6jClsc_pGcwyO2riRVc85F2CRRvwmM,1280 +torch/include/ATen/ops/mkldnn_max_pool3d_backward_native.h,sha256=8R2kRx2BrI57XghQRtEyMchdPpP5ENrLLyJ0FTtpGl4,967 +torch/include/ATen/ops/mkldnn_max_pool3d_backward_ops.h,sha256=-lDEFPmslwX3RswE3c6kGkIbvzYac0fLY-ywsH9Jkn0,2924 +torch/include/ATen/ops/mkldnn_max_pool3d_compositeexplicitautograd_dispatch.h,sha256=vIDea-OnUwC_IdUQMpnlBLJn-rILWfhT3pD8ObMkaQw,1142 
+torch/include/ATen/ops/mkldnn_max_pool3d_native.h,sha256=D5irLdTDOlCiIMfvuD5xQFVxqfZXo-G9UOxpnqef7gc,829 +torch/include/ATen/ops/mkldnn_max_pool3d_ops.h,sha256=LojQwe1_TkuAaBMLYhD9aPYYl9n6-i8UAVrLKHmhjTo,2478 +torch/include/ATen/ops/mkldnn_reorder_conv2d_weight.h,sha256=-4NS_r9klnZsS0o8pRMDeAkwAQkfIxyhHedJv3Lmcrw,7772 +torch/include/ATen/ops/mkldnn_reorder_conv2d_weight_compositeexplicitautograd_dispatch.h,sha256=fgyhpMgWFHPAMhx6gaAz02pPpt7cl9tuylBjZFKKcEs,1757 +torch/include/ATen/ops/mkldnn_reorder_conv2d_weight_native.h,sha256=_cVR5vFLqcJ0zSrk9kIWet9M_z94YZ4lPe-Mxg-ZvOo,901 +torch/include/ATen/ops/mkldnn_reorder_conv2d_weight_ops.h,sha256=kA3pqTcN2W_V4I5n4TeSdmSdmWTgP5FAEnzxtsf3ecY,2728 +torch/include/ATen/ops/mkldnn_reorder_conv3d_weight.h,sha256=2YmT7HfdFbEUvcU2ErbB3Y5b7nksiam_jmlfZGsl_44,7772 +torch/include/ATen/ops/mkldnn_reorder_conv3d_weight_compositeexplicitautograd_dispatch.h,sha256=1R4plubq8sLg-asv5Io_EWN4ujIO-uuBo4s1QVrjM3Y,1757 +torch/include/ATen/ops/mkldnn_reorder_conv3d_weight_native.h,sha256=G0KvlTt5e3uq1e9DGzgNZC33l_auBF0qRWc3BNLURAE,901 +torch/include/ATen/ops/mkldnn_reorder_conv3d_weight_ops.h,sha256=a_AMsntNoG5WVKVXrtKtkIDsLxjbdtqp7lmdms-dxhg,2728 +torch/include/ATen/ops/mkldnn_rnn_layer.h,sha256=xcUUmD3NhDsNj0ptekf1_7xqtkPNAN2dRUP7hIrb52o,3732 +torch/include/ATen/ops/mkldnn_rnn_layer_backward.h,sha256=pOrP78ytvCF_8unfQNv0qimYCRWmDunX7BDayGJ32n8,5556 +torch/include/ATen/ops/mkldnn_rnn_layer_backward_compositeexplicitautograd_dispatch.h,sha256=Zu4XQF3m3DvK0R9cXwXodiY7v60EiZxatzC5IlY1OyE,2465 +torch/include/ATen/ops/mkldnn_rnn_layer_backward_cpu_dispatch.h,sha256=UwF_W-oM6nMCFphzm_6fzRJvw0OnTmKeSTyMIwrHxiY,1392 +torch/include/ATen/ops/mkldnn_rnn_layer_backward_native.h,sha256=YHccmQd8ZOxc568gvwvw8E7y9LfMf8ZB4n11EXKbhZU,2025 +torch/include/ATen/ops/mkldnn_rnn_layer_backward_ops.h,sha256=89I_z7JUJcc0G8_0-JOKbuE7a-BZWsLobybNT0uQhL4,6517 
+torch/include/ATen/ops/mkldnn_rnn_layer_compositeexplicitautograd_dispatch.h,sha256=9hMvweCvF-PAgR_aQ75FP-yAhd6xz7llMQZBjpGBkUg,1761 +torch/include/ATen/ops/mkldnn_rnn_layer_cpu_dispatch.h,sha256=EcOtf3uG9B90tGTX9B2nlGegs2YcfWx6XOYcuOaaHfg,1103 +torch/include/ATen/ops/mkldnn_rnn_layer_native.h,sha256=-BZMyuW-TpvCxn8sUEozUF5CxVzk-K9U1OtjHsYMh6s,1384 +torch/include/ATen/ops/mkldnn_rnn_layer_ops.h,sha256=jr5Hb6gxc_UWwoYzEwWY8miUZ3PqImbq1euvHz7R3QQ,4380 +torch/include/ATen/ops/mm.h,sha256=uabnpWeYonQ8q-fJZCM_cm0NWKSnzYoOVEas1aktksA,1096 +torch/include/ATen/ops/mm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ReNeYLeW2Oo6wzkTDXZ4WYv2FThtaA1Lh1D3--tv8DY,811 +torch/include/ATen/ops/mm_cpu_dispatch.h,sha256=5PdBe8AGnCEzTroRbrpiF7CZ-W8_9SA9Y-0XcQaCA2E,940 +torch/include/ATen/ops/mm_cuda_dispatch.h,sha256=X4dZ27WGvRRncGVUsFNOB6GPpXRRZG6BCm_ZCtZtJmM,942 +torch/include/ATen/ops/mm_meta.h,sha256=-O6RtRv1J7HAadw12132vRAul9bUnS3Tp3bM4wV4S04,596 +torch/include/ATen/ops/mm_meta_dispatch.h,sha256=jYXxC7gP-lJ6pE0E6clafVELaW7DDAbWShDhX_l7hsk,942 +torch/include/ATen/ops/mm_native.h,sha256=PN6y8-O8Lczva2LpEEugPDXInydyFimec_amTZZn3WU,1164 +torch/include/ATen/ops/mm_ops.h,sha256=srs1mZNgRBHVV9cmYIvAfozeGYYKORZyhF6-T_ZJPnU,1732 +torch/include/ATen/ops/mode.h,sha256=JdnArilUjjLKTjNGnS9QzY2E6cUqIcVApbwzCpkB79o,2598 +torch/include/ATen/ops/mode_compositeexplicitautograd_dispatch.h,sha256=yVjw9RBIyefmqmqCh4IwYpVi-ZDfiWk1VhlndK54bm8,1030 +torch/include/ATen/ops/mode_compositeimplicitautograd_dispatch.h,sha256=2cjpJiDxzaczkC3bcy-FxnFkC-rp__vLee7UyN4BAY0,1149 +torch/include/ATen/ops/mode_cpu_dispatch.h,sha256=LXChVP5tLSI6szhpdOuxZ0SkCf9o2Hvu-brMglWFvIA,779 +torch/include/ATen/ops/mode_cuda_dispatch.h,sha256=kVBHurRgXiDpsiIHkEE3vzPMd1dTBH3YJgw2pqCHPsE,781 +torch/include/ATen/ops/mode_native.h,sha256=u6YFmXD_fLisSLiryqryPaK0NUdtWtZbgXS-RGo5b88,963 +torch/include/ATen/ops/mode_ops.h,sha256=LKWYW2ElfwHMAUuNppEqYi9Jqx9Xrc2wsI89WT3-LYU,3802 
+torch/include/ATen/ops/moveaxis.h,sha256=EVdT5rsIJjnGGcg_87f5Uy73wFiZqqCQos9t06thBF0,993 +torch/include/ATen/ops/moveaxis_compositeimplicitautograd_dispatch.h,sha256=63FIf4CY-v2SiJFepjN4hutOjXSIXSkyinG5xgS37U8,912 +torch/include/ATen/ops/moveaxis_native.h,sha256=75kgu40C_b7fQDr1k8AX2m3-fRJI2bikdKLvobwcOCw,624 +torch/include/ATen/ops/moveaxis_ops.h,sha256=SRVhVvlvH5GkF5cM8-OCCUS0xNTses5yaTjb6XxgRGk,1852 +torch/include/ATen/ops/movedim.h,sha256=szzEmX1dxu--mxidgDhME0Twb9r1nC5_xMTT9lepPaA,986 +torch/include/ATen/ops/movedim_compositeimplicitautograd_dispatch.h,sha256=wB-0aaLseyYN1YOEQCyKxxsx2pJJ1UbPqDm2Jcb4z08,910 +torch/include/ATen/ops/movedim_native.h,sha256=K607vG305fCJuKIz9PmhgDLX3XxnsXBkSknVhEU8fgQ,622 +torch/include/ATen/ops/movedim_ops.h,sha256=O5BpI4KFlJfMsU0Cjxw49pE5Qv9dZGXRXaXMQwSe_uI,1846 +torch/include/ATen/ops/mps_convolution_backward.h,sha256=-ck1FvNXrZuPAUOEXaFAWOurC-PLhMjPDUSBl4pDHUc,8960 +torch/include/ATen/ops/mps_convolution_backward_compositeexplicitautograd_dispatch.h,sha256=Ll7yey86REXlxLuN7b7LmdCESa3p4zstiVMjtCQoNPs,2194 +torch/include/ATen/ops/mps_convolution_backward_native.h,sha256=Sqs1r0LE0hYYMHiSAV01C9mt_M7S_80o4qu3cB_5Q-k,804 +torch/include/ATen/ops/mps_convolution_backward_ops.h,sha256=H4EUHaQCFcZGP7fwOsOL0vXP19-3YIgTcaGU4ywIxIk,3437 +torch/include/ATen/ops/mps_convolution_transpose_backward.h,sha256=dtehR3gLbZknB5boc4X0mGDMXjVHzOmtLxu1wAH35vU,9696 +torch/include/ATen/ops/mps_convolution_transpose_backward_compositeexplicitautograd_dispatch.h,sha256=6kAymuyRJtQ6yai2CYT_r2udh2LSas9MTt0d_5WrhyU,2242 +torch/include/ATen/ops/mps_convolution_transpose_backward_native.h,sha256=TarCja-zmFPKKvTr3JNjqks9k2MVYMoTvAjjuN1vfMQ,818 +torch/include/ATen/ops/mps_convolution_transpose_backward_ops.h,sha256=XS9wqMIg3yA-784TKbZfoRYIV9uLSNxcsDLd1w13XeA,3572 +torch/include/ATen/ops/mse_loss.h,sha256=s9b1n9MamKtU3_WD0rwxPK7n8B2IvmXYTT-d7e00m6o,1364 +torch/include/ATen/ops/mse_loss_backward.h,sha256=tAfgGQU3t3bRREjSAa1L6ob4b_GxI7TkFMs9NAwAGC4,1664 
+torch/include/ATen/ops/mse_loss_backward_cpu_dispatch.h,sha256=prsBkiByBVplwQwpPL6rcwkuHJQHzFumxoqWe86wMc0,1158 +torch/include/ATen/ops/mse_loss_backward_cuda_dispatch.h,sha256=k260nCwJOwDhqtC2PHfTluBlqWSBKxqaZlKYZJgGi60,1160 +torch/include/ATen/ops/mse_loss_backward_native.h,sha256=SGmGq_W7a4Kn6Joao4OAk_vjLCe7zXDUS8TqEaFiISk,739 +torch/include/ATen/ops/mse_loss_backward_ops.h,sha256=SlQZDPEX5VzryZk49GGx0iUXtdmwirk4PmaHUoARWBg,2208 +torch/include/ATen/ops/mse_loss_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wzoVe5Ba0jGdHzrYtqkpLp8aK3DJDDJqAKMtmzPLsAQ,858 +torch/include/ATen/ops/mse_loss_cpu_dispatch.h,sha256=6ZVxCEppchCoCnWGbfhNpNQwIKv8_WEDpyJCCC3fo04,1061 +torch/include/ATen/ops/mse_loss_cuda_dispatch.h,sha256=ZP4HTD0MG8FfGl5OWy4IvOHhrMqswu_wusCfAr46I0M,1063 +torch/include/ATen/ops/mse_loss_meta.h,sha256=93NwqAUs-mdvp_gslLQLoIv3RBsthoV4KaED3GNjlis,623 +torch/include/ATen/ops/mse_loss_meta_dispatch.h,sha256=wH6I1oRvOpI6RSnxkIYkyHoJ_yKFXX6x0KkFS-xjm2w,1063 +torch/include/ATen/ops/mse_loss_native.h,sha256=u8QBXgjIrITIUuZi081t3XwJGuZPp2gbJd5-PvmAi9E,648 +torch/include/ATen/ops/mse_loss_ops.h,sha256=fQdczsZdhJvPgw1dAQ5lfKr3md9A8x9oMNqFWBM_Dqo,1914 +torch/include/ATen/ops/msort.h,sha256=u0T5VK268bNPt0T3J2MrHfSjzfPSf5h-llWh6RpjPJs,994 +torch/include/ATen/ops/msort_compositeimplicitautograd_dispatch.h,sha256=9Az46MGiuCu0enLjDsr61JyiCbNcoXskzCDxdmUzFPQ,918 +torch/include/ATen/ops/msort_native.h,sha256=ZOWRFwc2-XguWwPz6zRWuGZTgDHv6NHNZfMDIARQQpg,552 +torch/include/ATen/ops/msort_ops.h,sha256=wOoWbvh4-M_Q_Gy08oyCwaHat7q5m2kEC3WsvaEtc9Q,1584 +torch/include/ATen/ops/mul.h,sha256=oAI8WGL6saw10OlHSQv1yNpezDOvg2CKx_kwX9MvpIs,1810 +torch/include/ATen/ops/mul_compositeexplicitautograd_dispatch.h,sha256=jEUDiEx5YLIIzB2n7F1C1JR2DKu7kyBX5uWh544STWE,1064 +torch/include/ATen/ops/mul_compositeexplicitautogradnonfunctional_dispatch.h,sha256=4Cxk7Qbusyzq3Hj-wvG6lPHkKlk3JaLhvl-wUboT-6Q,887 
+torch/include/ATen/ops/mul_cpu_dispatch.h,sha256=PubA9Dmbd9biBychi74OR_taVAhP4Q3D4ZDxDunh6rM,1020 +torch/include/ATen/ops/mul_cuda_dispatch.h,sha256=pT_BMvqChXy5ih3nf4V4aCxMoanv2ZYdM6maN7r329I,1022 +torch/include/ATen/ops/mul_meta.h,sha256=98LudzEDsVRCVKA9wpXAjhSSl4Zr4bze7mtRCFrAq2M,605 +torch/include/ATen/ops/mul_meta_dispatch.h,sha256=uCHg0P3BoO1Mf07_NsrUU6c4DNnPnqubU3drOWzbFmA,1022 +torch/include/ATen/ops/mul_native.h,sha256=61rocPkSV9h-vAdp72q5zCKvpvKF0M_ChGxVkpIgvww,2484 +torch/include/ATen/ops/mul_ops.h,sha256=e8IgaZ1oeV65OcMsll93oPzMVupN_SvADoWy-BjUYFw,4373 +torch/include/ATen/ops/multi_margin_loss.h,sha256=52aD948VL48_TciVO4P3Gd7LHiAHhzPGl0riP_VDaGI,1954 +torch/include/ATen/ops/multi_margin_loss_backward.h,sha256=1KrrkrQTLcFwiajS4OCExBX5M43YWoCeHbVuJL2DDVI,2289 +torch/include/ATen/ops/multi_margin_loss_backward_cpu_dispatch.h,sha256=D4hzTedMU_p7kBUWseq_-AyvTEJKQtLON0WQjk2BZ-g,1510 +torch/include/ATen/ops/multi_margin_loss_backward_cuda_dispatch.h,sha256=uU06661kXvpnsKC-nW7f2W7_ORSrGSvRtrSdBOWPu54,1512 +torch/include/ATen/ops/multi_margin_loss_backward_native.h,sha256=uneg8VlE0CHICvf1SZYlauWGUpb0gPXBSk4QnTBbuYM,1528 +torch/include/ATen/ops/multi_margin_loss_backward_ops.h,sha256=iYCs-lzshQ3OWmR6R1zsU_XiBblJKOaKgZRWQ5TRrGQ,2890 +torch/include/ATen/ops/multi_margin_loss_cpu_dispatch.h,sha256=8vuVp8lt6jZPRGs7hbdKkT4bw5iJNgzBmMLIvDGqiEM,1381 +torch/include/ATen/ops/multi_margin_loss_cuda_dispatch.h,sha256=TiESz-gnu-pXt_8srOmrP3_UHWMwwzFlbESgCDG6PJY,1383 +torch/include/ATen/ops/multi_margin_loss_native.h,sha256=aZg4CpWNPQa3oS-E-epdaZzKE3nqoakwqU02ER1CKzw,1358 +torch/include/ATen/ops/multi_margin_loss_ops.h,sha256=nd-XrNDLXV_uxIA5JIKDgaPC1FxloVS7t5nurxYNzvc,2594 +torch/include/ATen/ops/multilabel_margin_loss.h,sha256=xKIK9qGfaI6XTMzVDn98zybK0aHtma3W5_VoLSxN6G0,1504 +torch/include/ATen/ops/multilabel_margin_loss_backward.h,sha256=4jzJyQ1Rtww2BVjtxdEgQoXS5WM6FUzfr1BBZrAtzOw,1981 
+torch/include/ATen/ops/multilabel_margin_loss_backward_cpu_dispatch.h,sha256=tHGtNMwj4_hMZc0iYfI7pKc4V6v-lC6d-0PYQFpDDZ0,1290 +torch/include/ATen/ops/multilabel_margin_loss_backward_cuda_dispatch.h,sha256=ztcKAbtClLZTFmf40Wi17o-_h9tf_QBy4YqAZwMMJXQ,1292 +torch/include/ATen/ops/multilabel_margin_loss_backward_native.h,sha256=HV34WBrXOYWB448q-y-opHDfJzm7CoRNm_IMEqfCzrY,1250 +torch/include/ATen/ops/multilabel_margin_loss_backward_ops.h,sha256=B4zb_qlzKgCZ0kCSn_0fH6D9JeQoDO51M12VAOPLJ1w,2488 +torch/include/ATen/ops/multilabel_margin_loss_compositeimplicitautograd_dispatch.h,sha256=78u8wCES5KDdQni7yL6CIWml732XLYsgeNZg2IMJSKE,1147 +torch/include/ATen/ops/multilabel_margin_loss_forward.h,sha256=lx5Ea2yakyB9ndblOreF7Ae-Uzg38GiKYA8lxhBb-VM,1807 +torch/include/ATen/ops/multilabel_margin_loss_forward_cpu_dispatch.h,sha256=IpM-D4rJyMjV8vt8vfaBPyFlimLBp2m15_nPOkyZXnQ,1220 +torch/include/ATen/ops/multilabel_margin_loss_forward_cuda_dispatch.h,sha256=bkrgCtg7hYcRFM2agOfQYADEZhNcLVHNTPfGsakARDs,1222 +torch/include/ATen/ops/multilabel_margin_loss_forward_native.h,sha256=2P38XAc-SCFtqPLsmmKAUXAV2xR7tOiD4Ng3F7xGTYg,1142 +torch/include/ATen/ops/multilabel_margin_loss_forward_ops.h,sha256=WVwNb5emXeoLlciYv3_PO4FQScE0urrR1kzaDUrT3pc,2335 +torch/include/ATen/ops/multilabel_margin_loss_native.h,sha256=ikXiNt3UCrUk8tvHg2wtmk1gSd_9R0rnwxdMuHq23Jg,698 +torch/include/ATen/ops/multilabel_margin_loss_ops.h,sha256=MOgt5N9h_Ee1r6fJ2Sg0jDdDxDdWkXsktGXcdHdcRXo,1998 +torch/include/ATen/ops/multinomial.h,sha256=hTyqZRx0YCd54zJK_SZ3ZasTvg4Ll6MvbaAkg9RN6Uo,1657 +torch/include/ATen/ops/multinomial_cpu_dispatch.h,sha256=ik0cox1asfL1OESuXNB_mh8LbXhMI6qA7F8vqTfXmVk,1177 +torch/include/ATen/ops/multinomial_cuda_dispatch.h,sha256=3N07AgY_-zIcfbQbq62c_nDRB5WFdLowZiKHEOXnqL4,1179 +torch/include/ATen/ops/multinomial_native.h,sha256=cyw_Mm01brAZ7f7U7rkTgjEShFeNyVMzKkZS5S-TU_s,747 +torch/include/ATen/ops/multinomial_ops.h,sha256=QXLTkQZeqRWoD4yh6ed4TZ0-QIwPWU4HCdapJb0qRfM,2177 
+torch/include/ATen/ops/multiply.h,sha256=AzRfVfcH7bRBBmqZC3wYUAMIPregAjrJMFSPt-Zly_4,1381 +torch/include/ATen/ops/multiply_compositeimplicitautograd_dispatch.h,sha256=JVc8j_zycpTTwTEds50BsZk3ARMCKnO18xq-tEkdv54,1245 +torch/include/ATen/ops/multiply_native.h,sha256=bsrxDs3fj2MO43HiiY1r0AB5-O818qjuBPcr0RPtG-o,850 +torch/include/ATen/ops/multiply_ops.h,sha256=VPBhsE70bMMKo0rDVs3M5V4vW_B3PI7DA5AKUnVOm8Q,3727 +torch/include/ATen/ops/mv.h,sha256=ZUVcA-FEsEpPHsQqzdkg3AuAcQ7L8vdFoXL8Fv_FQbI,1087 +torch/include/ATen/ops/mv_compositeexplicitautograd_dispatch.h,sha256=lZCLFGCvxrwdhMImb-9WFpCfholt8zwix1JHc7p0zpM,981 +torch/include/ATen/ops/mv_native.h,sha256=5RV50Poh9yxJ29KzZtObih-RJTT-tUoTxNgkc8FdApc,675 +torch/include/ATen/ops/mv_ops.h,sha256=hQx3m0t08zLcaCbrEk6oUYpsVY-r0-G1gxwd9rR5244,1726 +torch/include/ATen/ops/mvlgamma.h,sha256=Wo9vSyIkDi1_Xv8l0KnTX9cUrykiS8Dir_rqlJg7vU4,1087 +torch/include/ATen/ops/mvlgamma_compositeexplicitautograd_dispatch.h,sha256=JfMNo-T0tAdZIlt_AK8532RSvdJJGCepN6YkG2d0x5M,841 +torch/include/ATen/ops/mvlgamma_cpu_dispatch.h,sha256=U5q_MQLMvwJRvK2Ri2oEtfv4kN_ov_y97lZJpsnzeGM,849 +torch/include/ATen/ops/mvlgamma_cuda_dispatch.h,sha256=WYb2MIECUwJWCGovItVl7YXftKqjv9t6dJO0_FlVO50,851 +torch/include/ATen/ops/mvlgamma_native.h,sha256=HPbnWWOAWlsXxDRi342ZJvFAiJ1rWynlCEoQRP-10SM,644 +torch/include/ATen/ops/mvlgamma_ops.h,sha256=LWS38cGI49Sl2FninwLjUXay-LOuN9RzZU25DJMWS60,2254 +torch/include/ATen/ops/nan_to_num.h,sha256=rhmKl2Vs6lPrgHF5_5GsubiwziLHnBlH84wIeTEuYfk,2017 +torch/include/ATen/ops/nan_to_num_compositeexplicitautograd_dispatch.h,sha256=qRnI0waPAW2bdK9ORwqlvbuFtFsmUsllFi9lojo0opk,1099 +torch/include/ATen/ops/nan_to_num_cpu_dispatch.h,sha256=WEmxvQzCzYLv0JwlKsDSotUe4yRXZD5qCpSyKJsyAdo,1062 +torch/include/ATen/ops/nan_to_num_cuda_dispatch.h,sha256=DQvGmtTbZhliEt2htxmzLujaQPB94uFrSYxEt4_0ygA,1064 +torch/include/ATen/ops/nan_to_num_native.h,sha256=JFUbouGH5kzN8GqaxskZiOmIFg-HpHT6ogI9CU8-JvI,1571 
+torch/include/ATen/ops/nan_to_num_ops.h,sha256=5H8WkUx5xL0kUfo3Ml03GahjMjCoXZQPnOcS75kuSLQ,3112 +torch/include/ATen/ops/nanmean.h,sha256=4LIM16lPb3rCypv7N-QGQROKP-NIxlJtE__Yj4NTkek,1584 +torch/include/ATen/ops/nanmean_compositeimplicitautograd_dispatch.h,sha256=04JYzJ8PWiHknDx6jHgh6T2Ty5WV3XWat-6Mnu6awrg,1242 +torch/include/ATen/ops/nanmean_native.h,sha256=SrxJt0foAtuU7bWzmdCbJ96Z_nc0UI0hDNDJOX6wMTg,756 +torch/include/ATen/ops/nanmean_ops.h,sha256=BOMv6omsNdypm1D9gjjfJSjf8uoLlRYQj_Xg2riwHaY,2179 +torch/include/ATen/ops/nanmedian.h,sha256=xeMzKtpIjyFn8fH_LoInA_4YDRyBjwK3FaAUj3iqDtw,3278 +torch/include/ATen/ops/nanmedian_compositeexplicitautograd_dispatch.h,sha256=iIxLfSVV3FxrCFhK7FTSjyloybUN700orv8HItOEahE,988 +torch/include/ATen/ops/nanmedian_compositeimplicitautograd_dispatch.h,sha256=8Nh-81uGIeKupoirioYUEH5S_nhKBNsg7M5fW4gCOAU,1164 +torch/include/ATen/ops/nanmedian_cpu_dispatch.h,sha256=vctDDPRonNKcbe-RGaJdAXaLXgT1SVMUZbnQqFEswRc,1050 +torch/include/ATen/ops/nanmedian_cuda_dispatch.h,sha256=5JLcRz9B0xyLsNGWQ5xz19kPG3XqYS8Hh7pXNK5dZ-g,1052 +torch/include/ATen/ops/nanmedian_native.h,sha256=tXESTHMpYOUgyIcWRxCKE6WgfkqMmplWU6LGTqt9ojI,1353 +torch/include/ATen/ops/nanmedian_ops.h,sha256=jDVyWJLG6uFFzRlG647ZjHUuO-nNaL2hEJ2r3WfcEvs,5074 +torch/include/ATen/ops/nanquantile.h,sha256=5RTyaZeLfT1AWUtPrZURE9ae1Y9biH38gjIMEwprQQY,2971 +torch/include/ATen/ops/nanquantile_compositeimplicitautograd_dispatch.h,sha256=bkAXVMQhD5eyhizwkif64Yj2P14qDFb_a0uqIyzsscE,1834 +torch/include/ATen/ops/nanquantile_native.h,sha256=cCHnWGxkPJmGFmlQ2w2as02iUqr9ZV4jo2dZfiSXDl8,1134 +torch/include/ATen/ops/nanquantile_ops.h,sha256=av9-DLOPJyEhyp5trNUjs1YpMNPPi_Wal6iTBYFH9io,4135 +torch/include/ATen/ops/nansum.h,sha256=G2OQn4WC8RR2nVUtu7HHYGDD8pO16gHT3XPhGxkHWRE,1574 +torch/include/ATen/ops/nansum_cpu_dispatch.h,sha256=SVSAfgp_ZW9QRsF9qba6bg4iGZDf6RW-gQjHKVdQdcE,1195 +torch/include/ATen/ops/nansum_cuda_dispatch.h,sha256=jZ2aklnXDePzOxP7A_8QpkolPJAq1ZR3ffppN92Airo,1197 
+torch/include/ATen/ops/nansum_native.h,sha256=FRKCUYHTDZLWZGvB8RuReB7Z_QsLpGdtwp3b5bgZAOk,754 +torch/include/ATen/ops/nansum_ops.h,sha256=AgRmnGVTA4AXs05wTxWl05mJjpWK9UzpXlPV-4glXXU,2173 +torch/include/ATen/ops/narrow.h,sha256=QKzwKWoxFAc-_lgmOKvGgejiwAA7KQYz601yCqfaazs,2635 +torch/include/ATen/ops/narrow_compositeimplicitautograd_dispatch.h,sha256=oyEHgOR4sIWBFraHk2R3t_-9LN5NDaWKPrvv2n1MKs0,1150 +torch/include/ATen/ops/narrow_copy.h,sha256=e3CN9Pjl8-OjcNwuSRYixmzTdxEGr22BoE9EtszfNho,4091 +torch/include/ATen/ops/narrow_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WD9km1plJ9m4iDKP7zllGw1kkhGoi-LvksHuHsin80s,957 +torch/include/ATen/ops/narrow_copy_cpu_dispatch.h,sha256=aqWzrVAs2JtXkKTv_2XcpCfdu_emEKYGz-4tNVZK5aU,1427 +torch/include/ATen/ops/narrow_copy_native.h,sha256=i-wr0MPyn9357GtyZhcW3qCxHXzDnZA7GF117eLFBVc,906 +torch/include/ATen/ops/narrow_copy_ops.h,sha256=1k6vOgk6bD7_QhmgdQziCCPWe4CDjqG8_gBbNpypcak,1974 +torch/include/ATen/ops/narrow_native.h,sha256=W8OllgEnpJzpjevj5qoJ4wnZK8ViJfvJ2aOe38F0b4c,782 +torch/include/ATen/ops/narrow_ops.h,sha256=vCERx00f6FfuVPZDDDK7cDWGj3QtkYf_RNudQSlcW_0,1907 +torch/include/ATen/ops/native_batch_norm.h,sha256=YaWY9MwwynKQcZSC1D9QT1S6yzUQI1FejVsUIVksMpE,2779 +torch/include/ATen/ops/native_batch_norm_backward.h,sha256=_gvDlmnEcgVkz3mA65zcF9QdqTieG_rio5tcVBOF3Pg,3313 +torch/include/ATen/ops/native_batch_norm_backward_compositeexplicitautograd_dispatch.h,sha256=42oFXxwUu-kBJ7cbhaobVxtu75bduf1QUlP7PrqOaVE,1719 +torch/include/ATen/ops/native_batch_norm_backward_cpu_dispatch.h,sha256=v_D1ZtMiTAyb5imNTgl9bA4_TlDWQbEDVtWZr8QQv6c,1103 +torch/include/ATen/ops/native_batch_norm_backward_cuda_dispatch.h,sha256=3NtZvdoJ5JbcKG2IZIyWCjxokt7DHjoL9yPEingr7hg,1105 +torch/include/ATen/ops/native_batch_norm_backward_native.h,sha256=OzikzF844h1kwhJ1UCBM4x5G9MCej_CY5N9hKrK_a2E,2232 +torch/include/ATen/ops/native_batch_norm_backward_ops.h,sha256=PK5sh7ZMudKbIDn3xGdNGz_PoFDa6iuasDdJAfQOp6w,4235 
+torch/include/ATen/ops/native_batch_norm_cpu_dispatch.h,sha256=e8-Xy26Rr8zJoo99L_DVfeO3BHYDZ_aBnjpRhrm_1wI,1816 +torch/include/ATen/ops/native_batch_norm_cuda_dispatch.h,sha256=wMYs2QM3mdRTTtygh1PGNL0LDMKy3fjvg-_QwhbTDLQ,1818 +torch/include/ATen/ops/native_batch_norm_native.h,sha256=Bp50N_e6XLCnzClgCNGVLSphgmEPa7lgnXXHZgrPNqo,2223 +torch/include/ATen/ops/native_batch_norm_ops.h,sha256=tzH7mgw0kntc55e14b2q2YWvkQcU1f_yDCXV3oALUns,3594 +torch/include/ATen/ops/native_channel_shuffle.h,sha256=BW-h07xhz3Kl3NWUPDFUWTKZNnIuRYqP8VCeGtpQLEg,1468 +torch/include/ATen/ops/native_channel_shuffle_compositeimplicitautograd_dispatch.h,sha256=EgPaeX8pigfObD21AIgI9rq7IDkJUv6WwUDBKAc9jkU,893 +torch/include/ATen/ops/native_channel_shuffle_cpu_dispatch.h,sha256=ckCrqMRnuAkwJBAZ84zT3YwRxbT9f57rj-nL0Zh8d84,849 +torch/include/ATen/ops/native_channel_shuffle_native.h,sha256=eF39fOvuFi54Dz7V40jQ09KpSazjkvAT4oJ-CXFuSfQ,589 +torch/include/ATen/ops/native_channel_shuffle_ops.h,sha256=UHUquz5B_u-8FIO86QJLcXYJtixi7w52aDolEozIr-I,1083 +torch/include/ATen/ops/native_dropout.h,sha256=JOyBKCNIqQMANNAV2T37VXUZX9i2krc8mm8s8EsBJgw,1513 +torch/include/ATen/ops/native_dropout_backward.h,sha256=pW3CNZaJj0TxEsV2LTCszkhMLAPQhUEZSzfmyJ9XQeI,1471 +torch/include/ATen/ops/native_dropout_backward_compositeexplicitautograd_dispatch.h,sha256=U7WG2d3WTPdD_N8R0MTlRKtxzRbvGDzB8sL21JqHo78,993 +torch/include/ATen/ops/native_dropout_backward_cpu_dispatch.h,sha256=1bORFhHM3PR9-gzAYkolIhplknpTrNFVzax0CauxA6k,783 +torch/include/ATen/ops/native_dropout_backward_cuda_dispatch.h,sha256=5rlBnKFiOAEU8mGr4Rw3f0Ao9Ort-GtWlnjpj0O06W8,785 +torch/include/ATen/ops/native_dropout_backward_native.h,sha256=IjRvSUGzIjSNNaAhRq4XvVQpjsXRay0ZVtfRWRd1zak,802 +torch/include/ATen/ops/native_dropout_backward_ops.h,sha256=pckW5clCMbnqf3oqGzloY2tbRC_AqG0und88ERLdMfQ,1998 +torch/include/ATen/ops/native_dropout_compositeexplicitautograd_dispatch.h,sha256=r8mgD659kCKU0CtpPCHT4WZroimdndXrIqtVMyZOkbs,1057 
+torch/include/ATen/ops/native_dropout_cpu_dispatch.h,sha256=J1t24v-8w8dvEu4pV9iOB-4RoPCySqWbFyqzfD5ilKk,793 +torch/include/ATen/ops/native_dropout_cuda_dispatch.h,sha256=DpRPEEbKAofktJkwzN2J2QCiD5Af1InDfYeCScpgj3Y,795 +torch/include/ATen/ops/native_dropout_native.h,sha256=OfAo8pMMTQaT7ts0wrpefiunlzJ7fk-iQt2myXw3gLI,992 +torch/include/ATen/ops/native_dropout_ops.h,sha256=aPmxBj2lRdcNyDRvbqs8yGASeXp9-0R0zZH6ulo9yb8,2158 +torch/include/ATen/ops/native_group_norm.h,sha256=FnJfxNqRbgGy_K_No_cdUCu8OuBP05tFnBZuUjAi1RM,7219 +torch/include/ATen/ops/native_group_norm_backward.h,sha256=hm3CNMSaL7qk7BUP-FGIkjTdceesCKsIVp6tIVpvlR0,8728 +torch/include/ATen/ops/native_group_norm_backward_compositeexplicitautograd_dispatch.h,sha256=nqzu8K6VcV76s_yBsCDsu71nDUCwD_vWV84GxDsHRgU,2290 +torch/include/ATen/ops/native_group_norm_backward_cpu_dispatch.h,sha256=9mxBLoL-qZ9rUwTbq3AlJGm_xWpLkaZWn7OiopFhhAY,1321 +torch/include/ATen/ops/native_group_norm_backward_cuda_dispatch.h,sha256=MMBHbLHY8UcUkIC4nz9URzC7xWnyOa7fl04eGWDDig8,1323 +torch/include/ATen/ops/native_group_norm_backward_native.h,sha256=WUbuD65QkmhyiEsp35PZnn-V1pi9JcIQObLeEjcAEoU,1144 +torch/include/ATen/ops/native_group_norm_backward_ops.h,sha256=zn7QETKOA4clKYCVe8QwowsDfNR9bP6NQEYgCxpdiuQ,3585 +torch/include/ATen/ops/native_group_norm_compositeexplicitautograd_dispatch.h,sha256=aZYvlIJKnIvFvQvG3Rys29fPQy4Raa5wUEycyHrL0VU,2537 +torch/include/ATen/ops/native_group_norm_cpu_dispatch.h,sha256=bjPQD0fL41trWIbDUhuWtUvtfKZC64RGh_K-w9YJyS4,1185 +torch/include/ATen/ops/native_group_norm_cuda_dispatch.h,sha256=A5FwbDVUfMmvw2DA6bbFJoW3r6zeIB_DAxm8rvRm_Os,1187 +torch/include/ATen/ops/native_group_norm_native.h,sha256=HT_P5twrfM1iy4s9bBEDV9t-Qpjcqxz5uyw0GXKZdrw,1256 +torch/include/ATen/ops/native_group_norm_ops.h,sha256=aaZTC_kcFsM4TbmNHCJv8FB_NbBVfIp1LeQ9T5ltaQo,3143 +torch/include/ATen/ops/native_layer_norm.h,sha256=ua3HRqRDYX9FQ9m6Z7FuIXpQv9pYwLcumfUyn552VoQ,7039 
+torch/include/ATen/ops/native_layer_norm_backward.h,sha256=7dXkj0bm7Sv9K-HLSrYJqMWiV0fYdaRuZnsA824zwXc,9208 +torch/include/ATen/ops/native_layer_norm_backward_compositeexplicitautograd_dispatch.h,sha256=I2r5ZdyaG2Zi7rvNOMUtCmcwFJVMnVXlXMovTKoJ0L4,2378 +torch/include/ATen/ops/native_layer_norm_backward_cpu_dispatch.h,sha256=_QfSUxGGVDe7zSG4gORp85X4oJxGRqeVLqAPGYRIF-U,1365 +torch/include/ATen/ops/native_layer_norm_backward_cuda_dispatch.h,sha256=Cd4QeMXRt6_XNiO1AKvY7dPyUYxkUUOoK_RR06iDn5w,1367 +torch/include/ATen/ops/native_layer_norm_backward_native.h,sha256=3HzipvWLKA56Po-vqO7wgc_xCQT-IX9uDJGlCc5Easw,1871 +torch/include/ATen/ops/native_layer_norm_backward_ops.h,sha256=Ox5VAkO1i8Up2ZAeMP-SgBCnybjKQ43A3OYWsZIzyO8,3673 +torch/include/ATen/ops/native_layer_norm_compositeexplicitautograd_dispatch.h,sha256=QZRBGWVDv2rf9DFooL3xqMEjiXPZLiiCbZtStWVptH0,2417 +torch/include/ATen/ops/native_layer_norm_cpu_dispatch.h,sha256=Zf-n_svH1weG8aJJNATHHEZBN5FRkjUvVil2-E7uOuE,1145 +torch/include/ATen/ops/native_layer_norm_cuda_dispatch.h,sha256=dr0btFvVGwjv2f5XIcegaZqeuFCxpkjVhbNl-oLr36c,1147 +torch/include/ATen/ops/native_layer_norm_native.h,sha256=U1RhBD4CcRW66KepP9DWEmBYJJZl_jlidydxbr-vAVE,1670 +torch/include/ATen/ops/native_layer_norm_ops.h,sha256=RiMb8NBDTBowSvwQ3chu0XlwfcokvP-3KAQxoELkkig,2961 +torch/include/ATen/ops/native_norm.h,sha256=i0Y6uqdMNOMTqzrWc2KU2390MBzyT2DjLZmdXDajfBM,2438 +torch/include/ATen/ops/native_norm_compositeexplicitautograd_dispatch.h,sha256=aqZLrU5I0-wzOTHeZLfIFlwUZoZj-HtqnJakjZduDxU,1316 +torch/include/ATen/ops/native_norm_native.h,sha256=KulsAvvLka9lhEx0jMfTq73EKbC8_fBtSqPhla5hSjk,998 +torch/include/ATen/ops/native_norm_ops.h,sha256=KmiGTt4CtHwAQlnaxcqzf_tGND3Zx-iovLVssFsI_Yo,3827 +torch/include/ATen/ops/ne.h,sha256=38KBxGzJKpEwVzrjOahG091_ON6d3GKNwRVWhxSJLVY,1819 +torch/include/ATen/ops/ne_compositeexplicitautogradnonfunctional_dispatch.h,sha256=0gw8iMctAjcy-Z2Hpb8eo75Imq53Z2GsmVOZPr-MxMw,1034 
+torch/include/ATen/ops/ne_cpu_dispatch.h,sha256=wEEAkM1FWpUvaz-2utdvzOAnm3PnxBdiDy-w2om4mcM,1366 +torch/include/ATen/ops/ne_cuda_dispatch.h,sha256=BuZttwvEAMY1AsP0EBhQkXrIAVYH6qg3P8mZlygb_54,1368 +torch/include/ATen/ops/ne_meta.h,sha256=uiiS8DImrdUKy8mcwuDytSVwjOHaGDogcSHrzF2rbWc,751 +torch/include/ATen/ops/ne_meta_dispatch.h,sha256=vSzkq-l9bjmE5jw-u4khf8OCCah1cgqc_kbXKer1jgk,1368 +torch/include/ATen/ops/ne_native.h,sha256=Y0qvS-fK55WtXMGZ3_LwcyKZJ267KUTTy3yACY1nRVw,1205 +torch/include/ATen/ops/ne_ops.h,sha256=5YSGYg4oDEihLkmaYRarv99DiE_RRWJuFSSKJnz8YgQ,4376 +torch/include/ATen/ops/neg.h,sha256=ebMAz5mJC5FTBH59m_xcGVLNLmYPRgbbJYxRtLbdFmE,1107 +torch/include/ATen/ops/neg_compositeexplicitautogradnonfunctional_dispatch.h,sha256=8fv7s5dGUs5sM86rIZTZGxgn7oHZU349Mjo2GeU0eTU,835 +torch/include/ATen/ops/neg_cpu_dispatch.h,sha256=7euY_fTbQvFB3hm2rbv6Kn2OVEqlrHIa3QRYsPJiydI,916 +torch/include/ATen/ops/neg_cuda_dispatch.h,sha256=-KZliKkCLs-5szk1vYJ-idRfUsS7Wlku3MNcCka20ss,918 +torch/include/ATen/ops/neg_meta.h,sha256=OQ--QNoywwa2uPpjO5FibmMd5WcwjOawDOWZl1w6Fm4,572 +torch/include/ATen/ops/neg_meta_dispatch.h,sha256=VgpbxF9crPHGqcTksaTWQ2LO67i9nsHusQRU9HglkvI,918 +torch/include/ATen/ops/neg_native.h,sha256=FtW7ddS6vea8INZOEekFprjgo_pY5mrb6mswJUmA8zw,1114 +torch/include/ATen/ops/neg_ops.h,sha256=1QVUSd3Pn893aqoKeka7kKF63Y6YHZ7_ROrrM245q1c,2095 +torch/include/ATen/ops/negative.h,sha256=HEJ7mp-1TeRj-Tow0xoAwEZOWwq3AVU7dZFmB1vluwc,1172 +torch/include/ATen/ops/negative_compositeimplicitautograd_dispatch.h,sha256=op-PvxB7IRQOY89z8hgzpEvBJt3HwmIwPOd9bEEtroI,980 +torch/include/ATen/ops/negative_native.h,sha256=PcjvbokvaMLSsrS526TW7-18Fpv1WDWQEGkYlFaV2TM,611 +torch/include/ATen/ops/negative_ops.h,sha256=qbjYNYiSeo5UO_2ABMcNGdWrsBk5BBtAj98Cnd_zK6c,2140 +torch/include/ATen/ops/nested_to_padded_tensor.h,sha256=AIGI57wwD64AK58AcVSkM7LH84t5MSQkyK7m4mcweJs,812 
+torch/include/ATen/ops/nested_to_padded_tensor_compositeimplicitautograd_dispatch.h,sha256=6moNWBp6TyA5XkQGyx3md2g4RYuNi7qrh7Y-Oa_Oero,849 +torch/include/ATen/ops/nested_to_padded_tensor_native.h,sha256=5DHZtN-kteCW6v_6A420nuP5eHxYt69a1m1Vt0JOQ1U,561 +torch/include/ATen/ops/nested_to_padded_tensor_ops.h,sha256=27rDY0mvy4gU9uZljB-YYQQaepawoazNn7mXFiN_9wU,1197 +torch/include/ATen/ops/new_empty.h,sha256=ALsEU69SfQyxmdcULWHRvjezXCgtrkbOITT3zFUvfrY,4352 +torch/include/ATen/ops/new_empty_compositeexplicitautograd_dispatch.h,sha256=TADXCha39FmaxQMON8wzlGAhCbhEJYak0QB6Sf5ehoc,1834 +torch/include/ATen/ops/new_empty_native.h,sha256=IC9s5PtTO8dvTjNPxBfGI4hgBfIb6-Ondk1Tdt-X-ZU,783 +torch/include/ATen/ops/new_empty_ops.h,sha256=rsHOLjXIE-SbrQVoxyv773rD6FN4eKLHuDq-jdcmByw,2280 +torch/include/ATen/ops/new_empty_strided.h,sha256=mlljpIH8A3wUwdSs2UdAejR1jhRyOBDaHYecZb9Yig0,5216 +torch/include/ATen/ops/new_empty_strided_compositeexplicitautograd_dispatch.h,sha256=7gyzd5EWa-1GeI9YpgmpoldiMEepkEIHYDrNHVjJ-f4,1282 +torch/include/ATen/ops/new_empty_strided_compositeexplicitautogradnonfunctional_dispatch.h,sha256=i9MjY0gZlrR4Jylt_pRMZ1bz-f5iXPRqMgyDbXaa2Jc,1560 +torch/include/ATen/ops/new_empty_strided_native.h,sha256=UeofYJipbxTy8iQruskvoNkm4_EwyrLmf9YuR4jMznU,855 +torch/include/ATen/ops/new_empty_strided_ops.h,sha256=v0DztbYHH1wIlVG6iDBrBwMw5IfrlZQNXmirE0giv2s,2516 +torch/include/ATen/ops/new_full.h,sha256=5Bze5Oi0NpEtMzcx52xGXaVmyifPnMlLyO1W_E9TvMg,4915 +torch/include/ATen/ops/new_full_compositeexplicitautograd_dispatch.h,sha256=nSzir5AGScE9p-_o4THWkEJdi3vlyaf23rLaDwjI3XE,2074 +torch/include/ATen/ops/new_full_native.h,sha256=oncOcqst0nVhMwIB7OdL_oz5pxBTjLEV31YSTAMfAvE,832 +torch/include/ATen/ops/new_full_ops.h,sha256=DXcufsbMFtHgGAMmUaVdSKV6gWu2sSl1fNB9-w3CT1Q,2476 +torch/include/ATen/ops/new_ones.h,sha256=mp-bIpBiLrXkUIpCD2p4NmFdKVIVte1_Fys8zmRYn4U,4323 +torch/include/ATen/ops/new_ones_compositeexplicitautograd_dispatch.h,sha256=Nf-FJ3K6dyK4crObgMCA1BcWNRFlMy1PoRLC4wyNew8,1826 
+torch/include/ATen/ops/new_ones_native.h,sha256=T1p7R2cg_kT6AkRYZ6v6LerHDqdy2udKgQz590xPm9A,770 +torch/include/ATen/ops/new_ones_ops.h,sha256=LP-GxhKbrdMgA9k0fhVcW7K8_Z8AzbLop6Aqa6zajVw,2274 +torch/include/ATen/ops/new_zeros.h,sha256=M-WoB_9EN-vft4MxGlsm69LnHmz6AymPlXpNKK6mswk,4352 +torch/include/ATen/ops/new_zeros_compositeexplicitautograd_dispatch.h,sha256=UmRw1md59ZR00A3Tna8yD5EKFS7WsyJmOM8VrRbqtng,1834 +torch/include/ATen/ops/new_zeros_native.h,sha256=HT3nTqcJjfbUtEtPTfJq4OgS6njBdiHk5bqMyyOAXlE,772 +torch/include/ATen/ops/new_zeros_ops.h,sha256=h-BS7xOEw__Uyz1I1tyUb90mVngAT2jsW6xgzxBYocw,2280 +torch/include/ATen/ops/nextafter.h,sha256=7YG5G2pP0JQcLx_ruzOITdZprgRGbO8Xr3OMqHY8TFY,1175 +torch/include/ATen/ops/nextafter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=vYK-Z3NDwBdNmqQX2vSsLBYvoEUs2XpRPveolkEZ9zA,899 +torch/include/ATen/ops/nextafter_cpu_dispatch.h,sha256=LYp7ltIrRJEYMPKE4dN3jbWCdjlbTM_aPbAnUJLpNkY,1044 +torch/include/ATen/ops/nextafter_cuda_dispatch.h,sha256=Lxj1Z4o1WtYT5iVDG6-b24VrV6WA51-NZ3BKSPLakhY,1046 +torch/include/ATen/ops/nextafter_meta.h,sha256=jUVUckgtrFMBqZYegiIGRZWiUzUqb_HYK6hEwdNGEKc,604 +torch/include/ATen/ops/nextafter_meta_dispatch.h,sha256=F9PSZdrbwfFI0LlwM3o8PVIPd9HDsZ136TQ8SOSQe14,1046 +torch/include/ATen/ops/nextafter_native.h,sha256=g-3q8XGPjaaLA3cM40SGI9gFjqwpZgbD0ILU2Btva2I,631 +torch/include/ATen/ops/nextafter_ops.h,sha256=Kt_ZbcIpQsNnCIr_OGVunSjIr0y2H9QlGswmFqjE79I,2407 +torch/include/ATen/ops/nll_loss.h,sha256=qJle7VOeyAP9dpDNs5-y_nPli9VAOwDlTzJwp23MyqI,5530 +torch/include/ATen/ops/nll_loss2d.h,sha256=pZXExmGOu90JQpGJORdCEU_nUNZe6rIgJwBYYc49eYA,5592 +torch/include/ATen/ops/nll_loss2d_backward.h,sha256=dTDV0GnTaCwFNgafuvNXZIWD8bgbIPJ--jq9qkRKCv4,7131 +torch/include/ATen/ops/nll_loss2d_backward_cpu_dispatch.h,sha256=9LDEQzQDY6AamKXGEBefTvbCKRVer-juBOiZ3a0pmpM,2289 +torch/include/ATen/ops/nll_loss2d_backward_cuda_dispatch.h,sha256=PAWKMeZzxOCPkLv3PrR5L4EO0O0spcEAuNs1Ij_WPRk,2291 
+torch/include/ATen/ops/nll_loss2d_backward_native.h,sha256=BSEtRSsF1odTBo-41v5XuX_Fg9mWVHsPMGbkSKLsPbs,1478 +torch/include/ATen/ops/nll_loss2d_backward_ops.h,sha256=W4FiYzF6LldLIEZ0wmDc4l29AIL7pMtOl5UDP3OdxKQ,2888 +torch/include/ATen/ops/nll_loss2d_compositeimplicitautograd_dispatch.h,sha256=RCbvXOotnKwjCx-_C-9iwXPQjHeY6VaOewnAE294Ymk,1973 +torch/include/ATen/ops/nll_loss2d_forward.h,sha256=BQCX2Rv3G8A1hJ8B341hG9ORowI7KvHZ6okxJOUewPY,6482 +torch/include/ATen/ops/nll_loss2d_forward_cpu_dispatch.h,sha256=8y-NCw8Nj7lGlBC8bIghB1tJdCn3Z8YQOsbJKPgIn2g,2143 +torch/include/ATen/ops/nll_loss2d_forward_cuda_dispatch.h,sha256=3hJzjJLba2Z0l6H6Rpdc6FRFgm7h-u79LEFT21zAd_w,2145 +torch/include/ATen/ops/nll_loss2d_forward_native.h,sha256=OdiRnX5QoxtXG6MrmsHVxCKWKljrOuqaxVQxdDkpq_Y,1364 +torch/include/ATen/ops/nll_loss2d_forward_ops.h,sha256=X7llkzlIS3MmopqZhK7Pgy0O_bBJuLewsM-p-1c1sOc,2729 +torch/include/ATen/ops/nll_loss2d_native.h,sha256=CDp2PqKQqyrsfI4W_DlkrpmX7UE1yk8MkaVe0NUvr3o,825 +torch/include/ATen/ops/nll_loss2d_ops.h,sha256=gXP_PM83cVcP6AZIXnfQRScixYBfWht816BtyIXYJKk,2400 +torch/include/ATen/ops/nll_loss_backward.h,sha256=jw-67wkVNC4gMPAEuoyrxISPRwe9BQrdTbmdnwTFKes,7069 +torch/include/ATen/ops/nll_loss_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=3-UjUtwNaOFPvWwVnL_qJCuKkytye8ZJq4QRUkJehLg,1231 +torch/include/ATen/ops/nll_loss_backward_cpu_dispatch.h,sha256=YKWWLlIZjVl2YTu7HnZuAp3_Mzcylt1Pj3JKoRMDqJQ,2277 +torch/include/ATen/ops/nll_loss_backward_cuda_dispatch.h,sha256=7Kvl0VW9V0Kb_W9wyn4vxmp58YniqL0LtpXpNL_TI_w,2279 +torch/include/ATen/ops/nll_loss_backward_meta.h,sha256=eupTKoByCI7jzyxWE2tETjmPb9d1epxkd73_558zAlQ,749 +torch/include/ATen/ops/nll_loss_backward_meta_dispatch.h,sha256=Qm3b0sOpxzjMDl1Z2SvujXQVSod8ZRgkCOXVUDamRhA,2279 +torch/include/ATen/ops/nll_loss_backward_native.h,sha256=4q1FfnrLyTWhvVaNFmzUazuvMlIwNtu6cv8iJbkUmJs,1141 +torch/include/ATen/ops/nll_loss_backward_ops.h,sha256=Q3D_5pAIigIznrbLsX5haVHx0myCauSCBCqd9a4-kUo,2876 
+torch/include/ATen/ops/nll_loss_compositeimplicitautograd_dispatch.h,sha256=EKOAtEtvIyVhnhQFGhBf5ubz0XOlXj7ZPPgXX9rffPY,1961 +torch/include/ATen/ops/nll_loss_forward.h,sha256=pXj8tXiTBCzrNJzcXjAwhJL_pPP03QudXaWeNt3_meg,6420 +torch/include/ATen/ops/nll_loss_forward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=3gJVYghyXXevttYbI1NNT8FDP3H5f_UAQ0Q1EM7K7n8,1149 +torch/include/ATen/ops/nll_loss_forward_cpu_dispatch.h,sha256=g83Uy2yRipG4mnC9fyE2MWONxm_mSE4hyWr_VAfcWNw,2131 +torch/include/ATen/ops/nll_loss_forward_cuda_dispatch.h,sha256=ZM8NseOZKXl2fTPBUc68AVPwb9-NWCJCWZ9BCSSL7TA,2133 +torch/include/ATen/ops/nll_loss_forward_meta.h,sha256=4Xh3oQ6-DXaHFTvF7IZMjACj3qqFDVO0SygyOfReJXI,683 +torch/include/ATen/ops/nll_loss_forward_meta_dispatch.h,sha256=YzZut38BTN48wxdPMto5b7f4V0bgtHI5RLHiX3b_t7U,2133 +torch/include/ATen/ops/nll_loss_forward_native.h,sha256=C1J55wTZqaRq9cvCe5T60EVysZaJ1yklBz4ctdthQcs,1064 +torch/include/ATen/ops/nll_loss_forward_ops.h,sha256=3Fhi23MhfgDv4rSY_PkVxIdzG9Ei3ITxjbrgeylqqfc,2717 +torch/include/ATen/ops/nll_loss_native.h,sha256=8aN5Zkf8oM1uzb4rO1x3DlvxhVi14hYhYLF3IuqqGsk,821 +torch/include/ATen/ops/nll_loss_nd.h,sha256=Opfy_0X_jZk9Qyg_UTJMmxcAQvS8DbyLQ5ymaeUh4M0,2109 +torch/include/ATen/ops/nll_loss_nd_compositeimplicitautograd_dispatch.h,sha256=9q2xay-GOuc-ZkP9iE0cYeSvO8BuzH7-FNqLwezJPM0,1119 +torch/include/ATen/ops/nll_loss_nd_native.h,sha256=TNJb9QS_C4fF5Os7j3BaxRLkayG1mrcOnZtZ-awXw8I,632 +torch/include/ATen/ops/nll_loss_nd_ops.h,sha256=t6VWFX2F_XOkNyKpXxJAacQzaUyJiYzADQ7cVqKpqrU,1375 +torch/include/ATen/ops/nll_loss_ops.h,sha256=P0Efg7LMks2O0fidAGV-8oOmzM2vD-MqumMvmydA1oo,2388 +torch/include/ATen/ops/nonzero.h,sha256=ShF4OneJ90JZhay7wxefsQ5SW1RaU_ruaX3Gv3ii2yc,1014 +torch/include/ATen/ops/nonzero_cpu_dispatch.h,sha256=JVA9iWUJegnvjJsq2QZC7esCIvmq_8cnSqIKzxowGpE,880 +torch/include/ATen/ops/nonzero_cuda_dispatch.h,sha256=NS-AvF2Ad8-8_7bshKBnpna0l3ear6Ifh1dVV-94w6g,882 
+torch/include/ATen/ops/nonzero_native.h,sha256=bTCtCEO1q48jRevJYdoZYei71HFINFqDakpO0WPJXPc,708 +torch/include/ATen/ops/nonzero_numpy.h,sha256=YWegqEVXZdlqX_s-JVl7ArYvDH7pz4w0jYhQES5HN5o,659 +torch/include/ATen/ops/nonzero_numpy_compositeimplicitautograd_dispatch.h,sha256=2M9_IcFfBHE-FLPOGonmDs8s1PaaLDKOkB63BCamvi0,786 +torch/include/ATen/ops/nonzero_numpy_native.h,sha256=IerM7tROFRtXVuGAJrTGkD-M2qabwIXMaM1U7N1JxZM,498 +torch/include/ATen/ops/nonzero_numpy_ops.h,sha256=VjoeHBhzMIde-izNZvrnJjPAPgflSX3ybRVbLrbyo4s,1035 +torch/include/ATen/ops/nonzero_ops.h,sha256=Z9HAVv3WObxN7nFeb31hERrfm2ABveJ9_H-Z-HnSl_I,1596 +torch/include/ATen/ops/nonzero_static.h,sha256=sN7fwCsInDVASds2EX1f_b4-blEw4cXT2J_6UeYFwsE,1336 +torch/include/ATen/ops/nonzero_static_cpu_dispatch.h,sha256=PLbx0ftOcfMXwgwWPUU5pHKPFpuFo7busPF4WmcxvXA,1009 +torch/include/ATen/ops/nonzero_static_native.h,sha256=FN53h68C-WxBN4rSUgibRDaiaIUrrqVTkuZ-V_xbwdE,649 +torch/include/ATen/ops/nonzero_static_ops.h,sha256=I0ikGZjOvSAmdPoriyKmp5KHngRNRbFO0JFv0bxyDkE,1871 +torch/include/ATen/ops/norm.h,sha256=Es1C3Ve6-vKtGXFUrRG8htp_UBo2ZfF3s3uJBAs0zaQ,6239 +torch/include/ATen/ops/norm_compositeexplicitautograd_dispatch.h,sha256=J9QpOhV-MMTqtQ3bYpErP47YI54zTN6uetqYRDD1gCo,1373 +torch/include/ATen/ops/norm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=axfrB--xzxUXGM2lXl4Nc1wVygGRUu7qV9yyC7de-80,1016 +torch/include/ATen/ops/norm_compositeimplicitautograd_dispatch.h,sha256=s_Npi2OBzIVu2FQh8HfRwv0WXjDo6SQ9uEhw2a7tk6w,1642 +torch/include/ATen/ops/norm_cpu_dispatch.h,sha256=oRHvQ2Z0wbeZAUVMs_mSiG_pSvLfTq3EVVaiE4jnd1w,1598 +torch/include/ATen/ops/norm_cuda_dispatch.h,sha256=-0QH5R8yDUDW6MQAywO2ZtxblPoWQ5cRtAazHPmQgQw,1600 +torch/include/ATen/ops/norm_except_dim.h,sha256=spbxDoYtLdd9ZNYTl9HkmMlDC2rVrQqtiyB9UHcHtOY,703 +torch/include/ATen/ops/norm_except_dim_compositeimplicitautograd_dispatch.h,sha256=WFyBku0XyXj6g72Hb-vx6c3ss713V9R94V41HMQH3yw,800 
+torch/include/ATen/ops/norm_except_dim_native.h,sha256=5ELhnSatI2hOFW-dwW8eEBwwmkkv3rhn4JaGt-rZA1g,512 +torch/include/ATen/ops/norm_except_dim_ops.h,sha256=rnTXW55jbLYVd7UVV7eUacYKK-6Z2aJDB7h_1L4hDHo,1077 +torch/include/ATen/ops/norm_meta.h,sha256=oYUeYmwhZapAEo-QdZuBInNDayOOYbgYNMM-fF9TvS0,865 +torch/include/ATen/ops/norm_meta_dispatch.h,sha256=tPerIpHfPUgAePF29glvJpS7FW7DBb3RAx_M48sqfiQ,1600 +torch/include/ATen/ops/norm_native.h,sha256=27UTuH50xUe7tbLaNvG46aNMuexGmLMaRL4QuDWVsRY,2258 +torch/include/ATen/ops/norm_ops.h,sha256=v8yFjgTqwi3KelwVzAz2o_CMf5fiLSM-bqV9clwQ6h0,10623 +torch/include/ATen/ops/normal.h,sha256=csBRULBmid5rra4zsHosgFxpDwJ-nHvpXlbU9Q8eRys,11678 +torch/include/ATen/ops/normal_compositeexplicitautograd_dispatch.h,sha256=6tdhh8ylOxipXPELHt3va33ArCWClxNDnx0LiyfuGeA,2667 +torch/include/ATen/ops/normal_cpu_dispatch.h,sha256=fuuqby3n7A6tlry4f_rbmHhiOVkrCz5JDvr4Mi_PaVE,2052 +torch/include/ATen/ops/normal_cuda_dispatch.h,sha256=KuAPqktLKuGNbecRsplUJKjb_7lk43ZIG7Ek0b8K4NQ,2054 +torch/include/ATen/ops/normal_meta_dispatch.h,sha256=v2wWumBdYl_cwWdT_Nb2-vHExk74YiSN_N_Tzd7alM4,2054 +torch/include/ATen/ops/normal_native.h,sha256=oGXjyH2LrPuX33WLHggKgF5r3RkkTYLr6sFxkxBg-PQ,3333 +torch/include/ATen/ops/normal_ops.h,sha256=tyxyUkq0Cn5KvtiKSLUJE2HM5u_WzG4m91D8LuUjtLM,9974 +torch/include/ATen/ops/not_equal.h,sha256=cxFEy0bAs0ptFYb76KOHZ9sRuIPgd-LSzw1PukAdbj0,1952 +torch/include/ATen/ops/not_equal_compositeimplicitautograd_dispatch.h,sha256=Hv7pe_IWW23uaET-h9de1yltqNvplM1rgp8QFiBY7Mk,1466 +torch/include/ATen/ops/not_equal_native.h,sha256=NLcBfqMHl_EOZJyrJq03leJFLAG9j01Xx8fFVVsFToc,962 +torch/include/ATen/ops/not_equal_ops.h,sha256=iFU_wsR6ShtNLgyQdyrh1EJ7fveQfAkhT04Dn6_Dz7E,4502 +torch/include/ATen/ops/nuclear_norm.h,sha256=R5ZuexSTkiApE7qYkBETBWMp5bxFan8WuJwthqIFYDs,2063 +torch/include/ATen/ops/nuclear_norm_compositeimplicitautograd_dispatch.h,sha256=vhRWRYRPERaidyNV-hbowCBZ2HlABPaku9map-l-4Bw,1339 
+torch/include/ATen/ops/nuclear_norm_native.h,sha256=GT6ymZ46nK4JSuOJPfbY01tD-k2V1ajInPm_LH62rbA,820 +torch/include/ATen/ops/nuclear_norm_ops.h,sha256=wzB4EX4cQaaEPzGpNSSkGS5-IxRhkVxS-dKC_k9XgpQ,3199 +torch/include/ATen/ops/numpy_T.h,sha256=SMAXLlxWEJf1edN8_tJFNRubc4b3DpWjE6QvacX1eFQ,481 +torch/include/ATen/ops/numpy_T_compositeimplicitautograd_dispatch.h,sha256=OCNdHot_8eP2h9wLwO048woz7aS8pqQo8mcIujot0Ac,765 +torch/include/ATen/ops/numpy_T_native.h,sha256=d7ueS4hLvlkLxtlVJCYUIA4GxLaPH7HS10ZWKj5wp-Y,477 +torch/include/ATen/ops/numpy_T_ops.h,sha256=R2k6SuO6n-6vrijKtlTPyDbeQDUYpcB_Xvw0VEWBp5E,976 +torch/include/ATen/ops/one_hot.h,sha256=XqRgl_62uE4c8SmuDiAGEDitKDBGH9ySJltj6CBjI2U,675 +torch/include/ATen/ops/one_hot_compositeimplicitautograd_dispatch.h,sha256=tVy4fq7qb7U4eeTz-cPl--TBb0Ee4jVUOWQsKswy5dU,789 +torch/include/ATen/ops/one_hot_native.h,sha256=72PXxcr9ENI-1KvJH0C6g5IvttRWEZigPsu-lLKP5P0,501 +torch/include/ATen/ops/one_hot_ops.h,sha256=SvV4c3DrBPavuroVvyMn-ZQ1l35QP3yeisqA84N6rqM,1041 +torch/include/ATen/ops/ones.h,sha256=aadykRYpFCX_gb_HWw9GzMLcAEvYKh1NCRaQtJau2KY,6846 +torch/include/ATen/ops/ones_compositeexplicitautograd_dispatch.h,sha256=AiETvdSXcoJBdtMd8A1xccPq6gZTUQW4_VpaPKlNWr0,2174 +torch/include/ATen/ops/ones_like.h,sha256=ffQZA2nvHo9A52msNxB7fUgdwpu8Sy0t7Xk4EZ_xC7M,2185 +torch/include/ATen/ops/ones_like_compositeexplicitautograd_dispatch.h,sha256=XR0hn113hRkgXqfdxjJiLMqfOufaOAnhxrs80iGHDjE,1388 +torch/include/ATen/ops/ones_like_native.h,sha256=4kfI2eTA6DJGe2df_wOSgWVvZCV8f7YXYXtK2c6fCwM,830 +torch/include/ATen/ops/ones_like_ops.h,sha256=j_pkl8u7SMnzKhOvZf_WuepdvJ15ukgKH-DwjCZXyLM,2438 +torch/include/ATen/ops/ones_native.h,sha256=G2fPkYzOO11iFnDWhHj8IJRJPUaRRJvEnhGFqAjO5Fk,1066 +torch/include/ATen/ops/ones_ops.h,sha256=kjU5vUMdsD88kvbZib6bXz_CJxgjBHJKxDkS1E1LxkY,4003 +torch/include/ATen/ops/or.h,sha256=b_1lQfeRG7ddKMjENm9a6vCnxHjb0qBr7Oq-FWx6z1o,867 
+torch/include/ATen/ops/or_compositeimplicitautograd_dispatch.h,sha256=ig4iyemDkwA6VDc2NPJrsRjHRExSL6pSF370YKeHDCA,1024 +torch/include/ATen/ops/or_native.h,sha256=lmRG4Rysu3sdX-WvEgyONBv6zqmEx2nnouz-3RILtGc,736 +torch/include/ATen/ops/or_ops.h,sha256=zsIOHUd0GF5_xiPR9ZbiIX9OZG0va3d7ctuhhiVZ90s,2988 +torch/include/ATen/ops/orgqr.h,sha256=G-d4ZJH9y-JsBvMNc6mssaCJetK1fZNTuGY7sGGfrYk,1144 +torch/include/ATen/ops/orgqr_compositeimplicitautograd_dispatch.h,sha256=oP1VzvT-WgUOZ9C1M2SqhSV68rJ5eMxLAjL7WEq5cBc,999 +torch/include/ATen/ops/orgqr_native.h,sha256=ZCj6T0sHQgr13wYU8aGYSK4Wlwim3PQVZ0SMGFOTN6Y,606 +torch/include/ATen/ops/orgqr_ops.h,sha256=X9XfbELpHNRbFxek5wwyGeho-s6_HEV16VnuC0TKsmc,1762 +torch/include/ATen/ops/ormqr.h,sha256=V1m-MRuzyOmPFmjjn6ezpGAjbOkm2QoQb-Gblv5Z464,1562 +torch/include/ATen/ops/ormqr_cpu_dispatch.h,sha256=g--JcO7fArdX43KXgL9q76WVWPA_8MrPRRI-GYIAP48,1139 +torch/include/ATen/ops/ormqr_cuda_dispatch.h,sha256=lUHWwaDpsKUzgE4sTV9UaHhfOyCmDkr6GYea2rKQKjk,1141 +torch/include/ATen/ops/ormqr_native.h,sha256=_ZTtgfmrxBmjNb-kDcUDJ0LzXPFQsIQWCbbL5M0nIzQ,725 +torch/include/ATen/ops/ormqr_ops.h,sha256=rtogJUcgh8QGLPM_T4hPrdITRS_Zo8LPoeIKDEsZeqs,2148 +torch/include/ATen/ops/outer.h,sha256=Oq2XbQJDZKNeZXBCNYTO5EGM0J3JZ4oOHIvRetOMwTc,1126 +torch/include/ATen/ops/outer_compositeimplicitautograd_dispatch.h,sha256=noHy4UXi4eieN5N9eRpcIwH86Dca3XXx2GXEZHoDD4E,993 +torch/include/ATen/ops/outer_native.h,sha256=tXJb-rMDDaIRb-qqHmH9XpVGBir6GMXeJsmbWglAX3o,602 +torch/include/ATen/ops/outer_ops.h,sha256=Pf-ZUzzcBf0zoRSzbZ1EPKU3AApEJ75dttt0ptV1nW8,1750 +torch/include/ATen/ops/output_nr.h,sha256=OnILE1zLKSce5MaA4bXMwPRO7rsajkI4jqFGIlQ7jH8,483 +torch/include/ATen/ops/output_nr_compositeimplicitautograd_dispatch.h,sha256=ADkZzPrF0NkaDkieqleI9Ul_CZA_0q6DGRRbqC1t9jw,764 +torch/include/ATen/ops/output_nr_native.h,sha256=xastFhleH0TdjVCk5BfLBM5Ae5YtkZlE49CKicx9XOI,476 +torch/include/ATen/ops/output_nr_ops.h,sha256=jUHXlgipu0DilJkM23lB33e82KK84aePBkkx4dJCTmg,964 
+torch/include/ATen/ops/pad.h,sha256=T_O-N4iUWL_N47tZTGE7DtPmumxn7Fn7HQW2Y_Si2PI,1769 +torch/include/ATen/ops/pad_compositeimplicitautograd_dispatch.h,sha256=28fqM75cbYejZUJ1AbIZZ0GCqPJyUiyvMrCHR285_fQ,1025 +torch/include/ATen/ops/pad_native.h,sha256=sPPVVj-oC6_QC8F47RASP5DgmYBIm5Tdel6Vfri1tuM,585 +torch/include/ATen/ops/pad_ops.h,sha256=WOwXYqyunWLzqWkBwK-KNTjWU3KeqU5ilQhip1GxYmE,1236 +torch/include/ATen/ops/pad_sequence.h,sha256=u1zpgRMWMwM6w0ElJotqBEW-Sau766c9t68vf-exJT8,857 +torch/include/ATen/ops/pad_sequence_compositeimplicitautograd_dispatch.h,sha256=6jXwNXB-_-lM-S2Etnt46VFWVAmCVW6UH3-8Q7oxqcQ,860 +torch/include/ATen/ops/pad_sequence_native.h,sha256=mz-ENBB2UaFUhuJV6jy1OOrT8Rc2NSoNWANjiiqi7Nc,572 +torch/include/ATen/ops/pad_sequence_ops.h,sha256=BAhG0x-8fcbNzgIdnt5CiQzP_E6jhtMEhhnPhdD4z9k,1241 +torch/include/ATen/ops/pairwise_distance.h,sha256=ly-_EzDELC5mRDilBX1bBbj3xlx5JrhdbBlJAWn4kt0,805 +torch/include/ATen/ops/pairwise_distance_compositeimplicitautograd_dispatch.h,sha256=PJCuj024kpFizJvOWremBwg2xrEckFuZL9p547qPVOM,846 +torch/include/ATen/ops/pairwise_distance_native.h,sha256=IOOl2SrplMyErLUPOTBvS9FhQmrqv1kFp31rA8-x0Ho,558 +torch/include/ATen/ops/pairwise_distance_ops.h,sha256=1QsVVoYM7Rhp5ucjbnDoD8fCd-KEVHljcZDUmoywvvM,1213 +torch/include/ATen/ops/pdist.h,sha256=HdD0mMthnxAFIYGulE2b-8kbvfrPgJ2VpJcf0EkPf98,636 +torch/include/ATen/ops/pdist_compositeimplicitautograd_dispatch.h,sha256=nJWNmO-kEwhIwizjFVtsJ5uwPVp5WPR68hFqDbw9aVQ,775 +torch/include/ATen/ops/pdist_native.h,sha256=_DzeeGRkXI1t_uggiYDoI42_dm1ZWvqBnCnCTUm1W_k,487 +torch/include/ATen/ops/pdist_ops.h,sha256=n244EMPwJxX19XD9lI2fc0Ejblrk5SsZNHfAKeitkik,1003 +torch/include/ATen/ops/permute.h,sha256=0m82iKdTFAFwvOj4a0pNZDa3oPbf-MiwPJ92nhT4g_I,664 +torch/include/ATen/ops/permute_compositeexplicitautograd_dispatch.h,sha256=znIjnggAXqVPwL1-FUsICvhKAqa70EppXBIJqSNI1is,787 +torch/include/ATen/ops/permute_copy.h,sha256=kWhTR9-qtZll2BoiW1ylDWegKo_pJ7Tk2y-pIrqnhc8,1184 
+torch/include/ATen/ops/permute_copy_compositeexplicitautograd_dispatch.h,sha256=RpiSHAZlCmZc-aCUlLDmlcAtJhiSlbkm5oX3M-EUVtM,923 +torch/include/ATen/ops/permute_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=HvgN6ExkSCEPfBW-ty9WLMppLX7vJZZgksAl-GEE7LE,818 +torch/include/ATen/ops/permute_copy_native.h,sha256=tBY1QrSN9VKHHobnal2GmL05cBeXNQehle2NI3qFOjo,610 +torch/include/ATen/ops/permute_copy_ops.h,sha256=175qzWRSQBJjq9lOUShCiMu7Q5t6lmnvuBWRw2H7JI4,1772 +torch/include/ATen/ops/permute_native.h,sha256=pCJ6PNU6hVeB_yy4qP8rfvIlrtxNZS4u1bt2lhxCjUU,587 +torch/include/ATen/ops/permute_ops.h,sha256=RPT8qIddeD9EYihvSQF0gMYUqQUDapcQEUT6nnYtkQs,1049 +torch/include/ATen/ops/pin_memory.h,sha256=nyneqhdKF-782kXQz9bsZfUezJw3sVgqA-Z4vJaEw0U,484 +torch/include/ATen/ops/pin_memory_compositeimplicitautograd_dispatch.h,sha256=kocXgLW-JBPhuFlPFQNNT8l5PUfCi7qUJNdd5lfs6xA,819 +torch/include/ATen/ops/pin_memory_native.h,sha256=wjLy25AgvK1AQsd4TVGz1VaQBizwQ_T6Y7ZaPrSEiLI,531 +torch/include/ATen/ops/pin_memory_ops.h,sha256=aE1jvrYxY5EymOJDDYPAneupjkHx6b1Sir2RSM5i9Ks,1107 +torch/include/ATen/ops/pinverse.h,sha256=lwIxlB10264llzm9xgv9iljgy8NckYXe3AbaviHurLw,668 +torch/include/ATen/ops/pinverse_compositeimplicitautograd_dispatch.h,sha256=agiX8Sh2u1RWp5cyWjjoAhOwJRJwjt_VFFCc2dstTk0,786 +torch/include/ATen/ops/pinverse_native.h,sha256=s6rEyXFVKzy8mpfDaVuABdpDKj7y0mAdBE8xUPBL3LY,498 +torch/include/ATen/ops/pinverse_ops.h,sha256=ri0SDzK9T-r5T3wYnc3vwgzT2CeSutSwCf8iKmxL8ns,1028 +torch/include/ATen/ops/pixel_shuffle.h,sha256=BHBREVu_GVraXy3FrAn9ls8HgkUQ0Cu2O4QYOxbueVc,1254 +torch/include/ATen/ops/pixel_shuffle_compositeexplicitautograd_dispatch.h,sha256=T6qQA5lkzWlFqcBeRqBYqqr8W2sGmp8UU8nYQ1-8md8,929 +torch/include/ATen/ops/pixel_shuffle_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2KlZIKqBxNARnD7EqM9_l6NYAEsIKZKdt7XyTteDjHI,821 +torch/include/ATen/ops/pixel_shuffle_cpu_dispatch.h,sha256=VMxgDyVdVzrE6RvjqLTJn6Bd0nF8ZDySg6wnkVa1asY,751 
+torch/include/ATen/ops/pixel_shuffle_native.h,sha256=vEQlBiZoQEfavsX14ob1xVYheL67zRzBxVDd3fQ22Mc,710 +torch/include/ATen/ops/pixel_shuffle_ops.h,sha256=Zfyl_b7IYhH8UBFNpo8Fk0qO9Wgqai6dXQFQkMdBDmY,1786 +torch/include/ATen/ops/pixel_unshuffle.h,sha256=bprGsP9MZCiiDxRUcy5k4QVMvw8wxfGNIS5-klEl1ao,1292 +torch/include/ATen/ops/pixel_unshuffle_compositeexplicitautograd_dispatch.h,sha256=8bTZWrKYAEj0Aq0mluWXCi4pywLtREZ9MTT3Olkkqcw,937 +torch/include/ATen/ops/pixel_unshuffle_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AYolLvLlcFkkPYMheQ6wRpA6wSN6NeW81mmMIbpHIW8,825 +torch/include/ATen/ops/pixel_unshuffle_cpu_dispatch.h,sha256=DSsyO3j2gQcQEqhQbIR0gc15Psd_KpyuKtWB8F_4hqo,755 +torch/include/ATen/ops/pixel_unshuffle_native.h,sha256=8F5a7npyuRfmdTegTKlXhG7PaBceuYPFKMujr7Vk7-0,722 +torch/include/ATen/ops/pixel_unshuffle_ops.h,sha256=Tr3h7cJKo-Qi0T9UgJ9OKelGISx4R1Q3xHF-QUFcw9M,1810 +torch/include/ATen/ops/poisson.h,sha256=bLMNTOt9Duc6mTSLwV1aM_ioxtL_Jkj_oCvZkaEaWeA,1284 +torch/include/ATen/ops/poisson_compositeexplicitautograd_dispatch.h,sha256=yd4mV6N70240Pjy_0LXHLv_V7ABzCRoso8g78g17hlM,968 +torch/include/ATen/ops/poisson_cpu_dispatch.h,sha256=PKoQN6hYHJjTbp5BVVqRxm1OQ_VryWPHuNubHzV4xxQ,778 +torch/include/ATen/ops/poisson_cuda_dispatch.h,sha256=l-UPjooxB7EQBj4e_xPnIJaw6i6uwJpKLKWBOqdaCYg,780 +torch/include/ATen/ops/poisson_native.h,sha256=7KU2BMiiMOFQrKT8GjdohvtKidGVT8583QiBiqDNVTI,782 +torch/include/ATen/ops/poisson_nll_loss.h,sha256=QAU5nn-aqeMv-zJXzjqFUr2mN8ejMpbcwzd-2J9-au8,851 +torch/include/ATen/ops/poisson_nll_loss_compositeimplicitautograd_dispatch.h,sha256=ygZ3zCKxpcJtm0WO2GkK1QBV2JMv_tnEXyBzevESOJs,860 +torch/include/ATen/ops/poisson_nll_loss_native.h,sha256=2wNpqUdIZlZF0srVT2cs9lwNDw3YpmDJe10X-sjm5b0,572 +torch/include/ATen/ops/poisson_nll_loss_ops.h,sha256=jLRh-re2KEpptvffH4JJKCbY5XYf1DqcYWNnRbceJEo,1287 +torch/include/ATen/ops/poisson_ops.h,sha256=oBEMv2wupuQHDYQgnvsIniE9_2qys3koIUDr3HTkRls,1882 
+torch/include/ATen/ops/polar.h,sha256=_-c6ERy9JgiCmcGYT5-X5tTWPdbVp7DfJe-XUe5x_V0,1126 +torch/include/ATen/ops/polar_compositeexplicitautograd_dispatch.h,sha256=63Ox2FepgsSZGwAawSnrPlaH1nEQTMtBTpAqr2jp9Xs,788 +torch/include/ATen/ops/polar_cpu_dispatch.h,sha256=Hi3FYfTFa594PdJqZ_sCnyvcb2ih16bPTEk_VNjBWqk,871 +torch/include/ATen/ops/polar_cuda_dispatch.h,sha256=9OVYb0efmcOF7CGqsy_FZuyciMVwUAixJxU47bG81E0,873 +torch/include/ATen/ops/polar_native.h,sha256=stOczZN6A7m89gAXRiA8TZGNxoHdZrorQd14YJVWaJU,602 +torch/include/ATen/ops/polar_ops.h,sha256=wIjAEuscdl__duBUitC0YyxuiklbojwWIWWRdhlzWRw,1750 +torch/include/ATen/ops/polygamma.h,sha256=mKFkQ37UsQ6RpLm1Pdyg-i3L0Tqt6l7z5ugCR7VnGjw,1097 +torch/include/ATen/ops/polygamma_compositeexplicitautograd_dispatch.h,sha256=ASsPSw-zfYpT8PQJKUnnvPOEPvfjv0MK1OzTLYIr3ME,775 +torch/include/ATen/ops/polygamma_compositeexplicitautogradnonfunctional_dispatch.h,sha256=n1z83T4C-YPI2lWZLG_piOySec1DdPPhaCu3r9_BO2g,804 +torch/include/ATen/ops/polygamma_cpu_dispatch.h,sha256=LGTvcKphVkyna4BvZbWC-kuBS_iF2-ToySj08uKWr3Q,919 +torch/include/ATen/ops/polygamma_cuda_dispatch.h,sha256=MPsys_kw-m2u9VnJhu-rU4YTkA3eYoIUMNrr29FglYY,921 +torch/include/ATen/ops/polygamma_meta.h,sha256=YwdAj0ijfofK6FyITcFb3je7S2dOUVgHnkKrja4_NEI,589 +torch/include/ATen/ops/polygamma_meta_dispatch.h,sha256=3JVEP9XNDBNhwIH6dLGs47YbN5tkBkudjoqDInPrRxU,921 +torch/include/ATen/ops/polygamma_native.h,sha256=bZueh1wZERV_hUPYLZmAzxCs72MvUAumg8KX6aBICrQ,681 +torch/include/ATen/ops/polygamma_ops.h,sha256=frdUASk6J-ZyZdaiHG8G-EKrCu1-sLBJFiQADWZBR6Y,2263 +torch/include/ATen/ops/positive.h,sha256=AVe84biVBTWi8506VdZRcCy68DYh5nMGZfAB0M4wmb8,628 +torch/include/ATen/ops/positive_compositeimplicitautograd_dispatch.h,sha256=WBfoIVm0ikbK7jTxuwyaIIp6dgjxkHQs7DaueD2gshE,766 +torch/include/ATen/ops/positive_native.h,sha256=fUTOymqNClKnfvgP_BsSPk8X79xw7yG9-UpFN2W91bo,478 +torch/include/ATen/ops/positive_ops.h,sha256=QOYEd5JNnxuGcWGfMijZK8BbP-HuBWIH0FjvOo_cfyQ,979 
+torch/include/ATen/ops/pow.h,sha256=WX60qZ_rw3qjrJ8pVxuoHGKvxHXbmgw1SvkxKOvpKfE,2684 +torch/include/ATen/ops/pow_compositeexplicitautogradnonfunctional_dispatch.h,sha256=JPxA_SlRKjv2a00yRoSSTDfQ0ZWgOpga2IgPfZbjhB4,1130 +torch/include/ATen/ops/pow_cpu_dispatch.h,sha256=chd2P9T6nZ32HKiEwa3Q8gdqXNYMuBEs8tehRt_Dh7g,1687 +torch/include/ATen/ops/pow_cuda_dispatch.h,sha256=9wRa2C-ltM77NCSAgchGBl07oBXPnEERms3hj2Mc7Ss,1689 +torch/include/ATen/ops/pow_meta.h,sha256=wlzwScxx4N2ydJM0bsLLM7hm5ZMpSuJUmLAqjGEQSJ8,924 +torch/include/ATen/ops/pow_meta_dispatch.h,sha256=qdZqUESwksiWvJPwZ1DmvRZQuUmZbl059u5bmS16pOI,1689 +torch/include/ATen/ops/pow_native.h,sha256=EOujbAc9obUdaL2pjIMDsdXGmC94wBxRets6MOi2EwU,1226 +torch/include/ATen/ops/pow_ops.h,sha256=3g8V_t_egj5CKHj6K-mFyG68Mu8mvIN0Qop-8rIFPSQ,5901 +torch/include/ATen/ops/prelu.h,sha256=chFRphPXigY6RJO_c3CSOqwcXujo0fU_Ki60FxoUGys,660 +torch/include/ATen/ops/prelu_compositeimplicitautograd_dispatch.h,sha256=DHiEZxoXkNYDxzuO5hTycax7fG_Y533mk3Gos-Vrmbg,790 +torch/include/ATen/ops/prelu_native.h,sha256=-spBJr03EKvj-bumO2BqjCVMGnnCZRIbyUfBe2Pf-d0,502 +torch/include/ATen/ops/prelu_ops.h,sha256=UKGuljOYQo1vi7zXIY42KuaIdfgQuHALEFSm9t_lZGo,1053 +torch/include/ATen/ops/prod.h,sha256=Cst5SRFAk-pN8v5LCs3yk2KdLFCAGby5HJDh5h2Q6eQ,3283 +torch/include/ATen/ops/prod_compositeexplicitautograd_dispatch.h,sha256=QsRfBaTCebS9R2T7sK0sZ0C_zW3p7BkSkuT6Vco8VCE,956 +torch/include/ATen/ops/prod_compositeexplicitautogradnonfunctional_dispatch.h,sha256=uYoRgtpJW7R9VgJt4_TxmSxZL4fk2RpDCUEFxqhP2WM,875 +torch/include/ATen/ops/prod_compositeimplicitautograd_dispatch.h,sha256=p8umbXYvFIhiGysA46hzR3PjKmr5qb5MeCaoJvL-3Gc,1167 +torch/include/ATen/ops/prod_cpu_dispatch.h,sha256=njF3jj27O6aY3RSxHseo2C1FqAGRlSlWD_uXn9Pwvqc,1217 +torch/include/ATen/ops/prod_cuda_dispatch.h,sha256=0WjYcz0eeuh14QM_gKt6H5h0auIO8Yx_CrERlNBhwDI,1219 +torch/include/ATen/ops/prod_meta.h,sha256=dl5TgzqArMcyBznTBy1ehzQ2SmRNJXO6JbIlaZyhMNM,647 
+torch/include/ATen/ops/prod_meta_dispatch.h,sha256=ExmlyZLuCh6uc3PghSl2zeXibrm0d6ypM96T0yKFU2c,1113 +torch/include/ATen/ops/prod_native.h,sha256=nwaNe1j_5ketmrtpcVC2HdMNT5nCRJwPIEgp1Kgl9LY,1174 +torch/include/ATen/ops/prod_ops.h,sha256=cZidpeWhsoP_Yt4nBHOXkCDBenth-ujfvkPvcB6qS4M,5203 +torch/include/ATen/ops/promote_types.h,sha256=3ji6mbi_XwwC-wUpPja00V0SjHdiStrwzH9NvKfXF3Q,700 +torch/include/ATen/ops/promote_types_compositeimplicitautograd_dispatch.h,sha256=PXydHUCkUBtxwqqijy44jDt3ON4oHNWn46NmYrto8Vc,794 +torch/include/ATen/ops/promote_types_native.h,sha256=U4z_wHig-qoyu9h0Pa__UtpvGymkDPZMAoj_Kde_LBA,506 +torch/include/ATen/ops/promote_types_ops.h,sha256=xrMb3PGNArw2UWdzt-rJErJibmT1lNNEzfvUNVdSaPI,1077 +torch/include/ATen/ops/put.h,sha256=1FUb11EslbxemiWCV3dzvSoDD6x6a4azanm4AGRoA1Q,1433 +torch/include/ATen/ops/put_compositeexplicitautograd_dispatch.h,sha256=l_DGfdycUKgZmXB8dpRWMDuxdT_D5SbzcRmFYE-hwxw,1134 +torch/include/ATen/ops/put_cpu_dispatch.h,sha256=4w1kldOr0MjApINC2y1DTQKALWKSfnDGX3tRAfhZT7M,790 +torch/include/ATen/ops/put_cuda_dispatch.h,sha256=tO-ABuc3BLcp3EqlSCNDFWFnmnMw4czDZV1opfZlGtQ,792 +torch/include/ATen/ops/put_meta_dispatch.h,sha256=lDUgR5LgSZNlJfh8fguKabnS806cl8m5aMUWhhJcCqc,792 +torch/include/ATen/ops/put_native.h,sha256=yPvq-4hUaGJO86y5xvZxYZ5wmlB3lDQ_43A7Gs71R-s,818 +torch/include/ATen/ops/put_ops.h,sha256=rsuj47IP6n5jj_hiyOOvE0WHBe5cHp7g_QrBSzuAqXU,2809 +torch/include/ATen/ops/q_per_channel_axis.h,sha256=rehh1C5hA5wrOeQps2LkkB1hCoSR7sJLZwg5ZrluOJE,656 +torch/include/ATen/ops/q_per_channel_axis_native.h,sha256=yEuzS_BS0WB4mMT72cNZM0M98ANCo9ljuC6f83a8sjI,485 +torch/include/ATen/ops/q_per_channel_axis_ops.h,sha256=owMMXDT6kKAtDzprj67BLgOu3SAVbzTYEbFDr8atRac,991 +torch/include/ATen/ops/q_per_channel_scales.h,sha256=iVPHI_Ei_gvPjGm7BokwjHLDbveVQeKz4pjcRpGmWOQ,1144 +torch/include/ATen/ops/q_per_channel_scales_compositeexplicitautograd_dispatch.h,sha256=BdNy2NLW_IbfCFGdVTgfHs0DoPyvBCpTG8Y-we51VN0,895 
+torch/include/ATen/ops/q_per_channel_scales_native.h,sha256=HqfZk2cC8V8wZVxx453T3ikZ5SMAaDpjMM5MKSoLkIk,582 +torch/include/ATen/ops/q_per_channel_scales_ops.h,sha256=JHJtC7-FGhcKP0OPmc2EUyLYwZH-QlUsTQUUyiN90HQ,1674 +torch/include/ATen/ops/q_per_channel_zero_points.h,sha256=u_3m6wCebqJivezkVMiuYf8otW4IyuINQj6lE6dSBCQ,1194 +torch/include/ATen/ops/q_per_channel_zero_points_compositeexplicitautograd_dispatch.h,sha256=zyI61O2nyKl0qwm8xCWSLW-f43wbuolZhFGWwP8ZEOY,905 +torch/include/ATen/ops/q_per_channel_zero_points_native.h,sha256=NX_Dd0s-FG5tYpLwToO7RVmF9S_payMpCIdegSu5s7o,592 +torch/include/ATen/ops/q_per_channel_zero_points_ops.h,sha256=yo8odfnwtz6Ei7Oy0KsLdTnblWiWvcy2azwINZyWlDE,1704 +torch/include/ATen/ops/q_scale.h,sha256=4-rHXZMSjmP6GJm1eQqfsDCprczFNLzrYgdBjqA9UDk,613 +torch/include/ATen/ops/q_scale_native.h,sha256=KKZ1zPXa4OTSgWDHYDtgwnSzFhbiDVpJUDccoHOrc7U,479 +torch/include/ATen/ops/q_scale_ops.h,sha256=L-sXz8Q00PTN1kJUP673OlU9uoqRa9hHMJ3hRH3Vqxg,957 +torch/include/ATen/ops/q_zero_point.h,sha256=7FHWIXpaXqcJPhVqlbCdFhf74F8dMsulj-gTAS9A-rA,632 +torch/include/ATen/ops/q_zero_point_native.h,sha256=etbL4tAbcSTcHzCFN45y-LKNAyEU5yZmnElE103yb-A,485 +torch/include/ATen/ops/q_zero_point_ops.h,sha256=u9qk8NMb6XABWxBJmPU8ssGaNY7I3xQS3M1GVlHDN_E,973 +torch/include/ATen/ops/qr.h,sha256=A2ZnS4q4RqTcwipSoBItOk_pHXEYAxZvVxRLZFEktbw,1248 +torch/include/ATen/ops/qr_compositeimplicitautograd_dispatch.h,sha256=_HiiHYUpG2P_4m5PgJQoKRjKXlhj5zint8T9Dw-pyDY,1059 +torch/include/ATen/ops/qr_native.h,sha256=kWCmmfjIW4xIEEVy3Zqq1WolPmwV0xbU5vc_MvE09nQ,639 +torch/include/ATen/ops/qr_ops.h,sha256=gfm5ZyMMMkFdDkgFXFXHPDA7Jpjg78FfBE18UBcsxSc,1890 +torch/include/ATen/ops/qscheme.h,sha256=eA9C3dOQpYpkhUK3n4w7J6xExsBL6An_UOVLTUwmBFo,481 +torch/include/ATen/ops/qscheme_native.h,sha256=c0krA00bfqQ7xMohCgn4BHxGS0taeddm_QVtpzgMqVs,484 +torch/include/ATen/ops/qscheme_ops.h,sha256=OI093FcA0gtkJcUSM52gBH3GgP3T9QKGn2kwUR4oIrs,974 
+torch/include/ATen/ops/quantile.h,sha256=u-j_7Iy7EhHpFBNQK7VrnxT9hMOlfmMKwu14VK78xPg,2914 +torch/include/ATen/ops/quantile_compositeimplicitautograd_dispatch.h,sha256=ErahUS5ag8nVf-y9Dsjj9amPj7vyzNFpkxNIwBssRvs,1816 +torch/include/ATen/ops/quantile_native.h,sha256=7vuQfoioBRAZwVZG9Z0mrjRaBT96x1rs6Gr4r1NBwLI,1122 +torch/include/ATen/ops/quantile_ops.h,sha256=9vVNKp9Cqb5dGPhViV1Lx7g5xHv8TzQZA4FLkdH8W1g,4099 +torch/include/ATen/ops/quantize_per_channel.h,sha256=N_FiBkJm14GJ5Bk4HZKlunSIt1X6vdVZHwy3yUEF9MU,1720 +torch/include/ATen/ops/quantize_per_channel_compositeexplicitautograd_dispatch.h,sha256=ickXjBuswE9V6JsJxytriK8dIwcmVSB_9xIm4fsYWVo,1085 +torch/include/ATen/ops/quantize_per_channel_cpu_dispatch.h,sha256=NaFKeju7p7cM_QrueHErPKxLyxqDHLEKBAdoB-RxWSU,829 +torch/include/ATen/ops/quantize_per_channel_cuda_dispatch.h,sha256=JIBPdR5Uc1MVf0MfcsdrjtPmQPNe51Yf8gfjGUNP8t4,831 +torch/include/ATen/ops/quantize_per_channel_native.h,sha256=nk_BfJRE9Is4_dcIR3LsEEE5BnG484IDwR-ZuEz8m2c,772 +torch/include/ATen/ops/quantize_per_channel_ops.h,sha256=9qWyZZTYBQgb1wyZzqHFhb46fwzi5pixEbeyHv5YDlU,2310 +torch/include/ATen/ops/quantize_per_tensor.h,sha256=gupbFyodGW6iEhCX49QaRM-g4xjQ7CAZrG_dGScQ7NA,3911 +torch/include/ATen/ops/quantize_per_tensor_compositeexplicitautograd_dispatch.h,sha256=34H0NkvyD9s6dCwKQq2jiYB0XdkKWm33xFAaVYmiV6s,1677 +torch/include/ATen/ops/quantize_per_tensor_cpu_dispatch.h,sha256=QdydgcSmRkv_QespwhuE_vrXSRS-fnfVeKeIdsbTkuY,1097 +torch/include/ATen/ops/quantize_per_tensor_cuda_dispatch.h,sha256=UsMtktdrp4usMxFc-Ow1sMAmftneuH463NbGL6-8C2k,937 +torch/include/ATen/ops/quantize_per_tensor_dynamic.h,sha256=MC30IbpaDP19Hd_Bf756QmVgTPPQ8qs_PITHTkRd6XY,1511 +torch/include/ATen/ops/quantize_per_tensor_dynamic_compositeexplicitautograd_dispatch.h,sha256=ZYH0mlVOHbiGw-yaWCD6t1m3-4VAjXTN2ruaMlMEz5I,991 +torch/include/ATen/ops/quantize_per_tensor_dynamic_cpu_dispatch.h,sha256=1eJnnQqid2OO88F2-eDS4MYR-WkvOnHRAFp7hAiWk1I,782 
+torch/include/ATen/ops/quantize_per_tensor_dynamic_cuda_dispatch.h,sha256=-7hnghli2hQCNRS_ZQ85qAkgLBNFFb3bMXVKwfmeiMI,784 +torch/include/ATen/ops/quantize_per_tensor_dynamic_native.h,sha256=UA8OiAU2aQ2qBN2J3oGZkgVupyHN6gdBq5IKV06t33Y,678 +torch/include/ATen/ops/quantize_per_tensor_dynamic_ops.h,sha256=M7rDTT19KxpuT91-xBQrYKX9n3Xt7-PlWxgxHexb0yQ,1998 +torch/include/ATen/ops/quantize_per_tensor_native.h,sha256=0Lk3aKPqjT8gHTLG1hSsictMimIxVzPax7hY_ROabXg,1382 +torch/include/ATen/ops/quantize_per_tensor_ops.h,sha256=AZPzabwZFGwgQpf2-qLNi9GAnxq1nG5suzNtwvmVndI,5749 +torch/include/ATen/ops/quantized_batch_norm.h,sha256=_5Wwx-1jsaTVJ9LDqekXdED4jawj-TWA6Uek6QIYcSw,2254 +torch/include/ATen/ops/quantized_batch_norm_compositeexplicitautograd_dispatch.h,sha256=e6qFMuEYM5bISuNAhUcL10tnrQ_DK9QfnXKgzbNATIs,1287 +torch/include/ATen/ops/quantized_batch_norm_native.h,sha256=QjlQyeoxml6khgG3VwZrDJks578O2Za0Pu7B3nPssZY,974 +torch/include/ATen/ops/quantized_batch_norm_ops.h,sha256=SzmjtVyBdoNvj8705RxVFtFkZ7qkkM5x0psl8vLMtxs,2956 +torch/include/ATen/ops/quantized_gru_cell.h,sha256=LnO0ayRHWkCP57_ow0J7_7X4xFd-tTACgyOvjnWIKck,1399 +torch/include/ATen/ops/quantized_gru_cell_compositeimplicitautograd_dispatch.h,sha256=mye9ElBjpdgd6Utk2WKGA8JdetNzbrgjRAGA12Jxv4M,1156 +torch/include/ATen/ops/quantized_gru_cell_native.h,sha256=AgxqqgyFbKRjO2Qz0SHfyzY0tvr-hod4pZcq5GaGRrc,868 +torch/include/ATen/ops/quantized_gru_cell_ops.h,sha256=4eduLwKeUYz_482sFPbmKQcHdEkA3P_FaMfIyRCmhns,2247 +torch/include/ATen/ops/quantized_lstm_cell.h,sha256=U1bsfEcJyACPV5NtxgGG1arbg3MVKrlPQMe3VCW8UR0,1436 +torch/include/ATen/ops/quantized_lstm_cell_compositeimplicitautograd_dispatch.h,sha256=CHd94WfD-LLTc15yqjg8dg90n2DTA2ah_Sk-jCMj4Lw,1178 +torch/include/ATen/ops/quantized_lstm_cell_native.h,sha256=bfXsk_9rlzlmzhr6LPcbqU8QdFqH9CelaUdUb6kvov8,890 +torch/include/ATen/ops/quantized_lstm_cell_ops.h,sha256=VaDEw0u5PyqJUKf-5UnW0sl94AWrRrY9ASSBQXNahiA,2325 
+torch/include/ATen/ops/quantized_max_pool1d.h,sha256=uWob9fTDvUdv7ddcw4_cpPXiaPxf7oldvL3DCBX4-8A,1974 +torch/include/ATen/ops/quantized_max_pool1d_compositeexplicitautograd_dispatch.h,sha256=nTpmlgbJPKzjSKB7TzdnAbX8fP4ozzrZfxz2tQ5GNLs,1148 +torch/include/ATen/ops/quantized_max_pool1d_native.h,sha256=f_SIDSuZTZFdDkCvwVHP42UT9tuxUNaPGbEGPA-R0Zk,835 +torch/include/ATen/ops/quantized_max_pool1d_ops.h,sha256=PgdH6ZrXeCIHEBTICy8Fa6Phms58O3nPdY4m9jvf66g,2496 +torch/include/ATen/ops/quantized_max_pool2d.h,sha256=e42a06YsRLbmLyuVn865LaYUrOyWZHc2Jm-pUB6glcw,1974 +torch/include/ATen/ops/quantized_max_pool2d_compositeexplicitautograd_dispatch.h,sha256=_mAEwP7G_qANjqlEsnm_cHcW9OrEv9tKx4_Pr3OGjZA,1148 +torch/include/ATen/ops/quantized_max_pool2d_native.h,sha256=znLsYn6Ze2QfpR9NQFvyT4AVm3IJqCIMLzNgI-FRz_M,1042 +torch/include/ATen/ops/quantized_max_pool2d_ops.h,sha256=P91dAE-kITQy9TDQqf0SMIc2Gnj58Hym-zrLZ6Xw9L4,2496 +torch/include/ATen/ops/quantized_max_pool3d.h,sha256=mN84F6l-oez36Qi6R_NHvoAcZQbj31goyCpwlYGKv84,1974 +torch/include/ATen/ops/quantized_max_pool3d_compositeexplicitautograd_dispatch.h,sha256=Ah4TkNI_CfYqSXg7NFT_JOgS5mW3XQxy_CwmZ4HBXEE,1148 +torch/include/ATen/ops/quantized_max_pool3d_native.h,sha256=jwxntBHnnUF09A9Lz_7ai3ykFkvXcz_7mzRCbSL6UN0,835 +torch/include/ATen/ops/quantized_max_pool3d_ops.h,sha256=TUjS0fibXUHRk5ViSXIodazh7n8sMxzF0_TvssspN_w,2496 +torch/include/ATen/ops/quantized_rnn_relu_cell.h,sha256=189eARwnewfk9fXmhENEHBdxjjzJcmdkuvBzfPuAJfs,1419 +torch/include/ATen/ops/quantized_rnn_relu_cell_compositeimplicitautograd_dispatch.h,sha256=_NTM5Jv2skraZVsgLwRmYOXHo0HdGdQLdSo5fmFHr2Y,1161 +torch/include/ATen/ops/quantized_rnn_relu_cell_native.h,sha256=fB7IDGSpvTL-ef_6UtNBOnVKxLdKCLqVwGJEuT5a4GI,873 +torch/include/ATen/ops/quantized_rnn_relu_cell_ops.h,sha256=Fvik37AemYR03T5F7Wvw0r6yPsV8u7HkBY5nTQFVtV8,2262 +torch/include/ATen/ops/quantized_rnn_tanh_cell.h,sha256=OmmEoywVe-_xi74cqEfGgBa_rxOQAYdNQC6ofMASJ4o,1419 
+torch/include/ATen/ops/quantized_rnn_tanh_cell_compositeimplicitautograd_dispatch.h,sha256=hdXVManL2nYQGh-VksacdcyqKPGjaFuUZOYPX2vbkN4,1161 +torch/include/ATen/ops/quantized_rnn_tanh_cell_native.h,sha256=9E-jPiTGhCGcCoc4vcB1L7g06anoYS5XlYD78oi5nz8,873 +torch/include/ATen/ops/quantized_rnn_tanh_cell_ops.h,sha256=FStdBWGSBfrZvBgIHIFPLW7Cvfa_0HuZnRKKunQ94K8,2262 +torch/include/ATen/ops/rad2deg.h,sha256=tNgchAtHF7JIDjfwGrwdNaBFGTrBvVhQ_abSBNdTc_4,1159 +torch/include/ATen/ops/rad2deg_compositeexplicitautograd_dispatch.h,sha256=rP60qflK5pdHdkhb17SLZSnBZb9bEXGr5_XPgc2Ix-g,976 +torch/include/ATen/ops/rad2deg_native.h,sha256=9LfVRZ6-WrtAnpT3CQXARtTNuYIXZZZWyBgQ9WY_NI8,1034 +torch/include/ATen/ops/rad2deg_ops.h,sha256=AKth119jXGDhdmxNEQAPGd-Jsn85zK_Pm6nN4RkELH4,2131 +torch/include/ATen/ops/rand.h,sha256=MD2znsf_D1nXBKFX-pKcGSjbjm_qPzGWcyWw0rfPao8,24901 +torch/include/ATen/ops/rand_compositeexplicitautograd_dispatch.h,sha256=2I7MyOoqZgqVneuLSsLtBakGygng1mkRQ1BRmHxG7SE,5074 +torch/include/ATen/ops/rand_compositeimplicitautograd_dispatch.h,sha256=wNRe6KPbNZXatGewiYryAMGq3-KsceP0nf2sO7ZKhl0,1194 +torch/include/ATen/ops/rand_like.h,sha256=L7-skByU0KdMXCbyJNJNC9Oe5PlZHQPr8tflzG2Js04,2185 +torch/include/ATen/ops/rand_like_compositeexplicitautograd_dispatch.h,sha256=GKvFilfx9HqPr3_Jki_qfXelQQIHQtupE7h58FFpcmQ,1388 +torch/include/ATen/ops/rand_like_native.h,sha256=lJ908YWiNX_pOK-xfkV_VO0xS4Q6nhRALuL0wMDLI-Y,830 +torch/include/ATen/ops/rand_like_ops.h,sha256=kIX2VnOM8aw1zeITP2f2U_unsohEZLDFMVUG42FGJBs,2438 +torch/include/ATen/ops/rand_native.h,sha256=KVPipma4JoLlgD6RccfhjysbSNiR40HBocIVhUtGUVM,1915 +torch/include/ATen/ops/rand_ops.h,sha256=PZhETHGuIBiT3ifPhChAu3DbSYy8-l4j60IAS1ElD1k,8333 +torch/include/ATen/ops/randint.h,sha256=4V81Y655E4Oy6uxg03fySEppAoG3MxKrVfhM5U7JcQM,26144 +torch/include/ATen/ops/randint_compositeexplicitautograd_dispatch.h,sha256=kHll7vC6OAgUs4IESjCnO-IvQOUIiOR7_HMe9IPtucM,5822 
+torch/include/ATen/ops/randint_like.h,sha256=j9FbPy6zMrQcZHmM6TvRnZqLx6n7KJxk7fuXBbj6GFA,15501 +torch/include/ATen/ops/randint_like_compositeexplicitautograd_dispatch.h,sha256=GSJ158rjdA8G5OwQgkgG7-ChXyr1xn346fjDwufRt0I,3902 +torch/include/ATen/ops/randint_like_native.h,sha256=2vxPmGNJ-sviCYAtF5fPwwUaPp3M7ndmrtZII5lD2qk,1368 +torch/include/ATen/ops/randint_like_ops.h,sha256=5rqXSWeBOKbQWcZ2NubLBfOjEGaHKEuni68n0dy94rE,4903 +torch/include/ATen/ops/randint_native.h,sha256=E9WhIF_ShwiObU12pAZFRn8KiP_e_P_NiihLB76CQ3M,1894 +torch/include/ATen/ops/randint_ops.h,sha256=4gbr28SvBG_XxDxZW4wSUoVtEQYrYTJ7PNvLEu7VIQA,8555 +torch/include/ATen/ops/randn.h,sha256=pCNXibvV08JSRmg66BoE_5RSu6lT2terqt4lZSiBNFA,25062 +torch/include/ATen/ops/randn_compositeexplicitautograd_dispatch.h,sha256=J6flTzRpf9yYKtM8rixKzxv_50iJFogLcvOE5SnqkWc,4782 +torch/include/ATen/ops/randn_compositeimplicitautograd_dispatch.h,sha256=X7CDngvZE9bRvykiy18ecTwuSTePIBdMTx0jqjc3TFg,1518 +torch/include/ATen/ops/randn_like.h,sha256=TbNgAN3zf3xbv7veR4UAcf_Y-ZP5j0Lmqs-MHQDl9So,2198 +torch/include/ATen/ops/randn_like_compositeexplicitautograd_dispatch.h,sha256=_c5dtJZhGofSjz57wtvZkCHeLyucTvwg2WUsifaCmFg,1392 +torch/include/ATen/ops/randn_like_compositeimplicitautogradnestedtensor_dispatch.h,sha256=21b6Ptlw-c92kSHNoMfDxqsLCg_CohEkssr-kic8BM8,1138 +torch/include/ATen/ops/randn_like_native.h,sha256=7MOSTbWXLnVpON2Bjf3rBRoFqY80aCJ9b-OksCvs42o,832 +torch/include/ATen/ops/randn_like_ops.h,sha256=U43VapBu60f7OQZ5WzvStNG7FtkQoU-Xb06FA2TotxI,2444 +torch/include/ATen/ops/randn_native.h,sha256=2JQ0Jk-SSZResonE85kcV_7CZG1-IaJJQGFUb3PdGeA,1923 +torch/include/ATen/ops/randn_ops.h,sha256=n7t8nraLEMQ_CYPYwUZATiag26ry2LvhFRC5vY4bnqU,8357 +torch/include/ATen/ops/random.h,sha256=qwXt5B4wc-1Qn1JNoUV8yPBTH8mBmH1b0P6SdHrlQzY,3209 +torch/include/ATen/ops/random_compositeexplicitautograd_dispatch.h,sha256=hideafbsoEfTN2z6DTI4DwGQ0wp6bECtV08Bbf95Tgc,1976 
+torch/include/ATen/ops/random_cpu_dispatch.h,sha256=AX_KDBI3UWfuiyyoVDhWFK_yfV-EN3YWFhTjrbpmhv0,1045 +torch/include/ATen/ops/random_cuda_dispatch.h,sha256=okfAsOHoTYkczs9U17QqSHODz331J_BS0FfMSu68R3o,1047 +torch/include/ATen/ops/random_meta_dispatch.h,sha256=f0589RsXv5uLYTf-H-al-H2ErnpXKDejKJtyWhjqAEo,1047 +torch/include/ATen/ops/random_native.h,sha256=3rpPY5SB6nzCJrFIv3bbKD6yTNdP6ZefAQXTGQ8FDF0,2006 +torch/include/ATen/ops/random_ops.h,sha256=X_BZrHPcooAhg01UAKt3iyKtu18sBzGjs1FIHpbN-P8,7414 +torch/include/ATen/ops/randperm.h,sha256=Z_JoTk-CFE5LdMUjLHPAMEeYuEpeOiYl0cQBS1kA5Yw,11025 +torch/include/ATen/ops/randperm_compositeexplicitautograd_dispatch.h,sha256=wrnWce2oyNo_kce7bkS5iEEeI39mZsm081e-zhm-4qM,2274 +torch/include/ATen/ops/randperm_cpu_dispatch.h,sha256=8l-gxOBKzpVl8q6TQxYieexGLrpp-DI-3w7YXYtlEqM,1122 +torch/include/ATen/ops/randperm_cuda_dispatch.h,sha256=53wTn-HUiD3SktVTyvC60TM7QSG89CMRd7If-2QkOxQ,1124 +torch/include/ATen/ops/randperm_native.h,sha256=3rPQxVemUkm4QPL5oq5614LGJ6rkwiiD5KgW4lLCFi8,1153 +torch/include/ATen/ops/randperm_ops.h,sha256=SCuW40LkrQI-UkWHBcGVnCHTPfgaILSlM8ZPjyl3VMU,3977 +torch/include/ATen/ops/range.h,sha256=nfQPIf1gcQiKUiBb8WlNrzqLTTzTVEym0sbwsQJjQrI,3370 +torch/include/ATen/ops/range_compositeexplicitautograd_dispatch.h,sha256=EbhcEAP-37htLpxuxMu-GHvjIuj9uX3L-VTe-zw2Lkc,1629 +torch/include/ATen/ops/range_cpu_dispatch.h,sha256=-1-AMk7WfA1bS1JkZQir3cRFV9O-KyvXZ7CZqNyV-_E,921 +torch/include/ATen/ops/range_cuda_dispatch.h,sha256=gd8fhCREoZUKnkfzzy9bhrkAsw9FGBfv3jpaY2AFcU8,923 +torch/include/ATen/ops/range_meta_dispatch.h,sha256=ZfGyV-T6liGMdi1Z1IBqmzbUS_EKLhIjGWl03akaXO8,923 +torch/include/ATen/ops/range_native.h,sha256=1ApLv7l7bxzMwaOJY-3gCVzuSobomWnoHbiFEJG0FjQ,1288 +torch/include/ATen/ops/range_ops.h,sha256=P4aHO3dZ7gZTZX38_oGc11fWRTf3VhVox-QGCP6W6Bg,4245 +torch/include/ATen/ops/ravel.h,sha256=8Ku8SaOnywyt1SAi8YIIEoiROPDlu947WdYpvPeSAdc,616 
+torch/include/ATen/ops/ravel_compositeimplicitautograd_dispatch.h,sha256=y34AGJ-fC0ir8cxN9PK5iRh3NEVBVyFQpfdj3W1KmCY,763 +torch/include/ATen/ops/ravel_native.h,sha256=JMpvLmwhdYUbfmYNVFU-e4HZuQppQh_ujI8pb3CJg-k,475 +torch/include/ATen/ops/ravel_ops.h,sha256=GEJQStdOtidOMcd0RFKTpQN0lAW0q0X_o_V8_PhLbHs,970 +torch/include/ATen/ops/real.h,sha256=y3-7rP3pwLM2iWKJMrbdvnTe8M822e3gXFwdjikaF_Q,612 +torch/include/ATen/ops/real_compositeimplicitautograd_dispatch.h,sha256=6YNWDH2lAMTG3q3-6sMk-oGOjw_JJKHVoJB2_3UYkfc,762 +torch/include/ATen/ops/real_native.h,sha256=TyETnCQFF1x1BPiKsqR9ArZa7FvExDTmPDl8RNxlGAg,474 +torch/include/ATen/ops/real_ops.h,sha256=7TNm3m7hKJdFCaTyg5zyUKrOlArw9ktTyFZy1_ZJxuY,967 +torch/include/ATen/ops/reciprocal.h,sha256=3D7v7-Fn_JILIwBe0JgPpMWTZSUfTf3RCwuu0xhxiFE,1198 +torch/include/ATen/ops/reciprocal_compositeexplicitautogradnonfunctional_dispatch.h,sha256=J_4RQFUT9KSGXNEDQkJiwN0dTGpeA0Zk6MignB34bTM,849 +torch/include/ATen/ops/reciprocal_cpu_dispatch.h,sha256=dDhXsZ1vOEoOMKD2kbBrBP6OM6QZxFlq5Xxt26h0Dgg,944 +torch/include/ATen/ops/reciprocal_cuda_dispatch.h,sha256=dYwm58iVZsm0GZvrj7v1O7wd9fo7sQb7wg4zuw7BdI8,946 +torch/include/ATen/ops/reciprocal_meta.h,sha256=WZsPAfVBG3kJqP8pSrsNSCbLHPC7EGfVYWpnfOHd0Xc,579 +torch/include/ATen/ops/reciprocal_meta_dispatch.h,sha256=tPwqi_RTl-9qeA9HsJoMQp87fRPKA_tpD2AhbCLAZlU,946 +torch/include/ATen/ops/reciprocal_native.h,sha256=VJ_9PcoCTy1NSt_hrpTL9e0ESeaBwlHiM_YI8IgE5uo,608 +torch/include/ATen/ops/reciprocal_ops.h,sha256=u73LNWR5VoiOOR7gjlKJnGYr_kSg5sfvFkPn5vbBFcQ,2158 +torch/include/ATen/ops/record_stream.h,sha256=uS1ndp9u2FnTL6K9EqzgJoqDdYz_hNriZAFyQjLWPWg,487 +torch/include/ATen/ops/record_stream_cuda_dispatch.h,sha256=GPazjuR-Wmnn4PUBOcIDw-vdf5P7Eq9eqL5vCR1K7Rk,731 +torch/include/ATen/ops/record_stream_native.h,sha256=3v6g40OyuWsGJulrkOs3ywYOUED55qbDqP9ktz-2FBE,490 +torch/include/ATen/ops/record_stream_ops.h,sha256=Gh27W0SQZd5rLdx2D5oVsUlXXzIt5grKS_7Gjv_8hok,1002 
+torch/include/ATen/ops/refine_names.h,sha256=wdBrUpEc4Vs9_HKTDMnce2fCxvT5obND7RRv0pidzOA,486 +torch/include/ATen/ops/refine_names_compositeimplicitautograd_dispatch.h,sha256=8H9ZnsGXJhB9doMZPEOxAMQ8SvcKB-Ka58LeGL-S63U,793 +torch/include/ATen/ops/refine_names_native.h,sha256=9XKNDuYWfEzmZJ6YLcgK5PbDPW4se6vfEnKuN-A9CKU,505 +torch/include/ATen/ops/refine_names_ops.h,sha256=O2Q2ik7Bm8LOH3--P8gRk6KefNveJWxdoW_CGk8_Ks4,1071 +torch/include/ATen/ops/reflection_pad1d.h,sha256=0mnUhzPrVMuzH6CdVyOpzC2If1N7gN4rxTXBltEoy7M,3904 +torch/include/ATen/ops/reflection_pad1d_backward.h,sha256=vK4YF7tiTod-nBZI_q10tCGphJCDR1N-pYFzI2SbD_k,5067 +torch/include/ATen/ops/reflection_pad1d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=95aWO_HH8E3qaTZ7MJDPkeYHINT4hQw8M1A3o5J4Pqw,1007 +torch/include/ATen/ops/reflection_pad1d_backward_cpu_dispatch.h,sha256=AEFqDUiMaM2qny8f9ZDyJ4B92irNj7oSVLjQdswsb90,1605 +torch/include/ATen/ops/reflection_pad1d_backward_cuda_dispatch.h,sha256=XJxhGgNrK7Ea3sjjUbObaOE0JfGBavX2-YIdWmky778,1607 +torch/include/ATen/ops/reflection_pad1d_backward_meta.h,sha256=ayVug9J4niZcfwBmPnOl4h3ubiewjcDGJJgBWPdaA2U,657 +torch/include/ATen/ops/reflection_pad1d_backward_meta_dispatch.h,sha256=Sn2aR1y1iboVxz0C1yEMw_MEbBHgxbSmLpbH2nb2KU0,1607 +torch/include/ATen/ops/reflection_pad1d_backward_native.h,sha256=6myDrCT9tIS5x6LoAwoyxHq8B7SIdgEJk3x3RWB3GMo,981 +torch/include/ATen/ops/reflection_pad1d_backward_ops.h,sha256=KSxmevPRjZ7Mf-USkgIODc3x0E7jA_FhMN8NVmwTieA,2150 +torch/include/ATen/ops/reflection_pad1d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=I-LHGrJ1CA0q5q1062boCYDLfbpXVJZI8UOaU6uTUiE,925 +torch/include/ATen/ops/reflection_pad1d_cpu_dispatch.h,sha256=somMZblKAyhJAOaq6vDkT4EXtAlkptnsVJTCGty63sU,1331 +torch/include/ATen/ops/reflection_pad1d_cuda_dispatch.h,sha256=qN_8bcvpwSnl5FHYHY6rvhYWH6jFwCdgE9qUZ6wouX8,1333 +torch/include/ATen/ops/reflection_pad1d_meta.h,sha256=wD4kzix7PLFiGPamzbzI7jI01yhH_tjd8UURMSb7Z0U,616 
+torch/include/ATen/ops/reflection_pad1d_meta_dispatch.h,sha256=gDk3u5d3Vpmk6aQe6RYRor9MpXf7pdOk16znJlJ_1Vs,1333 +torch/include/ATen/ops/reflection_pad1d_native.h,sha256=4w2GbYwY17H6XZziBJy-th8cHP1U5KNey5iNVDD1KsI,985 +torch/include/ATen/ops/reflection_pad1d_ops.h,sha256=36Wtp5jW9BA_j0AkbCRHj8qUTPaKdgUnHQ8_ZE0Jdog,1846 +torch/include/ATen/ops/reflection_pad2d.h,sha256=a1dJO_id_h6AWVMWqveY1izBxW5qkRVm_1tPT-pxsKI,3904 +torch/include/ATen/ops/reflection_pad2d_backward.h,sha256=02H1VBU3OQ_oB_9vGN6yIbWM3hRzqpPGC1WiO9sWH60,5067 +torch/include/ATen/ops/reflection_pad2d_backward_cpu_dispatch.h,sha256=lQOD4hAqcxjNFI8cd7OiPUPiUdkVbMqxXS2uK87-zsY,1605 +torch/include/ATen/ops/reflection_pad2d_backward_cuda_dispatch.h,sha256=T0fmRINLtZQdJLuUq1d7gxcDCZLBnlMqFgI-N-9GthQ,1607 +torch/include/ATen/ops/reflection_pad2d_backward_native.h,sha256=1Gi4-cazA3QL1yUb4_lnVZ8TUoWqhNtLA8v7ISCjMEo,1022 +torch/include/ATen/ops/reflection_pad2d_backward_ops.h,sha256=eOLZvPMrO6LMXUtIn23e5DaAp4C7j47qI82WJxsmoAM,2150 +torch/include/ATen/ops/reflection_pad2d_cpu_dispatch.h,sha256=NP32RgPsJ-Bglx5Mttvg0DtuhJ4qJObeIkGnpj6gq3g,1331 +torch/include/ATen/ops/reflection_pad2d_cuda_dispatch.h,sha256=OwmJRckbc_C6BGuQlFzanUgBjT4Fs_ennbuQbyFAI6U,1333 +torch/include/ATen/ops/reflection_pad2d_native.h,sha256=-cd3Dc_l_JbsZJurspJF77Zl5ZJ8xS1gegz1h5AEY0w,947 +torch/include/ATen/ops/reflection_pad2d_ops.h,sha256=XeTv5_cln_R_jYoj-9hYsR5QnxJyrST9SIYwlePuQxk,1846 +torch/include/ATen/ops/reflection_pad3d.h,sha256=ucoQqgQKimedpdcoJMTotbAeejnLdU-EIfFX7juMcRI,3904 +torch/include/ATen/ops/reflection_pad3d_backward.h,sha256=UX8un91XckDHOe_Ttjk9NO_US_RBdpr65bzgbcAx3u0,5067 +torch/include/ATen/ops/reflection_pad3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=4wjSityiDuFqEswI5ejk4TVRUS9RTUZbI83xfeFBz2c,1007 +torch/include/ATen/ops/reflection_pad3d_backward_cpu_dispatch.h,sha256=WmwR_afgzOhcqnw07Xtu9ytsb08mhBSltOFRWRCes8E,1605 
+torch/include/ATen/ops/reflection_pad3d_backward_cuda_dispatch.h,sha256=5VlhwkxKeUXDvUvHodhC7C7xcKMq12I0Ip2-2RnqPpc,1607 +torch/include/ATen/ops/reflection_pad3d_backward_meta.h,sha256=hPRYsE5sCZP9WHYzojSGxLlGgJFqTSWJ0iPFysu722w,657 +torch/include/ATen/ops/reflection_pad3d_backward_meta_dispatch.h,sha256=mcyvHJqET-Iv8htPZmEkFXnOwsk0kv_OixcfQnNJmCQ,1607 +torch/include/ATen/ops/reflection_pad3d_backward_native.h,sha256=OPr0yZIzSp7wXOVGIIspG91LjRpWxTZc5YVxB1o01Xw,981 +torch/include/ATen/ops/reflection_pad3d_backward_ops.h,sha256=J1YTb5byFpKN-GVlBBAFgtKrwjKHfnDnoG2Z4mp2_Yk,2150 +torch/include/ATen/ops/reflection_pad3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Gnd4ekIbrfW0akHtc0zCa7BqxprbYKmxlG8zLxyEvrU,925 +torch/include/ATen/ops/reflection_pad3d_cpu_dispatch.h,sha256=Mo-T70F9YhfSvxKOJBTLq9UbEqENR4LWREQy1t5BTX0,1331 +torch/include/ATen/ops/reflection_pad3d_cuda_dispatch.h,sha256=_UMYGJoet3_HdpKw8PE-zJUYOUXjFNVuT31B3wdS7Zw,1333 +torch/include/ATen/ops/reflection_pad3d_meta.h,sha256=VphI-osvufiH4zFZ5hiK45alyYRygg-OHUbkuVkWL44,616 +torch/include/ATen/ops/reflection_pad3d_meta_dispatch.h,sha256=hVEo5ZqxmmKw-r1suerYQ4HmZ-BkcO5xxTJLE_W_Xvo,1333 +torch/include/ATen/ops/reflection_pad3d_native.h,sha256=jC1Gld_fmVm9oiX7YWF8EnsDvvts14MssCbhvh6AIgA,858 +torch/include/ATen/ops/reflection_pad3d_ops.h,sha256=APQ0BhY7f5cQmYfdIcssu8RYGQ0k1UAKtXcdvxILwug,1846 +torch/include/ATen/ops/relu.h,sha256=fjAA1pmSKiWgptPhsNRN1R4am81MHmIGi0lheT2Ly00,1120 +torch/include/ATen/ops/relu6.h,sha256=vxM_MsqF6UBRr9E1aj-gyDgnioh3sJwHqshiYEPIP7c,749 +torch/include/ATen/ops/relu6_compositeimplicitautograd_dispatch.h,sha256=mBhJc4t_xtPNb7dz4fXdXV_hppGR07QdI9CPd_l9uPU,813 +torch/include/ATen/ops/relu6_native.h,sha256=UrN9u7jaoHu4uqaesk5xNq75lbh5XiVeDKUxQS-Pmos,525 +torch/include/ATen/ops/relu6_ops.h,sha256=3lCkm_E-XlaKOj3UV2u4fhbowhRGGgat9lY6_wU9xi8,1493 +torch/include/ATen/ops/relu_compositeexplicitautograd_dispatch.h,sha256=HH80q0NpxEA3ZuNRMMYaMfXVV_aDQbSh_W9lhx7LKaI,863 
+torch/include/ATen/ops/relu_cpu_dispatch.h,sha256=lVcL-OUvag-ZLkRP-kSHn9nf-vKxfJUAG3281XbBERg,767 +torch/include/ATen/ops/relu_cuda_dispatch.h,sha256=RUuu-YvFcSRRfugnoQS8ITtE3QMyk1TqT_7w35POrRA,769 +torch/include/ATen/ops/relu_meta_dispatch.h,sha256=bpW3LZOp4PQbvaFxEVHfg9Atzy7-bmPd0KM1Kipg1U8,717 +torch/include/ATen/ops/relu_native.h,sha256=w6D78hzY1IjuaqyhflCMhtsmP8wMdqQViAntizPaQW0,1339 +torch/include/ATen/ops/relu_ops.h,sha256=dydIgLCn2SnQ6HaQV99rSBMWXnyH03nhUxmKVbse09w,2104 +torch/include/ATen/ops/remainder.h,sha256=FUveiHhdb_NJ_gvgLJI77LeIPiG3zqqmA3If5lolVwk,2729 +torch/include/ATen/ops/remainder_compositeexplicitautograd_dispatch.h,sha256=2uzaypqoap8QoYtG7ImwiStZJQu1PdYCzDctZ_xZNps,1303 +torch/include/ATen/ops/remainder_compositeexplicitautogradnonfunctional_dispatch.h,sha256=zV24m3xE6UJDCRv3eSF3hojV_9JuNV-iHrY1rsxYOpk,899 +torch/include/ATen/ops/remainder_cpu_dispatch.h,sha256=8ySH5a0NLQt6y3Qmw5ZyMVzxjOgWv1rh3IfE-d-Y73s,1127 +torch/include/ATen/ops/remainder_cuda_dispatch.h,sha256=O9fSel-afEwEVp2jdg3MFkEwr1y9uXQJQ0_lOIna23A,1129 +torch/include/ATen/ops/remainder_meta.h,sha256=gBuPfdMICupXdH7jmYcmUNa5sPUv656SKolVY_Bsmyk,611 +torch/include/ATen/ops/remainder_meta_dispatch.h,sha256=G8etek_LNqOEEGMnKEwaKKHWg5VEHaIdgU5EU1sW4Do,1046 +torch/include/ATen/ops/remainder_native.h,sha256=g6PHkrzN8jYif0FlaPXihrq_eECQLr4zafj3XS0Vd3Q,1112 +torch/include/ATen/ops/remainder_ops.h,sha256=g_1I-FI5ZtZvW0iYBW-dBmNK7uPt95_YB_I702zRprs,5931 +torch/include/ATen/ops/rename.h,sha256=_d40fEOVRTMyxaU7Iule0fMFIZag2yO82PNsr1uFuKY,480 +torch/include/ATen/ops/rename_compositeimplicitautograd_dispatch.h,sha256=Dr-R55heYaJOtB4_-e0I8g3GCspIQUNiuBaCjoSdEA0,895 +torch/include/ATen/ops/rename_native.h,sha256=61RLZ-3w43NxJtDMdlRE8jfn3AomPs69KyykcY9xjMU,607 +torch/include/ATen/ops/rename_ops.h,sha256=LFL0ZxuDfQCxe1qgXgrXfchaGwCMY1XUQeEXMUbkX7U,1769 +torch/include/ATen/ops/renorm.h,sha256=t6TQdRgFAyQHNt15BSHmSvuUCoYEYuMtlo5JVhyJ5no,1349 
+torch/include/ATen/ops/renorm_compositeexplicitautogradnonfunctional_dispatch.h,sha256=QgUBZ9SHYogr9rAbqVqalcWvcVUrSiiB-0hxxBhwnK4,967 +torch/include/ATen/ops/renorm_cpu_dispatch.h,sha256=se6xcBN5LSr4CTKp3cxDGSn0nsvWUqZwKT_TCEtbK4M,1180 +torch/include/ATen/ops/renorm_cuda_dispatch.h,sha256=Y1ZhbG5I9ia_oEqRUKlG0MdLeXIqh7ZCTTZ6virg-es,1182 +torch/include/ATen/ops/renorm_meta.h,sha256=YNft1tmaQM6bmmhbT9jpjsgrlI7SKlt85hmj09TycGk,638 +torch/include/ATen/ops/renorm_meta_dispatch.h,sha256=qIKDnf2I6fDc2c1bIfp5pCIEVnct3M1qw4-Om7rYQeE,1182 +torch/include/ATen/ops/renorm_native.h,sha256=tIY7NKYcnaMkTTwU4Popih4dbbkSWPpKA8U15Mj_6jA,659 +torch/include/ATen/ops/renorm_ops.h,sha256=fO_exIg7vWJFgMWtXHMr_zzOvdvt9bHM6sKNt7qgbRQ,2752 +torch/include/ATen/ops/repeat.h,sha256=JGSyYChH0kzVUAtwOQZtHmS8ddkaRmqquZfScqoSsWA,3179 +torch/include/ATen/ops/repeat_compositeexplicitautograd_dispatch.h,sha256=oZMOkEWBjGt9qD_Qx4clQL9NOBrebKV_q9hIrsw6qXk,1315 +torch/include/ATen/ops/repeat_interleave.h,sha256=FNhHSWVygT2xTXfa9H1gv6MQ75Z-TwiJvcU2PJ5f3es,8305 +torch/include/ATen/ops/repeat_interleave_compositeexplicitautograd_dispatch.h,sha256=-6bvFUuvUOvD2LOaJL7sLSbNH9NbIJ_1hw5cOKecLlQ,1284 +torch/include/ATen/ops/repeat_interleave_compositeimplicitautograd_dispatch.h,sha256=qlhtCIRjZMS5JiNmdEIK1PbzTGEU5qt2lKoUbg_9DgE,1478 +torch/include/ATen/ops/repeat_interleave_cpu_dispatch.h,sha256=7zw_EBfyrrkSMOyPIkqYuh1NxDoozsqB3u8mqPZoolk,919 +torch/include/ATen/ops/repeat_interleave_cuda_dispatch.h,sha256=sfaCKLwRsL9k56-4-yfQ7VXqA0xRvil5keAiuC46fRI,921 +torch/include/ATen/ops/repeat_interleave_native.h,sha256=TfHuEgQYDNNB7nxIVk8V333qz4fKXAXs719_KblCYy4,1218 +torch/include/ATen/ops/repeat_interleave_ops.h,sha256=AtlSFn-_EyewZBqni23BUJX9dVpFfQRShHevVRONNYM,3842 +torch/include/ATen/ops/repeat_native.h,sha256=Y-hE4u19QJKJ9Zsh9xiaWhsj19E_wkJQu94Gc8koJtg,615 +torch/include/ATen/ops/repeat_ops.h,sha256=_XVP-5j8S46-LAB_zi0IQ_zLKkJv_2Z0-d2nIxNmU_M,1784 
+torch/include/ATen/ops/replication_pad1d.h,sha256=bDrDkbSwN28vjFnApq32lLMROuFNPDYqZoEeY5JiMGU,3935 +torch/include/ATen/ops/replication_pad1d_backward.h,sha256=jKV9vP4KtQc9TPmDhHjXniCQZuloDq_-r455CIp6UqY,5098 +torch/include/ATen/ops/replication_pad1d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=L6biPZEg7OKEe3480KDefz2QY3l9WcUe1SIXuwG0en4,1009 +torch/include/ATen/ops/replication_pad1d_backward_cpu_dispatch.h,sha256=A6bDpsVajw6tiaetLap2fzPNbeUVvtNdfhBvyCAhumc,1611 +torch/include/ATen/ops/replication_pad1d_backward_cuda_dispatch.h,sha256=ahQQ9MuOSif0-tqAdF6C6-Xwu8z6erqEQRqE6pDUxT4,1613 +torch/include/ATen/ops/replication_pad1d_backward_meta.h,sha256=BJqD1ylP_HLj7bkRkrKTIlKbQC4W27OLtgSquvdjWMY,658 +torch/include/ATen/ops/replication_pad1d_backward_meta_dispatch.h,sha256=F_mHlFTrWkGe_deWecmju_tFrQm26x_Q4WuE-RcWMcI,1613 +torch/include/ATen/ops/replication_pad1d_backward_native.h,sha256=B4ooFJNkSZd_Vc4etLR1BFlUQrwkZs8KDS7-Lu0zRxM,986 +torch/include/ATen/ops/replication_pad1d_backward_ops.h,sha256=r59dd4n1UfHuvjawi3XiJPi9JACZlWQUZES-VHrAaBw,2156 +torch/include/ATen/ops/replication_pad1d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=z56ajy0Aga2UO2SPvQlo-jjMX0pt2HW37Par8tp55p0,927 +torch/include/ATen/ops/replication_pad1d_cpu_dispatch.h,sha256=3eaq6-Ei0_egbV6rj6y49GtHamE9cQ0_rZ-15UfwqGs,1337 +torch/include/ATen/ops/replication_pad1d_cuda_dispatch.h,sha256=8dnyHc6crRC3hb5xMtWecXawiQNXvzYYvmoWSwdHQtI,1339 +torch/include/ATen/ops/replication_pad1d_meta.h,sha256=oBssVA2OdTN2md0JwVlrsKjbTd__HPSGMkhMpEahZeo,617 +torch/include/ATen/ops/replication_pad1d_meta_dispatch.h,sha256=6FzzCkI2IXLRKvEHlpJWJ6jiU3JNdQ1kw3nwZuZySL8,1339 +torch/include/ATen/ops/replication_pad1d_native.h,sha256=zLalidv0bKYcObaJEWtob1-jzlNYbA0nYDAqe6upUlQ,863 +torch/include/ATen/ops/replication_pad1d_ops.h,sha256=YLT28TOwKDXsqAWk0t9woPCh4lBLXU1uiVPurSh9pbs,1852 +torch/include/ATen/ops/replication_pad2d.h,sha256=mlTdb06Vf5EYvZC0pmLARs3qIni08rSfLPqtHaBMHgA,3935 
+torch/include/ATen/ops/replication_pad2d_backward.h,sha256=wfbGEW75aLVmCwYDx6sDE6_qxP_vbteYLOvdHgcUUro,5098 +torch/include/ATen/ops/replication_pad2d_backward_cpu_dispatch.h,sha256=wzmv9ZtGaU4aaGXboHPSbXSWJ-oDvTXN1o8CIXr7Ps8,1611 +torch/include/ATen/ops/replication_pad2d_backward_cuda_dispatch.h,sha256=oXbBRAtGaUob0EZEhlf0PP-1uaT8Pi6wBOCA9waB7eM,1613 +torch/include/ATen/ops/replication_pad2d_backward_native.h,sha256=sTwYo7ydM2f_71JrskcNTJTlLS0xpIRvRKs84s3w3To,1026 +torch/include/ATen/ops/replication_pad2d_backward_ops.h,sha256=cKzfaTSm-KRPCF4ux3En58dXyTwzjDCJrqrGssP0C6c,2156 +torch/include/ATen/ops/replication_pad2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sSktEXd7PLFY9LUgT2KHGwtDLCMaQAtz-1mjKZHnIBY,927 +torch/include/ATen/ops/replication_pad2d_cpu_dispatch.h,sha256=pQEZB3EbhRcGZYqCajmWBt2YuOLWTpRCThvRiZpc29w,1337 +torch/include/ATen/ops/replication_pad2d_cuda_dispatch.h,sha256=L3xgS_4AWITvrfLUJdcZxWmZioirL7CQibs9tvY4a78,1339 +torch/include/ATen/ops/replication_pad2d_meta.h,sha256=o3TNqjj5NvWpQXLHnKZjPHAO61HCqprhZBzH7huqVPo,617 +torch/include/ATen/ops/replication_pad2d_meta_dispatch.h,sha256=Ie6ig5u5_MoxPeD-NQt8H8v2ut2-YWOx3H7ZsOeEk4c,1339 +torch/include/ATen/ops/replication_pad2d_native.h,sha256=dAovwCukbjMJpHVwQa8aVZfBGYKvi5rj7iw8J4otFjw,863 +torch/include/ATen/ops/replication_pad2d_ops.h,sha256=5bMRqrc1YCSUQmYhu-I2ZEERCSk1xDK_iUPovhShMeE,1852 +torch/include/ATen/ops/replication_pad3d.h,sha256=AcZLWUKHpp_-XB0pbL6npfz57MUk37TZsydQQ5RMrrY,3935 +torch/include/ATen/ops/replication_pad3d_backward.h,sha256=-sTTBXOa9VcveBGslFg1l2vRPBlDnZVu5SRLOZZ3Hlg,5098 +torch/include/ATen/ops/replication_pad3d_backward_cpu_dispatch.h,sha256=bvIfvjnpNuI_TkU23qn-AbZHVZZib23cm2_xt3ApOsc,1611 +torch/include/ATen/ops/replication_pad3d_backward_cuda_dispatch.h,sha256=TzedmNakDu0AkJ7a77-VfPwdcUqo2xEUdGvELmhNplw,1613 +torch/include/ATen/ops/replication_pad3d_backward_native.h,sha256=Ip7Nkx__0Ze_G5Mp2tPYEhspgVgq6kIzYHZYhasWxO0,1026 
+torch/include/ATen/ops/replication_pad3d_backward_ops.h,sha256=zw3wioKfamsbEtFHtqK0Dhqf9ezy96MXvPckzcJiEsc,2156 +torch/include/ATen/ops/replication_pad3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Tt8HnZ1OC0jx1MfKp5lUvHk1hWV66M2oX5x7I3ZnLhE,927 +torch/include/ATen/ops/replication_pad3d_cpu_dispatch.h,sha256=HGjUgC8LJ5hKGdIgBS7pQAaOZrzREHXkSpCKpBZJXGw,1337 +torch/include/ATen/ops/replication_pad3d_cuda_dispatch.h,sha256=VXym5ajSji7nIF1jj3H6KV9wf_FD9dPd3pqrZbtCODI,1339 +torch/include/ATen/ops/replication_pad3d_meta.h,sha256=T7dUWAykkpUj9EE_Rie-6jUrAp8E8EXwbw7XDEzNAgM,617 +torch/include/ATen/ops/replication_pad3d_meta_dispatch.h,sha256=WXwFp4CDgoWvUZudaQl9SYrKZGU9kOTaLDTHVnl0RVk,1339 +torch/include/ATen/ops/replication_pad3d_native.h,sha256=11_vJtErSw11PT0_qbtj3_t2Hv_6rKTFkn6fWPU8jo8,863 +torch/include/ATen/ops/replication_pad3d_ops.h,sha256=L58AO4phGQB0GdZIX637xZstI1uyMNNfZphvWhb6m6Y,1852 +torch/include/ATen/ops/requires_grad.h,sha256=BTK-9WS8MZNvmRONIeoYBg17rL33yE2dDNlmkM3TCOA,487 +torch/include/ATen/ops/requires_grad_compositeimplicitautograd_dispatch.h,sha256=JY3EEmTP0X6TiuKaZCpOJysT1agi5ePx5XIq_3MkpHE,793 +torch/include/ATen/ops/requires_grad_native.h,sha256=ilikXuahYtijf-ploNbk5zPYpD9SyG5p1r0-EdCrD7A,505 +torch/include/ATen/ops/requires_grad_ops.h,sha256=0tIFfJ8-zC8aK9ofyi-XhGM624w8X350a-IPyZOZidQ,1058 +torch/include/ATen/ops/reshape.h,sha256=q8UZRhD8xWpkT8RDF1HZubt9mXPbiBfWnlc6Ut9zuEw,1393 +torch/include/ATen/ops/reshape_as.h,sha256=jLyG1_KJy5JjeLA0XdWgN6DqXwBNvMliZr5EEqd5yUU,484 +torch/include/ATen/ops/reshape_as_compositeimplicitautograd_dispatch.h,sha256=tLZKaKQ_EQR_ZMb4h_tmwgqgLDQErSDqS2dEjiqB7Hk,794 +torch/include/ATen/ops/reshape_as_compositeimplicitautogradnestedtensor_dispatch.h,sha256=LZQ44sFn2Y7mmwqFH2GUsHn3oylyRhRT2N7t-7JlUEA,818 +torch/include/ATen/ops/reshape_as_native.h,sha256=PzKEDl8yVxH03sMya3Ib3lNNilf7QquesyLEM0uwye0,597 +torch/include/ATen/ops/reshape_as_ops.h,sha256=52WRH7KB3aiEdtQBqT9Tu_iFvnD04NYv9ANO2Lshf_k,1071 
+torch/include/ATen/ops/reshape_compositeimplicitautograd_dispatch.h,sha256=EvhmAAjIA54gEVG3l-J8cdGrfHYsaC1AGh2FfV5AB10,877 +torch/include/ATen/ops/reshape_compositeimplicitautogradnestedtensor_dispatch.h,sha256=HigwPxjHEUwWEkIgGmQAH_xX2iYk_03CITv6UIQGKSI,901 +torch/include/ATen/ops/reshape_native.h,sha256=ZeL3cWlBxJkI0nY-vhdCtUPxI4VbPI82nBwe3JFOy2Q,607 +torch/include/ATen/ops/reshape_ops.h,sha256=ZNDgMIWM1vs2PSw5OpJ6ZIzbm1b0SFD6GBJo-uH8uPo,1067 +torch/include/ATen/ops/resize.h,sha256=O995d3r_Fhj6BqAiNkwNA5DufnOKUuUz6xOlxXbKINo,5360 +torch/include/ATen/ops/resize_as.h,sha256=KAbZWcc4N5N-HV2lnw-16RkMHGyffx_-_vaUtEC6FOA,1936 +torch/include/ATen/ops/resize_as_compositeexplicitautograd_dispatch.h,sha256=IsOtUhQ_aNdtfOzf7EGx6jHhbp_e9Yz-JJQbqaZrXR8,1393 +torch/include/ATen/ops/resize_as_native.h,sha256=2vusdq1AmAqxEJYSeUwptXQcMOzvE3Dq3WmUwEd_MSI,914 +torch/include/ATen/ops/resize_as_ops.h,sha256=2QzD3NIofTl7NOOmK9ryDyJI_9rPeaXBNID9BFerxrc,3049 +torch/include/ATen/ops/resize_as_sparse.h,sha256=PEyYS6NZVsb5NxtHIrRLdvp8FguB7t3PSLksoM5wL2w,1584 +torch/include/ATen/ops/resize_as_sparse_compositeexplicitautograd_dispatch.h,sha256=QwJHx9R70LFmcTuKjLcFref0C94ZIafpD3UitdUo7bs,1074 +torch/include/ATen/ops/resize_as_sparse_meta_dispatch.h,sha256=1zzp20-5piFKDb5bpBXioZ3nBFFsPNifSErixgvrOhM,774 +torch/include/ATen/ops/resize_as_sparse_native.h,sha256=K8pZqGgeJHbraK_S5EJCbRheyHtw9USyjSNsG5jtA_s,875 +torch/include/ATen/ops/resize_as_sparse_ops.h,sha256=iq1VQBvyKpiNa-sUjutWN4KQnMfTixLdQWIPbAXwE1Y,2605 +torch/include/ATen/ops/resize_compositeexplicitautograd_dispatch.h,sha256=W4ok_4RLyHqNDhsAZPg4tD5V4Qoi_ljukztknwm27U8,1699 +torch/include/ATen/ops/resize_cpu_dispatch.h,sha256=Stxen3YB8yc-BAjHaM75WS3IubKjEXtrEVCRhlZGOmM,975 +torch/include/ATen/ops/resize_cuda_dispatch.h,sha256=g1Hz4XD8OZdBvHy1elIBVPO0T1cwgWhFNIp3sQo3lHM,977 +torch/include/ATen/ops/resize_meta_dispatch.h,sha256=gDi16_RR1gAveIBj09rBdbfsefgOi9Q-EF1oPwDImZ0,977 
+torch/include/ATen/ops/resize_native.h,sha256=-A0itv4L8voh44B8J1rQYiOTEPTxXFWxpKwDbtA7-yE,1531 +torch/include/ATen/ops/resize_ops.h,sha256=VvL8vSPorUOlrIsLXekCgXUcr_41zKs_pjfUEFoMlVk,2965 +torch/include/ATen/ops/resolve_conj.h,sha256=VXpRlGMt9fqymQYd8uar3JXLrCWWDGjYTyaiTP3zBJY,644 +torch/include/ATen/ops/resolve_conj_compositeimplicitautograd_dispatch.h,sha256=ZmkYQAlc3Apqab1QViSQ1RMQddMmlr51mmOZTGNrwlQ,770 +torch/include/ATen/ops/resolve_conj_native.h,sha256=cCRQwgHT9nWIUNsXdjW8NHLBofK9RlRwxBXk4melD8o,482 +torch/include/ATen/ops/resolve_conj_ops.h,sha256=jTmFjOO071P9mMZUoAKJaXBF2sJi4evjvmz-qiAHth8,991 +torch/include/ATen/ops/resolve_neg.h,sha256=H4OvFGmqIAyur5geISKGs-GR3_Sy54btuCmfmUvb2zg,640 +torch/include/ATen/ops/resolve_neg_compositeimplicitautograd_dispatch.h,sha256=11SrCruZeI90vY4Qwnsbj2XCnobYFpqT8xMbscyXtN4,769 +torch/include/ATen/ops/resolve_neg_native.h,sha256=vpn1xVuiRJ_WgU5S07eiV9VjVHhg1ve6bSQtmlQwjtI,481 +torch/include/ATen/ops/resolve_neg_ops.h,sha256=TrfhdG6oYhYPma8CYIkIE2b4YLs39q7ZWEwWqEeugoE,988 +torch/include/ATen/ops/result_type.h,sha256=5Ukw86XWkdX0gw9tEJIukXBWjg5mAdhHUGefBB6PNfg,1424 +torch/include/ATen/ops/result_type_compositeimplicitautograd_dispatch.h,sha256=khmrnKFhxJ1uBKU8OMoPD_OStz5DA0jWhmlVpVRmW3o,1078 +torch/include/ATen/ops/result_type_native.h,sha256=ObJjd_tN-pUod7ccbZyEwelHxeHZRoq26eWH-0By9CY,790 +torch/include/ATen/ops/result_type_ops.h,sha256=eX62eU_u8qSg2j7kf1qQU2bqfIrtsO1Jm_JkIrwf0Ls,3192 +torch/include/ATen/ops/retain_grad.h,sha256=VO9dQ3mHmHETtU4dkUHxlz5Wrdv3WlgtG70aAnAuoyw,485 +torch/include/ATen/ops/retain_grad_compositeimplicitautograd_dispatch.h,sha256=w1VMAh6zxvN6UsSRj3UtI-EgSKIZL4YgC0zX4olW7_E,757 +torch/include/ATen/ops/retain_grad_native.h,sha256=3S9KQTYMTl0ZiTObCtedCVwLVz96ZNOVCX5okH4-hx8,469 +torch/include/ATen/ops/retain_grad_ops.h,sha256=ljyXOOHWHCJ4OqecmND5Q8AitaSvaYee_LE45LxcnlE,946 +torch/include/ATen/ops/retains_grad.h,sha256=9IWK3jaXdeVoGgSNfeqR08DADY6gTcuuCXr-3x3B27U,486 
+torch/include/ATen/ops/retains_grad_compositeimplicitautograd_dispatch.h,sha256=enUUiMkfZIH4mlTJNtUccCS-MentCylWDuXCVRaw-04,764 +torch/include/ATen/ops/retains_grad_native.h,sha256=Sm27qZ455a6iPbyyt3BewNU-QyVlgtc6wsvQXYqbP6A,476 +torch/include/ATen/ops/retains_grad_ops.h,sha256=6X3W075uEX5zsQKkH3beA9nsSNkbpBSwUdk4jRbbi20,965 +torch/include/ATen/ops/rms_norm.h,sha256=Kvu-HS-_g67RNKip37gf-SSkRfKdRZE0LPYNoVLbMcE,843 +torch/include/ATen/ops/rms_norm_compositeimplicitautograd_dispatch.h,sha256=rENXCOEkAb8RSZ4oCsgACc7t3-UUOyD9gZbIZepAoSE,892 +torch/include/ATen/ops/rms_norm_native.h,sha256=ugjA7ukHkVWUBG2SbTUmXzKsM4EmBmw04L1pPIR6XKw,604 +torch/include/ATen/ops/rms_norm_ops.h,sha256=IRYlfJ31TSyFHC09FForClmfSE1XhmqkZ7Inm2_dzRI,1331 +torch/include/ATen/ops/rnn_relu.h,sha256=cmRKkDjWmq2h4A3YWMuKjYpxWuVJvGrbXw2WlW0FSt4,1584 +torch/include/ATen/ops/rnn_relu_cell.h,sha256=OiOyGqkJucuXs7nH5xgSrOCCwiuwh7XIdgMpvQxRQuI,911 +torch/include/ATen/ops/rnn_relu_cell_compositeimplicitautograd_dispatch.h,sha256=A8AuWIBfIFArHZqvXS5LD8TBUk9f9iIKoLS3GrnjDU8,935 +torch/include/ATen/ops/rnn_relu_cell_native.h,sha256=LxW_-REgf9U5C97J22IZvQ4pR_7dkXLtWerEzHOLusM,647 +torch/include/ATen/ops/rnn_relu_cell_ops.h,sha256=IsXlIzqexsgdkYwwqSft_vf94NljlAvoCAdZVbrP3Bk,1514 +torch/include/ATen/ops/rnn_relu_compositeimplicitautograd_dispatch.h,sha256=vvp8xf-n-jkfmnmc-H19KrWJtAgSauCP9LG0-svyghI,1185 +torch/include/ATen/ops/rnn_relu_native.h,sha256=prm69DndN_DFG9Wn-fjdWyIibBTPpZkQybN8R7nHGDw,897 +torch/include/ATen/ops/rnn_relu_ops.h,sha256=J6LwPVOZ1pgHnVmIIzZEn__8LfEvP_KL8eCOUIuByA4,2762 +torch/include/ATen/ops/rnn_tanh.h,sha256=xmHVNRv6h9VCH80Tw-9exufYqL-iXhbenTaDr9Nxkac,1584 +torch/include/ATen/ops/rnn_tanh_cell.h,sha256=94f7Cck-0jmisOoFTRcyHvK-xzm4UR8YsAKTJU8R1LE,911 +torch/include/ATen/ops/rnn_tanh_cell_compositeimplicitautograd_dispatch.h,sha256=-XxQdG1L4xmwca0ACRE6-yipEuZg8laCa54GxRFnJhM,935 +torch/include/ATen/ops/rnn_tanh_cell_native.h,sha256=fyoMaZJUI9zgEdcAbPUe2x8DppXy2_uLHgxZWGxMrlM,647 
+torch/include/ATen/ops/rnn_tanh_cell_ops.h,sha256=k17J5GzZKSAIVY7g_nbh-ZNAQ_bcVUcHZPQ3GW3YQq8,1514 +torch/include/ATen/ops/rnn_tanh_compositeimplicitautograd_dispatch.h,sha256=ZuKpLpwAz0scQRX62m2bFRa7s_Y-JhYpRMrMiZC7fEU,1185 +torch/include/ATen/ops/rnn_tanh_native.h,sha256=oVRzntaGxMank2u4FFfLjdrM5Z_YR-YM5k1g17sh3yI,897 +torch/include/ATen/ops/rnn_tanh_ops.h,sha256=IL50LlyvfkRGYvNqcfdpGLZRThTcodIRIqkyyJsFgsk,2762 +torch/include/ATen/ops/roll.h,sha256=CPIZQ68NdYY02K8S8Uxt1wDUL2_A4pRsROV3SOj_OJQ,3958 +torch/include/ATen/ops/roll_compositeexplicitautograd_dispatch.h,sha256=hBh_kkbiLd0M90PjUNrEYzzrjjJvWGihDHqs2kl6Q4Q,1228 +torch/include/ATen/ops/roll_cpu_dispatch.h,sha256=axXRlKC24d29HC0DBLbjdbS34lrRZu-qBSMktf0hkZY,879 +torch/include/ATen/ops/roll_cuda_dispatch.h,sha256=3dpd8y2z6ed-KRLK4ZIbB8waA2n6wzilPdNOzgZij1g,881 +torch/include/ATen/ops/roll_native.h,sha256=ga8SCUcAJgT3ZmRlCDRn0EdzDAv5HABBYY24iKrpiXA,762 +torch/include/ATen/ops/roll_ops.h,sha256=nlXnJXlLiNaSDtWvaMF-akUD0oRnVlkz5wSVyxCij9I,1922 +torch/include/ATen/ops/rot90.h,sha256=2fSpMwA2pmhbq2-SAgz8guQ8pkMfYOTig6jdOevZGPE,1217 +torch/include/ATen/ops/rot90_compositeexplicitautograd_dispatch.h,sha256=GFYfZF7RcVShlT_17llI56R8iu2nmXXoqIl5Ba6iKYc,1033 +torch/include/ATen/ops/rot90_native.h,sha256=lZAGPdSefVvfdczFGHILaJmd529p75kVAAB3Z3OelF8,626 +torch/include/ATen/ops/rot90_ops.h,sha256=J6g0A1czyUqj8hIQnHqzbzv5JyW4iNzsH5ENLy4OB7s,1822 +torch/include/ATen/ops/round.h,sha256=iVgK12qgpfqrbODlZn7XTmR6wq3f6SfcYl-g_sAX6qY,2034 +torch/include/ATen/ops/round_compositeexplicitautogradnonfunctional_dispatch.h,sha256=0V2NnobNWh0sQL1wHRn2TTH4zA3CTepuaCUn6y97NeM,978 +torch/include/ATen/ops/round_cpu_dispatch.h,sha256=vOTwuJ8VF-z9QHhiAnvAF6Q-XMY3FXZ4qz82uwVcj4g,1254 +torch/include/ATen/ops/round_cuda_dispatch.h,sha256=Cuvv4uKSE5Mp4QG_WozfoKgIJUrsnM3TKH6ybMZz-9o,1256 +torch/include/ATen/ops/round_meta.h,sha256=pxqATgItQcl6Cg1eoQJNV7hvGX0nqhFgWVjcmS-ltJU,718 
+torch/include/ATen/ops/round_meta_dispatch.h,sha256=SMBB9WAFYFPk8LF8UOJuWZCWtjDvsJyww0jMRYRSiPU,1256 +torch/include/ATen/ops/round_native.h,sha256=cDBfv_Ed3WcYDF2Ub4ZeqnJVvzTDNy5XS3RTDaGUSBk,1182 +torch/include/ATen/ops/round_ops.h,sha256=6hMB-on4Yit20s9lDRKHrNxY6BhKsP2YWuv6EsgkOWY,4054 +torch/include/ATen/ops/row_indices.h,sha256=n0a6og62dhQDfSs3z7bGpnIDckr9gt3mrAdJ02ryO30,485 +torch/include/ATen/ops/row_indices_compositeexplicitautograd_dispatch.h,sha256=XRkeOOlfX0uG6In8T9KWjC1NzlrLAiT44gKXzuYmlIc,769 +torch/include/ATen/ops/row_indices_copy.h,sha256=_rn4jIXrMW9016suiQ7Ar8FfXlz5RFrUxymq8Pf5iQw,1104 +torch/include/ATen/ops/row_indices_copy_compositeexplicitautograd_dispatch.h,sha256=dG-jU5bOeKZfsSoHa-IfF5QpatQOwe7wi3pV0ue4_7c,887 +torch/include/ATen/ops/row_indices_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=774cIYq8ig8CUXei9RCUIpVv02qOBZIdZH3SPDj6aJc,800 +torch/include/ATen/ops/row_indices_copy_native.h,sha256=govFMfR71N4-uNXkHrtbxZmw1LGoZyfOOu7WMuGYr0U,574 +torch/include/ATen/ops/row_indices_copy_ops.h,sha256=Qxlj6JBm4omJYK1CX8At7hfV1lA7TraxMZb-GwBbapM,1650 +torch/include/ATen/ops/row_indices_native.h,sha256=aJ9ulwVaASpfF7yJlP3dNTxNyFP2MdgpLBIQItPM_qU,559 +torch/include/ATen/ops/row_indices_ops.h,sha256=LujL-zl07NCnNbMFftiN5FqwRLMSXyrkwhnyBt8cPMs,988 +torch/include/ATen/ops/row_stack.h,sha256=QV7vGO9vE8r65F2t7Dhn0LNm-pNXLAQ1MPLWy2DfK8M,1055 +torch/include/ATen/ops/row_stack_compositeimplicitautograd_dispatch.h,sha256=-TXconVBv4tdpJS53iuXvkOfIA0caAzUdeWWM9YDCXo,927 +torch/include/ATen/ops/row_stack_native.h,sha256=Ea9aMgqEn_L7WhEwYYr34_i5n_xC13kLQA51DUehvpM,558 +torch/include/ATen/ops/row_stack_ops.h,sha256=_vTJsAbqZq7QCYTy_l2zuzycUBtly5YohLqoHzMelQU,1606 +torch/include/ATen/ops/rrelu.h,sha256=Pt_eYuDKkeXA53tZwihCfBEP8Rb0H2iZ-DBvO6GSBa4,1331 +torch/include/ATen/ops/rrelu_compositeimplicitautograd_dispatch.h,sha256=sOlNAtPqCLuCdxJiyZj7xjj_Q8ThjXPvbW4Ou7WM8H8,1123 
+torch/include/ATen/ops/rrelu_native.h,sha256=riKmqPC41qtSGcT5jKTPs9LfFSl0eUZoKEHLeCpkYoQ,835 +torch/include/ATen/ops/rrelu_ops.h,sha256=kp4gP2LYfi9sYhad8TgJw2pIMddR-9eJBRwj3iDRnrA,2287 +torch/include/ATen/ops/rrelu_with_noise.h,sha256=ZUA4SC_PCHUqflFPDAV2f3CbrnorfuvhBs7NUwrqRnE,2582 +torch/include/ATen/ops/rrelu_with_noise_backward.h,sha256=N2HDI6_LgIajitPNZGLuOF3849j12wN4FvjbzL6jxLc,2106 +torch/include/ATen/ops/rrelu_with_noise_backward_compositeexplicitautograd_dispatch.h,sha256=uD68SYGhj86gXjXrfyu3f2towXP0Ya01yLDUj_E_gXU,1416 +torch/include/ATen/ops/rrelu_with_noise_backward_native.h,sha256=BUeh4nsvnoLKF_IE5FtA3IDe3mT0SYloj_akb7CKOwA,884 +torch/include/ATen/ops/rrelu_with_noise_backward_ops.h,sha256=XcOxEn-MX1Cfi9m-x9w43ITEGgIM_dZck8q2wsIvIT0,2668 +torch/include/ATen/ops/rrelu_with_noise_cpu_dispatch.h,sha256=VZ4074IayBe3RhUQSyXRkZ9rUIjFiCZnxaWwdnan9PM,1646 +torch/include/ATen/ops/rrelu_with_noise_cuda_dispatch.h,sha256=oMOdiVawaEiSZpGR9SyoIbNDMWdVM1lfU8xQIkRpq58,1648 +torch/include/ATen/ops/rrelu_with_noise_meta_dispatch.h,sha256=HgvJPu6jzr4BHpRwQHNih-JUW21MiGnrQNRT3aIeRTQ,910 +torch/include/ATen/ops/rrelu_with_noise_native.h,sha256=Itz7egfgJ6mCRfgXzTZmnL3Oo2MI6FL8iovhCI9YYjc,1869 +torch/include/ATen/ops/rrelu_with_noise_ops.h,sha256=ea51yil0FSvrXxxwfSu2Evai8Dl8WlZ-uc-j_urmPjo,3661 +torch/include/ATen/ops/rshift.h,sha256=QElrIEzYBx_OwkAoCx7ylscM5gwRx9WpFnBpjcIn0-4,1967 +torch/include/ATen/ops/rshift_compositeexplicitautograd_dispatch.h,sha256=MWeyIz9AL6i9msqCJ241dd9oz5P-cxSURc4eXaCcdpc,1144 +torch/include/ATen/ops/rshift_cpu_dispatch.h,sha256=j6s6RFQr6206UMOSBz36xXrtNyzTZrI9YMZ5J54-jdY,996 +torch/include/ATen/ops/rshift_cuda_dispatch.h,sha256=Xf3yEHw8BYpQjKTedmLqTrXiUtydmtCNvhRb7Xi2was,998 +torch/include/ATen/ops/rshift_meta_dispatch.h,sha256=yaz-qqIvrJamkQNjxL8_MvNMUqM6-c2TAAL_vjKvG_E,830 +torch/include/ATen/ops/rshift_native.h,sha256=TCGYwJxzwG_lPWSb0qI6_lpAXcVwjLPvtYTGdx1wfRA,982 
+torch/include/ATen/ops/rshift_ops.h,sha256=H1W93noOghP6stSWJzD6xlFjt0dhefH1uSZZXE_MTVM,4520 +torch/include/ATen/ops/rsqrt.h,sha256=M_pJVdSbdEZhkft9HddgUre4tWhrWaA7_qrUHTGKIOE,1133 +torch/include/ATen/ops/rsqrt_compositeexplicitautogradnonfunctional_dispatch.h,sha256=fh3O5dkf6iLgqhP6Ka-pYX46GXAm6TZKasT8Wm7YaV0,839 +torch/include/ATen/ops/rsqrt_cpu_dispatch.h,sha256=ghHcwbzWagekZD7IWJrawN9nnqjLtT8fzj7mkwZXPtU,924 +torch/include/ATen/ops/rsqrt_cuda_dispatch.h,sha256=4cXGR1hqEhhL9F0LCgmL5LTGCVYA8JIUcD-e-kkZfX8,926 +torch/include/ATen/ops/rsqrt_meta.h,sha256=HKoHG4GW-iN7s6xPj3NXN82eGUmZv4iAYxPMHAEIPBM,574 +torch/include/ATen/ops/rsqrt_meta_dispatch.h,sha256=v-B-lt4IrnhGaVcmfiXJDbooi6DBtl7ML7CFulA3rl0,926 +torch/include/ATen/ops/rsqrt_native.h,sha256=wqcI6080AbFPyz5o7ygaaNIHsyhzC-43ynkibbXTGL4,593 +torch/include/ATen/ops/rsqrt_ops.h,sha256=wGXLWI-dBhOiDmN0vrFpZWmQXWnhCu68tI-bsB5PWZU,2113 +torch/include/ATen/ops/rsub.h,sha256=GhHr5EMCFVNgpS4Yo-2Bxz8wzks5ZaiiDWm65I1FDI4,2162 +torch/include/ATen/ops/rsub_compositeexplicitautograd_dispatch.h,sha256=GRZhj4F30Cf_B3pdhBnM88rAgEkKR-psUEn9RUn3Ux0,1334 +torch/include/ATen/ops/rsub_cpu_dispatch.h,sha256=VNc_G6174WTu0bOkSc4fObglbM5NPVZActMqQ4E83j4,772 +torch/include/ATen/ops/rsub_cuda_dispatch.h,sha256=zD99Ts02VA4iOECeHpHlCP3DCOT95NobE0N_HuA_YdE,774 +torch/include/ATen/ops/rsub_native.h,sha256=8q8ESM1DPltVTqJ8Cdzp9hJWJ3mg0ulvH4U1xtMRC4c,904 +torch/include/ATen/ops/rsub_ops.h,sha256=NXAFxwn8-EmaLtZbX2A8hvfRw_jKHDH642QhCzgTYHM,3503 +torch/include/ATen/ops/scalar_tensor.h,sha256=cNf1aQH0bihQpSX77En_PMp_rWx4q61IwOaDht-Nc80,1711 +torch/include/ATen/ops/scalar_tensor_compositeexplicitautograd_dispatch.h,sha256=HCOV4UhfkXHIt0f0NRE_5T8OsvvbRRHC1RYeLeOIjnI,1166 +torch/include/ATen/ops/scalar_tensor_native.h,sha256=1LIkYhG0LhoVlRc9Q3qxuEkcwDOAzw7d9vaJkHY6HVQ,719 +torch/include/ATen/ops/scalar_tensor_ops.h,sha256=UzJ1WNz0m6KyLqzRnEB8KPbtpNzsnmGnoFmdue6t4hw,2110 
+torch/include/ATen/ops/scaled_dot_product_attention.h,sha256=GxeAo5z7U8H1Us48Kb3f6ksKrrC31anBqlKsbvzADBM,1120 +torch/include/ATen/ops/scaled_dot_product_attention_compositeimplicitautograd_dispatch.h,sha256=2YjeJBWw-tfcOT8DoVhC9J3mkH3MV9dDFJDT3N2uLA8,1000 +torch/include/ATen/ops/scaled_dot_product_attention_native.h,sha256=UYPo0Y6lR1c-ERdYaachCYfG0A8oNpryZyc9kisshpU,712 +torch/include/ATen/ops/scaled_dot_product_attention_ops.h,sha256=wU-J1k417TM6Fa4C_Y_nN7rywC062uG4pZxE0Ej87bI,1654 +torch/include/ATen/ops/scatter.h,sha256=n6bB8-vwzmdeJWfiWQDE9aEmlls4LdlAt-FkWHOSM_k,5068 +torch/include/ATen/ops/scatter_add.h,sha256=DVDsoU_LPp1dpeaIUxaMSaiLpsqBrR9AukC-XFIzcp0,1688 +torch/include/ATen/ops/scatter_add_compositeexplicitautogradnonfunctional_dispatch.h,sha256=m9ZRGQ5-e9ted2n75uGyI8pQyj2r8YvnhZJppfisSQI,977 +torch/include/ATen/ops/scatter_add_compositeimplicitautograd_dispatch.h,sha256=kTHxoN40Eqy0DWatvfY4OnbMZbDD2yzNMeZu4PQ5QQ4,836 +torch/include/ATen/ops/scatter_add_cpu_dispatch.h,sha256=NqBR0dGWODpIP4jns2Zg2AnCe7hAA5yTd9rhi3GuB2o,1200 +torch/include/ATen/ops/scatter_add_cuda_dispatch.h,sha256=NQ_hwDwRJBFMfhsySHaJKXbqR4scjgiJRAxk924FQWk,1202 +torch/include/ATen/ops/scatter_add_meta.h,sha256=_r2kVYjxYTmkAPLlxKnEk1B1T4ivaiwKxi0DXcyApyU,643 +torch/include/ATen/ops/scatter_add_meta_dispatch.h,sha256=N4qbckPkeQqmGDmZQKkifoYx3lP0ngyFGI6_LnnVoCY,1202 +torch/include/ATen/ops/scatter_add_native.h,sha256=KoNQBgT6Fpi9nlR4a9jvHC9l9rAsjPEuyHlhutx_v7E,796 +torch/include/ATen/ops/scatter_add_ops.h,sha256=BS-haug73qaPcCM3hUXYShzK2VgdqNZUf6P2OtjW1yE,3594 +torch/include/ATen/ops/scatter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=oSWi6DoH_X37iafenDTcv72XwWPPONYMrDAWq2aw3cY,1776 +torch/include/ATen/ops/scatter_compositeimplicitautograd_dispatch.h,sha256=-XLZyuVGdwe61bYI2MnKM7SIFi68L9OyUxSXfJ2jWUo,956 +torch/include/ATen/ops/scatter_cpu_dispatch.h,sha256=5QwX_CejBN6WsbaOCFZwe306aR6jItzF1Nx0c5zM7U4,2954 
+torch/include/ATen/ops/scatter_cuda_dispatch.h,sha256=Xid702LdxbTSPhBydoFnBULtsdy2rfEjeObx7UmGLtw,2956 +torch/include/ATen/ops/scatter_meta.h,sha256=Mlay0bpeEapjWrFXp7yf2QkbzCdFYURWIDunW-3gABY,1269 +torch/include/ATen/ops/scatter_meta_dispatch.h,sha256=9ztDwGQFf1ZSEdOgBRVd1kZ83TzTIAGx5yUBuUPtkEk,2956 +torch/include/ATen/ops/scatter_native.h,sha256=3Fcet0F1TJVY2vLNpUZsBoHFUWzjddfOtMyJe0ZpSo8,1640 +torch/include/ATen/ops/scatter_ops.h,sha256=yz7XBva2kK4zlAyzvI-G7q-WAd2K9fzQQSWpJqGZnic,12138 +torch/include/ATen/ops/scatter_reduce.h,sha256=tASCUZSFXRTlPSH4MgzFdMYnfWmFUA7yOK0jazoOTPQ,1772 +torch/include/ATen/ops/scatter_reduce_compositeexplicitautogradnonfunctional_dispatch.h,sha256=6mqUFSz2AZR6e56S_5WA0j8SinAIg1MmiOi6po4oMp0,1081 +torch/include/ATen/ops/scatter_reduce_cpu_dispatch.h,sha256=fZM15D2sVXCTL4FOeaicUHCwwJg5txm9aWU-Kqe7tL4,1403 +torch/include/ATen/ops/scatter_reduce_cuda_dispatch.h,sha256=MygIO6BKIsw-SHGUOYrAaEr2-QPBxA74J-cekbMkwiQ,1405 +torch/include/ATen/ops/scatter_reduce_meta.h,sha256=mGjcYKAUKHJ-IuqCVwR31539e9LqwZUmwuc5_jKdr9U,694 +torch/include/ATen/ops/scatter_reduce_meta_dispatch.h,sha256=5HzJlOg31A6DsoriYqxSjbtyq4x1e8KanRLVJUKgADE,1405 +torch/include/ATen/ops/scatter_reduce_native.h,sha256=B8q_zwH4IZ8V40LXMkypcUbRUHFKO71Tv40eeKMe0wI,731 +torch/include/ATen/ops/scatter_reduce_ops.h,sha256=AGP-2W5Y6F-oh49qzdSnW5R_guYBjQbRdBGFixvqlo4,3308 +torch/include/ATen/ops/searchsorted.h,sha256=Ry4WIs29PlSZ7E9ohaJj9TQXCx2R8vVVg7b42kv_h1g,3641 +torch/include/ATen/ops/searchsorted_cpu_dispatch.h,sha256=qFhm5VwNjvzseKiYJ-YT1oeIlYvyYziM1ximmStArLM,2132 +torch/include/ATen/ops/searchsorted_cuda_dispatch.h,sha256=JCsiyChmMxd4eSi2dGdcf41wlOXn1HKrb-yAf1wu-C8,2134 +torch/include/ATen/ops/searchsorted_native.h,sha256=HjB6ns_jMDtpqThvi9tntzY43xgX6KNuw94jtvzygW0,2338 +torch/include/ATen/ops/searchsorted_ops.h,sha256=QPZSuSTpxRZuEQRkO5HHHvBKapxfx9D3blFtp1pryN0,4910 +torch/include/ATen/ops/segment_reduce.h,sha256=B9zGaftEmyMwLk_ccOrXoqIxUDo-yw2UQL_DdsgdHRw,2414 
+torch/include/ATen/ops/segment_reduce_compositeexplicitautograd_dispatch.h,sha256=bbbkW5_Em_F7RukgvcbX3f3ncMzQ13Q7rRAz0vhjXig,1379 +torch/include/ATen/ops/segment_reduce_cpu_dispatch.h,sha256=B9WEYbiQJBVYUrlHioM_NRJcLkgt2HwELoAMQUvwrrA,992 +torch/include/ATen/ops/segment_reduce_cuda_dispatch.h,sha256=OfKPs90RFx3oaWs3pKk34ebclIRByWn7ExJWSpI0CYs,994 +torch/include/ATen/ops/segment_reduce_native.h,sha256=lBR6lLHLWT7U-QoV43ylH8dRz-X1HrQ7aRQHxGKv1lU,1073 +torch/include/ATen/ops/segment_reduce_ops.h,sha256=vwrbNvKRumtfIwTUZth5acYhEBBBdxK22MnOIt6qLpQ,3193 +torch/include/ATen/ops/select.h,sha256=8sazcQXXq_HDOFUyfq9XO0SuoaGMfIAxzWcmji-JEEo,1633 +torch/include/ATen/ops/select_backward.h,sha256=Ew87u20sEpj9E4hZL9G6XHuE2RA1n4lse8w1IfMqsFo,4839 +torch/include/ATen/ops/select_backward_compositeexplicitautograd_dispatch.h,sha256=aVLl5KjFA6W3K-1jAN7-mxJ99d5ti5seAoejVsSGm_8,1346 +torch/include/ATen/ops/select_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ywf5WDI1JxlJ9Pq0wRErTcivTjxUoSBbD94yu4o9VSM,1005 +torch/include/ATen/ops/select_backward_native.h,sha256=ml2b9p9mM-79VyVYYFJ0LQXfBxy-tpaGdWxlPl-hqd0,730 +torch/include/ATen/ops/select_backward_ops.h,sha256=RjdzlR9-O3XccRC6EF5HAOLfPfMVtFqm5BBQFqR4CbI,2122 +torch/include/ATen/ops/select_compositeexplicitautograd_dispatch.h,sha256=DnptFiET-zNkqvxgp5MfPtggpQA9Ie3X1YRJRs3x1ok,885 +torch/include/ATen/ops/select_compositeimplicitautograd_dispatch.h,sha256=Qin5wY6OUza9eU_7AQU4s9VuL0IssPn_BY91alwldq8,796 +torch/include/ATen/ops/select_copy.h,sha256=1uIvANDYSnVnmY6qU1klHpeetE3BpW9EjKOYB3aEJUU,3761 +torch/include/ATen/ops/select_copy_compositeexplicitautograd_dispatch.h,sha256=nShinM7M8_kqNYs-2E1ZskrTLaHPHO-p_h13mLXXvCg,1178 +torch/include/ATen/ops/select_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=H4YZEjo1Pj4YJOmidZe6rzs3v4ct0LEwwtnLix0GCFg,921 +torch/include/ATen/ops/select_copy_native.h,sha256=zxspNdk4wcnxFGCKjmAQdhhk0UWqxVFgeterQD6QSSU,744 
+torch/include/ATen/ops/select_copy_ops.h,sha256=5ijpkQvZqE7JuHLCX8wpfj99RFZn4qach-eJMJn5ezc,1861 +torch/include/ATen/ops/select_native.h,sha256=Spz4eLp9HAV5Zy4VmIhg49KtHZ8drg0RtSApQwC5n7c,783 +torch/include/ATen/ops/select_ops.h,sha256=oEkdMyJLktZaUC0xfG4vkrIyvh3hQEp4ndW9Yk7l8V0,1765 +torch/include/ATen/ops/select_scatter.h,sha256=2cPgUVVDos48xtTsruOgjZEf0m717_2pnbWCdXkWcBQ,4202 +torch/include/ATen/ops/select_scatter_compositeexplicitautograd_dispatch.h,sha256=x8ioi3LlnaGyLzLXfhluuyal4yC7T22aCV0zUVdhv4c,1286 +torch/include/ATen/ops/select_scatter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=vhYMKNGEITJ1WVIWUKsB0bl-xduMSyxb89cty7LcAqI,975 +torch/include/ATen/ops/select_scatter_native.h,sha256=0arNBNw7toPpkkz81ECTNYqdZYQPw88fDhRltTpw6kU,696 +torch/include/ATen/ops/select_scatter_ops.h,sha256=L2lbRix5yyuxZ6pvJ5P2wMaM0RvsRItbJIV5IprdWRU,2016 +torch/include/ATen/ops/selu.h,sha256=AQQlKVZfr0Bg0i46jDC9JRoM0y8Uuhc8aDXmUPTaxKc,742 +torch/include/ATen/ops/selu_compositeimplicitautograd_dispatch.h,sha256=feIEgPVppKywSSF1J0UwT2WGtx9Fl8xcApm3uY3F9Y8,811 +torch/include/ATen/ops/selu_native.h,sha256=bkKuqmhOCxmMbLt6YA9ttJmDTLU0XaMUp87xQvNLuvE,523 +torch/include/ATen/ops/selu_ops.h,sha256=SI7dM6vNtooa4egHeMbMj6T4z1mPasE70ryaEo-kSrg,1487 +torch/include/ATen/ops/set.h,sha256=w-x0OZpPeJ-suQRw_01TITotk0oXY79EK7GYEXL9ojA,9237 +torch/include/ATen/ops/set_compositeexplicitautograd_dispatch.h,sha256=uffcQLn5cK01e5EMxZKzJJOtVep08CiB8xPQ4Mw0X48,2470 +torch/include/ATen/ops/set_compositeimplicitautograd_dispatch.h,sha256=qLaAXSCm0WXaxN6kyZted1krq9kQ7NX-zxuY1bJhKiI,1025 +torch/include/ATen/ops/set_cpu_dispatch.h,sha256=IQj-xc2kLWSfgH6xH97EyHVjo60kfnsE18Oe9h5o-MI,1158 +torch/include/ATen/ops/set_cuda_dispatch.h,sha256=obDehO4hPNPO7Mb0lwqxPeEIfBb07PRbc24sq5h_zIU,1160 +torch/include/ATen/ops/set_data.h,sha256=NnrfyRlVbryQCSSezAz7M1NWDMZxWF76i44UusRQNLE,482 +torch/include/ATen/ops/set_data_compositeimplicitautograd_dispatch.h,sha256=yL2R4AodKWpLR5_gj1kDt-pkBcKkOUmz1XXBMZ4HLMQ,783 
+torch/include/ATen/ops/set_data_native.h,sha256=egEEWgSdKOCxxzK_Zvw4Yr8I9pFFrOKZziz4BpvJjpc,495 +torch/include/ATen/ops/set_data_ops.h,sha256=j67F7fWpSpe5JKWRsk8HJJmzHLTWSZOOwi2Fpl7bbpc,1032 +torch/include/ATen/ops/set_meta_dispatch.h,sha256=k-R5X55oU6GtG0kR_HkHtakdSMWGBNUrP3PIegVB-80,1160 +torch/include/ATen/ops/set_native.h,sha256=skc7dEQ_fyLsXyLVoGdj79UbZm2bEIlTWWH1E5Z36ms,2414 +torch/include/ATen/ops/set_ops.h,sha256=dtIASNhLKVLQDI-Xnm1rDBuqgYKfsJiMcXFDqi-LGR4,10090 +torch/include/ATen/ops/sgn.h,sha256=w_1T-DQnkDABRNB9End1km3IlLqCFceh_tjVn7Z5H0c,974 +torch/include/ATen/ops/sgn_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5LZTvBGItXSSEPoj5He0vvDPrxfS51-kWkyZ6o9PjuI,835 +torch/include/ATen/ops/sgn_cpu_dispatch.h,sha256=N8oA9WbFOLV034Ofk9Y_lEoKo8EL_E1gNd6z37s1DH0,916 +torch/include/ATen/ops/sgn_cuda_dispatch.h,sha256=gEHpo5mz-1MJApDpovu2PBGyFHkKgnUGcC1GVKiyPU0,918 +torch/include/ATen/ops/sgn_meta.h,sha256=p4U44EfLiZqbz-UFx988VTlr_2shhNTUFWNzdERaRUo,572 +torch/include/ATen/ops/sgn_meta_dispatch.h,sha256=8yfX_c-0Sgq8Y_pRhnUy4PizSGbyX-pQMdajpHcDrNw,918 +torch/include/ATen/ops/sgn_native.h,sha256=Lyl-3VYpHUzzPJtGCXEilHNwCJWkisXCgz4a6eUUYLY,1114 +torch/include/ATen/ops/sgn_ops.h,sha256=ftL5NX6yzjLsxXX3jCcr59mLsFkTJsLqUxwrxetKMtU,2095 +torch/include/ATen/ops/sigmoid.h,sha256=8GPoMxtMmKEzwZw3l4OYboTvw7DaPs8_bjX3Et-jpbU,1159 +torch/include/ATen/ops/sigmoid_backward.h,sha256=y3lGXXei3wNZtHnRZD5UsflBZQAtGTUvJQDc9y106iI,1387 +torch/include/ATen/ops/sigmoid_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=KeXoyaLReJobC_434lczR2HcHZ-hbB5pAywmeclxVRg,834 +torch/include/ATen/ops/sigmoid_backward_cpu_dispatch.h,sha256=Z1EoGWZ4eiOSy7lPpI_5qwyzWyFqxY9wC-35gcOLfDo,1023 +torch/include/ATen/ops/sigmoid_backward_cuda_dispatch.h,sha256=6ZRfEflpXl5fpK-du2JcRoDGKNJjB-LydnnY6mq6OXI,1025 +torch/include/ATen/ops/sigmoid_backward_meta.h,sha256=Q2TsbzrZM5CXo5d1drgCSGA8JusHYjx_zfIJdO_tyVc,619 
+torch/include/ATen/ops/sigmoid_backward_meta_dispatch.h,sha256=S4rELKVhE_1DTVGnOsDDb_8rwjqu0gHQwaJQyOuOLHw,1025 +torch/include/ATen/ops/sigmoid_backward_native.h,sha256=zWfWt4YdQWcMwL8foeG0VndAO4MHby97tlHHksPgrws,667 +torch/include/ATen/ops/sigmoid_backward_ops.h,sha256=vTt1Fk-bEglgV-2LW62kve1XuuopyA8N9vacI2PjSmw,1912 +torch/include/ATen/ops/sigmoid_compositeexplicitautogradnonfunctional_dispatch.h,sha256=PgSe5Gt31ah8_osPFrHYOww8BvxGX1FAB-T6z4sIlSo,843 +torch/include/ATen/ops/sigmoid_cpu_dispatch.h,sha256=9BqLnt0EV_KwajFVFxF7kDs_-hM6m2r-I-s6TA5mIYQ,932 +torch/include/ATen/ops/sigmoid_cuda_dispatch.h,sha256=QVopEHCkwY_JxiodcFYJU0G5YY7ClWLQK0oroxRrFO8,934 +torch/include/ATen/ops/sigmoid_meta.h,sha256=K4Q8Sb00UUZQgVYRsjSwzHeIZgbkYLobohFaSBwvQBI,576 +torch/include/ATen/ops/sigmoid_meta_dispatch.h,sha256=tHMuVNnesOCzw4LWcETtkRJUayW-Y2Mt3UMxT5SgayY,934 +torch/include/ATen/ops/sigmoid_native.h,sha256=zqsO6FoepXs8KVtH6e3t3NQH5XR4ciToAV_9yQ17PBs,789 +torch/include/ATen/ops/sigmoid_ops.h,sha256=lzqi6rrhvf061x2LINWNHrKuCUI-SkWBcQG38qsMWAI,2131 +torch/include/ATen/ops/sign.h,sha256=uYy5y3g5jr_EalGQphc5gnGd-C8iBsMjJTYU2QT1vF4,984 +torch/include/ATen/ops/sign_compositeexplicitautogradnonfunctional_dispatch.h,sha256=3sLCp0R6yinNMOZNmVu3TVD2Kz72SerkQDIQaN_rhLA,837 +torch/include/ATen/ops/sign_cpu_dispatch.h,sha256=dRjDYMA27wsP2uva7J-Kgdy5UvukNUp6IfQthimzXeE,920 +torch/include/ATen/ops/sign_cuda_dispatch.h,sha256=pqJj4CvmdlTDW6JNChQyrKMfUpUrFIobRDu3_I-X5i8,922 +torch/include/ATen/ops/sign_meta.h,sha256=kZ50XZlNB_ahX5sQZ725nwM1bcKU9Bwvlvh7Vy1syMw,573 +torch/include/ATen/ops/sign_meta_dispatch.h,sha256=zTgSWNx26W0SqO2nyrie1VW5-I62rLQKwHVk85ImOnI,922 +torch/include/ATen/ops/sign_native.h,sha256=y_XI_ArCQP-L0sTnaml34ipbsIQNMzQyDz0H_KhhlLM,998 +torch/include/ATen/ops/sign_ops.h,sha256=36fzM7PNMwzYC2f92LfqlLF7SI0Y6jfYOEMJsdrzCDY,2104 +torch/include/ATen/ops/signbit.h,sha256=N-Kc6uxgOnu9PdMH0zZcK92izNaE3L6bve-wXHoHTxo,1014 
+torch/include/ATen/ops/signbit_compositeexplicitautogradnonfunctional_dispatch.h,sha256=LKAm5dgD5A2eFGCFY4CYUTIl5OTWUm5om6_g5-oBr_4,791 +torch/include/ATen/ops/signbit_cpu_dispatch.h,sha256=rP9ipQWOjXsjCT1FWW15McZXhlElRK-CO_f0m4bDxYI,880 +torch/include/ATen/ops/signbit_cuda_dispatch.h,sha256=FcRfgPia_yIEHt6_4uzob0Cb5Lrew0BvKThRrXioHKU,882 +torch/include/ATen/ops/signbit_meta.h,sha256=w0_n07i9y4ML3mfA5X4cffXQfQ7L2-ZjG2Tonboz2Sg,576 +torch/include/ATen/ops/signbit_meta_dispatch.h,sha256=3TpQ5vHZrP7rtq8vi0ePFzQv_3gu8unuISKA9-JT8ac,882 +torch/include/ATen/ops/signbit_native.h,sha256=IlHrBXQHNotonA5cOTs7hgk0ercD7eILFxz5lxJhbQ8,903 +torch/include/ATen/ops/signbit_ops.h,sha256=e0CqonP8gaRvpA3w4-pGj4h0rKq1WgvfeCEe200aAy8,1596 +torch/include/ATen/ops/silu.h,sha256=08UqXs3fqpQPMKSTlmN-O6GMkwTsjQ8fbuNETKxLU6I,1120 +torch/include/ATen/ops/silu_backward.h,sha256=UEGumNImmUN8Bi2bi_BkebVYjE90enHwmfa9ERDjjck,1339 +torch/include/ATen/ops/silu_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=8YaGB-R4mVUWiIqPoByZJbvej048UGQjTm_O8uBNCeY,829 +torch/include/ATen/ops/silu_backward_compositeimplicitautograd_dispatch.h,sha256=sQm5odhJYhYcckBGh6uwZ68ih5HjuNIIcfDJEQfFfCM,803 +torch/include/ATen/ops/silu_backward_cpu_dispatch.h,sha256=EG_bVXsVJLRVkSi9rLsFGSyu0Tk5y3d79jYiJirYAEI,1008 +torch/include/ATen/ops/silu_backward_cuda_dispatch.h,sha256=JhWuMQ6oEORadSG48Zy1zi_KaNEuhbfYeAtnG5s91Lc,1010 +torch/include/ATen/ops/silu_backward_meta.h,sha256=dSIq5tnzLvQN3ntD_nfkDZVvEFwiqpQQQZcM9A1NAZA,614 +torch/include/ATen/ops/silu_backward_meta_dispatch.h,sha256=mVbl8ejUfC8rCxiW1E7c7S663PjTZUBADWI5m-3D_LA,1010 +torch/include/ATen/ops/silu_backward_native.h,sha256=eEHVrZbqD-0woJ8n7tIY749zqUekOgtEGdc2kjJgULY,854 +torch/include/ATen/ops/silu_backward_ops.h,sha256=fRVpfpQh3xwSo3CeygxZslm-D0cCRkXwkN_nhR2or8c,1882 +torch/include/ATen/ops/silu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=H7Lya-LmVuzj9XLj49GvmeR9pPgteBXanYLuB4TjXB8,837 
+torch/include/ATen/ops/silu_cpu_dispatch.h,sha256=VW-LKzanvEolYrQajqECOJSHGJXlc7jhXv3Qc7vDo4Q,920 +torch/include/ATen/ops/silu_cuda_dispatch.h,sha256=xkK0nHdqbsDqgHS3WeyQdxyTOy295H73P6t_RQ8abuM,922 +torch/include/ATen/ops/silu_meta.h,sha256=IZW-afXIgQa1nK5TWaTrSGed4UeL1EeCGTmVdY8DH8c,573 +torch/include/ATen/ops/silu_meta_dispatch.h,sha256=_RFEFf5hb9lvl-zAckvaGyo213ylhEURqRJSZQgrLUg,922 +torch/include/ATen/ops/silu_native.h,sha256=bT9LZvqxlMN2Mk3h9_cr5pZsCXFBS5z1a3OXUMHvCFw,717 +torch/include/ATen/ops/silu_ops.h,sha256=6RjX3RS8mW-MHGUTeZAJbAj1hA86gaFdLXpGFVY_LfM,2104 +torch/include/ATen/ops/sin.h,sha256=FscfoKol37MhM3lH9BUewnllZWn_RgXqvJ3ozH0DdYo,1107 +torch/include/ATen/ops/sin_compositeexplicitautogradnonfunctional_dispatch.h,sha256=oK8texD6_Mm0uHJLY80EPD2e4B2awH0--EUWVnzkKw8,835 +torch/include/ATen/ops/sin_cpu_dispatch.h,sha256=N2sGALxM13cOEMx_Xw0Z5QpZSz3Z0QB1yv7EF95v5IA,916 +torch/include/ATen/ops/sin_cuda_dispatch.h,sha256=AY7K9iIlZ_AWmzB3s8bQes1AtzQ-Y0J6i8qX1sFlzC4,918 +torch/include/ATen/ops/sin_meta.h,sha256=4j4j-VHroVZpeYAqQhOusCeWbyLxwa00nzAGKRuz2e4,572 +torch/include/ATen/ops/sin_meta_dispatch.h,sha256=ZXhNfI5oQuNj52zkysQ-oSrQDrUFP-c-_GIv1ZNN8v8,918 +torch/include/ATen/ops/sin_native.h,sha256=S92tI9VUAeMDnATiAL1-FyhU9u4wtDaQCXjNHqz1IGQ,1047 +torch/include/ATen/ops/sin_ops.h,sha256=y-pK-1JCIal7xqGP2kZkypyJEEOEs-QLsvyeiVP_yYk,2095 +torch/include/ATen/ops/sinc.h,sha256=Ua_anfs1YyPwVc-1HEc0GR5s7Myx6ZlL1vUu3a27ddc,1120 +torch/include/ATen/ops/sinc_compositeexplicitautogradnonfunctional_dispatch.h,sha256=EKtHwrscBiEEOnQ1JHHKfTLAH3L0B8F6ALt0WtZqHDQ,837 +torch/include/ATen/ops/sinc_cpu_dispatch.h,sha256=GOS-7yL0DplD2PgWP2CYkwAHuTH1-VvXL5P4HcPu7Lg,920 +torch/include/ATen/ops/sinc_cuda_dispatch.h,sha256=BIV7_Q8rLDcOjtnQjPHdi6q_vtsmByrHmOMUi4olkB4,922 +torch/include/ATen/ops/sinc_meta.h,sha256=OKjXqg3PR8DspzLydyQdn4zeGyi3unH-6qKwd2j5iys,573 +torch/include/ATen/ops/sinc_meta_dispatch.h,sha256=oCzuPzKN9t28M61qlSg8r-8NDD3EQBpEHCXi4dMAHC4,922 
+torch/include/ATen/ops/sinc_native.h,sha256=ObXGiGBpyuMR18NKqDQFIrbHavNWj9wKze4_PXyDuOA,590 +torch/include/ATen/ops/sinc_ops.h,sha256=JXWYkZHrllAjNqV22KEGIzhzXcu8Vvu-WrVgInu0zWY,2104 +torch/include/ATen/ops/sinh.h,sha256=5ose_t2PBLZJXV_XJ23-94CzZpawCwYlNNb5PkIF9V4,1120 +torch/include/ATen/ops/sinh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Dn9LTkJi_S3ATtf0n85BJ8pNIzMktISmGDnvH8e_WXE,837 +torch/include/ATen/ops/sinh_cpu_dispatch.h,sha256=2XfrxA2i-RFpcoXYAgiDSDF-T2jKMKSn1Re9x-tEZVE,920 +torch/include/ATen/ops/sinh_cuda_dispatch.h,sha256=YTqiAu91XPCxq3bi-2QU5WAJXMOAIJ8XoNE8_Nmsf6c,922 +torch/include/ATen/ops/sinh_meta.h,sha256=ugoM3pfnPGr5Elnk8HptOf_IYWfOjmBh4F9Opwf2kU0,573 +torch/include/ATen/ops/sinh_meta_dispatch.h,sha256=indTCakEIpKc7xjFjNcUu4CkXNSYnkjFlk0JkzsoNxU,922 +torch/include/ATen/ops/sinh_native.h,sha256=IK8iwEPi-wWAt42BTvVFoti0XIZZ8DeP_wnt7Tis9Hg,998 +torch/include/ATen/ops/sinh_ops.h,sha256=OjUm6RXDu2QTAF-7V_ifwM37k5UoXl3LUvgPGvRcFmQ,2104 +torch/include/ATen/ops/size.h,sha256=kU23uLKxh90A1xkPkilMMWhJOD6IliuSDlYoJYaihWE,820 +torch/include/ATen/ops/size_compositeimplicitautograd_dispatch.h,sha256=lfBzXTWLGOAo9DfG-KPVaOTcT7h5ydQWZqIo3W8vF3k,838 +torch/include/ATen/ops/size_native.h,sha256=yLSilFYn6AYnZrpZ6LHiZ77tTtnK8NhOGmf1DMAeg9Q,550 +torch/include/ATen/ops/size_ops.h,sha256=GmBa35PoRIDqV9cy1S3w3nJUQ6gT3wQKd0diO5JxrOA,1602 +torch/include/ATen/ops/slice.h,sha256=3UiCGypk7fZ8eFV6ynv38rrOf60kLBj7sCFYiDIBAdI,2225 +torch/include/ATen/ops/slice_backward.h,sha256=99jq-Wo5Yty8rCRj1MNx2zgHJ3VL-IcM_ld8ZpGaKks,5462 +torch/include/ATen/ops/slice_backward_compositeexplicitautograd_dispatch.h,sha256=bxgVa7Z0R7s3TzNHNwGTITuhC7ZFeD5qhSlLIj0hrFo,1795 +torch/include/ATen/ops/slice_backward_native.h,sha256=2prhNcuuQQ0qDrtFQ_ZUerzHvb8d52IYOueu6oGc580,775 +torch/include/ATen/ops/slice_backward_ops.h,sha256=YzdSdclWGKkdrs6pWbNfB-HKDn5LBDM2c9zM8Ag6lk4,2358 
+torch/include/ATen/ops/slice_compositeexplicitautograd_dispatch.h,sha256=qEZmTTwQGrl5d8V6_v3EWHXq_TfFYzvcsVnIFvvsrVw,1081 +torch/include/ATen/ops/slice_copy.h,sha256=n_q2iF6NlLSvo9tb6Y0rAy_YR3kke1sLWFI0sGsTcDk,6078 +torch/include/ATen/ops/slice_copy_compositeexplicitautograd_dispatch.h,sha256=wL23zOtj7Zy_EoC8c6sORGc0lPtJNMN7X2DL_QR5UzI,1502 +torch/include/ATen/ops/slice_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=As5hCaP0NH39_bSDijVs9jlmpKP0DFV0c9-Tk_QrVAc,1117 +torch/include/ATen/ops/slice_copy_native.h,sha256=KEbvWcvGaZZvzSQuG3nLFvdB6pEe5_SXzYbAixOOEug,826 +torch/include/ATen/ops/slice_copy_ops.h,sha256=L1KWdFzqPu2NJKgwwdd0PYlXy0IPvKB_74hI3XneF44,2351 +torch/include/ATen/ops/slice_inverse.h,sha256=k0lWZg-OkBOw7Vd0Z8F640TvonHcviTZrwXzBGXUrYg,2411 +torch/include/ATen/ops/slice_inverse_compositeexplicitautograd_dispatch.h,sha256=IS_6lCeEdZ3bvMmiaoOJQC4Yte0oWmdu1dgb437dv-4,1145 +torch/include/ATen/ops/slice_inverse_native.h,sha256=TxuQpQ9wVT95HA54kLVPBnWkkQW5nivHmtuKJBvboA4,649 +torch/include/ATen/ops/slice_inverse_ops.h,sha256=ha_VXBpIDBAp1y9WVeryt5Q4ZWRHJzeR06LaBMccKTk,1422 +torch/include/ATen/ops/slice_native.h,sha256=6P61MTQTDyZq3egJYtKxhTEXIT3E0YKULtT3Ins9tSk,598 +torch/include/ATen/ops/slice_ops.h,sha256=wR47bwr9BFGDfWpn_GhSviX0ukR24YnS7J3bhpmmhmk,1338 +torch/include/ATen/ops/slice_scatter.h,sha256=MthJg9OojNysyVYU5yZZXHUtuAdLoMCBsdJPRpz6m4g,6465 +torch/include/ATen/ops/slice_scatter_compositeexplicitautograd_dispatch.h,sha256=opvWloVfs25y2-IEy5914V5zPp8xVOat2xh3bSYnYEs,1610 +torch/include/ATen/ops/slice_scatter_compositeexplicitautogradnonfunctional_dispatch.h,sha256=_xbk7oUkaSgcVBjqL9mpc5TrtC9-WShncg4EKzFS8dI,1171 +torch/include/ATen/ops/slice_scatter_native.h,sha256=2Qs7IuRAwsJhxkXD7y0FtFfXEHCi5dWj0rLLuZ9vkjA,847 +torch/include/ATen/ops/slice_scatter_ops.h,sha256=QHNneIDr1jJRb8KPjMo7uCe1BNvV37yPwhrMtVLD0C8,2488 +torch/include/ATen/ops/slogdet.h,sha256=-3Whb9Q3ShvmAmWwWzbAAek1N5WV5ElMxDvwGmw3Fq4,1296 
+torch/include/ATen/ops/slogdet_compositeimplicitautograd_dispatch.h,sha256=dFmaIWSJbfcGLPpGVKU0-g4__WSEsC_xANNcSTF_EiQ,1053 +torch/include/ATen/ops/slogdet_native.h,sha256=CjBCX9j3gECtn9fAfkF3AiVS5bkNRqivxgRQooFNVZE,633 +torch/include/ATen/ops/slogdet_ops.h,sha256=CyjUZ3gdow6QoILtRKDNZ7X8UIPd_3nBVbgsKai5mzk,1893 +torch/include/ATen/ops/slow_conv3d.h,sha256=-a-1Ix113F-lHd69Es_iKEwSeW6_Klxa1jn2pUlY8E4,6627 +torch/include/ATen/ops/slow_conv3d_compositeimplicitautograd_dispatch.h,sha256=7O0Fzb09NgMJ5aq2oM1R4XvA_iQTMqNVgDP7CzhUZGA,2181 +torch/include/ATen/ops/slow_conv3d_forward.h,sha256=U1ZZWu2yMsEo3CuskkcaTOAOo6RGhaMKM5YLDr_HCho,6757 +torch/include/ATen/ops/slow_conv3d_forward_cpu_dispatch.h,sha256=-pVDQFwAvBup5Au-mePSOcsVMRSaImtS3quv7eLVguo,2117 +torch/include/ATen/ops/slow_conv3d_forward_native.h,sha256=6qRkSeKTF9NsmxGDhOfVitl6j81YV6ycsx0wPywjTmg,885 +torch/include/ATen/ops/slow_conv3d_forward_ops.h,sha256=ulIKQ8BqWxH8EFn5lm88isb7w3co6VeSqY3zUUEoi5o,2740 +torch/include/ATen/ops/slow_conv3d_native.h,sha256=ooMknHxNMthlF4FoOVDa8BBraEPaWmIZVgIbCM8oii4,865 +torch/include/ATen/ops/slow_conv3d_ops.h,sha256=TwQaQUWxqct8fAxpR274sf1WTCcLZd2rOuMpzchywTc,2692 +torch/include/ATen/ops/slow_conv_dilated2d.h,sha256=gQE-luk5QpGbq84euJGi_Drm8dDc52VuN56ZD1g2hQI,7687 +torch/include/ATen/ops/slow_conv_dilated2d_compositeexplicitautograd_dispatch.h,sha256=-QSUHEpVTBzlXJ_k7FOHIZcNyuGgPteulZaECxbpDhQ,1871 +torch/include/ATen/ops/slow_conv_dilated2d_cpu_dispatch.h,sha256=dpfndg4BbIsgdRIdvqpXXoaRj_QZo3m712UC4tONQyI,1226 +torch/include/ATen/ops/slow_conv_dilated2d_cuda_dispatch.h,sha256=rOPEWBodJ9Xo3RmmiUkopO5t9rah15aWZ2UanHdh4GE,1228 +torch/include/ATen/ops/slow_conv_dilated2d_native.h,sha256=pFQs17MDWYRff5i7NSS5xU91EKkjZIcbEKc-tSDAosw,1216 +torch/include/ATen/ops/slow_conv_dilated2d_ops.h,sha256=1uDetMZUcibiz9iyBfDtOIYKX7qoMIWzTwxgIIdbPBY,2946 +torch/include/ATen/ops/slow_conv_dilated3d.h,sha256=0xRorSkUxJ-rJhZSob8LCBO-6vYEWSMABkG__vCmRwE,7687 
+torch/include/ATen/ops/slow_conv_dilated3d_compositeexplicitautograd_dispatch.h,sha256=WZjAaiB594W1eQIeeaxvg0KbInaxgiNVc98VA67Kdj0,1871 +torch/include/ATen/ops/slow_conv_dilated3d_cpu_dispatch.h,sha256=9z7FJQAbVoWHrI_mLsz32t4QdKpHKImJam-Ccg5juh4,1226 +torch/include/ATen/ops/slow_conv_dilated3d_cuda_dispatch.h,sha256=Cbn_WElP-1ONK1EOSXwbOe87yxzKUsgmaVBeEoYMUaY,1228 +torch/include/ATen/ops/slow_conv_dilated3d_native.h,sha256=sZlmR_DM-P4y6BpCpQsJmfZXbo2pxJGl8hpEgT74iAg,1216 +torch/include/ATen/ops/slow_conv_dilated3d_ops.h,sha256=wksXXkR3kpAe55aaIKc88GfYsot3UyXdnK-4s-N6vPA,2946 +torch/include/ATen/ops/slow_conv_transpose2d.h,sha256=zdKfx1FsTrBAk9huWCzdUPXLUPIWVO_PSgDotszno3Y,8741 +torch/include/ATen/ops/slow_conv_transpose2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=GK3W0OlYjXHeW_hwmjrk34AO7ep2NQcGSU9dc17ih-M,1385 +torch/include/ATen/ops/slow_conv_transpose2d_cpu_dispatch.h,sha256=uLvd2Gvfsn73qYdBLtAxg7m7L7-rnuB9daaVQVsfdqg,2637 +torch/include/ATen/ops/slow_conv_transpose2d_cuda_dispatch.h,sha256=bBNXMzOMfJiPaVCedEEzOv9LTom5Rurvlkl3MwnSIlI,2639 +torch/include/ATen/ops/slow_conv_transpose2d_meta.h,sha256=QNQLXccu5pYQ8Fy2Vf7kw5lDf4X2XSBVV0i_1HKpdVk,811 +torch/include/ATen/ops/slow_conv_transpose2d_meta_dispatch.h,sha256=rjqWqnzvwC37KGAia-HEFW5PnaWg9NlIfVm3J_zABOc,2639 +torch/include/ATen/ops/slow_conv_transpose2d_native.h,sha256=Q6o9HNHKCfydLYZjQ0Sn0X5ehlWT-xj_j-WxUHIxvXs,1277 +torch/include/ATen/ops/slow_conv_transpose2d_ops.h,sha256=ZuftO8I4gNEi2UbuABvJJIHUmzfcQ56UkGRwQzFHg_s,3200 +torch/include/ATen/ops/slow_conv_transpose3d.h,sha256=VWDOC1BB1tCDg_Pk8bD6HBsCEB45HD8rsiiw-7k9Hac,8741 +torch/include/ATen/ops/slow_conv_transpose3d_cpu_dispatch.h,sha256=Orvj9yEdQbQHhXL8BZ2kY7ss7X472XVQG6Qb9f2j55w,2637 +torch/include/ATen/ops/slow_conv_transpose3d_cuda_dispatch.h,sha256=guQa-yX5FpxM1jUYgTwJ04viR_DPdn5BbJpiOxW3J98,2639 +torch/include/ATen/ops/slow_conv_transpose3d_native.h,sha256=n0FGRL1wc1okAaTaOU9l8E6PzBiHQ3JA7t2o5Nb2wwg,1606 
+torch/include/ATen/ops/slow_conv_transpose3d_ops.h,sha256=49178fQElw_3KVlbQ__mZBq5CvPG2Q-GPgmrFtabx4c,3200 +torch/include/ATen/ops/smm.h,sha256=rdSKMHMOzP_Y_4Kcj1ae3BbUnMKBPi3GKB-n609dJNc,646 +torch/include/ATen/ops/smm_compositeimplicitautograd_dispatch.h,sha256=zh39RzyV2CfnuPVHLm9WN2xAy1HIvmcNqnC7MHe5zFI,786 +torch/include/ATen/ops/smm_native.h,sha256=TZ42SA_aEpHx6GIw2WiWQxvnwPYGVxe_A0WwHVw9rWQ,498 +torch/include/ATen/ops/smm_ops.h,sha256=Up4NdZ7cEij5zd95PozfFqph-uPEUVef5Jrfwq1cvdI,1041 +torch/include/ATen/ops/smooth_l1_loss.h,sha256=GHaJHV8fd_MbzBLe3Cb-ww6JC-0lbNwPaycaD9Ean74,1537 +torch/include/ATen/ops/smooth_l1_loss_backward.h,sha256=BiyfW8-EcsCcApARpTAMyMQX5r0DBYXIXzXe_5lg3yw,1817 +torch/include/ATen/ops/smooth_l1_loss_backward_compositeexplicitautograd_dispatch.h,sha256=vsKgPteWSvoSkOolydVw8wIQi628eQK6CMAGa6Hfqw0,872 +torch/include/ATen/ops/smooth_l1_loss_backward_cpu_dispatch.h,sha256=L1-VS0yBMrCTZThtrofIh7OYl39FeV_fkTRBHRF8mu0,1053 +torch/include/ATen/ops/smooth_l1_loss_backward_cuda_dispatch.h,sha256=S3dEktFSk5jv-cVvrpmgNY-7Tv2Hq0TSpe4upvr_u5w,1055 +torch/include/ATen/ops/smooth_l1_loss_backward_native.h,sha256=aI4XEFJjOdcs7dUxaiSA5sWpNSCb5iYUxl8k1kmXx3s,777 +torch/include/ATen/ops/smooth_l1_loss_backward_ops.h,sha256=N4jIqIJO3UMktU-JrvuLFoZHDXs3Tr-n-vMWcOGpxxg,2336 +torch/include/ATen/ops/smooth_l1_loss_compositeexplicitautogradnonfunctional_dispatch.h,sha256=QFDGg5V9bzZq5wAUY91lhZKYddS5-b-KTRFyH-UraU8,881 +torch/include/ATen/ops/smooth_l1_loss_cpu_dispatch.h,sha256=DdRIZPNl3E3o0ZyHWhe6h4fJRxcoMPF7XYN1KciQEN8,1126 +torch/include/ATen/ops/smooth_l1_loss_cuda_dispatch.h,sha256=D1DeBN2maEeNWynQLkZLW3TJ4YZePPtgrcdTY1uIoBI,1128 +torch/include/ATen/ops/smooth_l1_loss_meta.h,sha256=aJckcFd98G33jy1Fxj_sY0OuoDNMjy2hmjdcMRlUGlc,642 +torch/include/ATen/ops/smooth_l1_loss_meta_dispatch.h,sha256=OK6iuMA73rROWLuzfECUCATHlYFCpFtyw2EV_FsBgeI,1128 +torch/include/ATen/ops/smooth_l1_loss_native.h,sha256=Zpx9bANA9ochEHLS-TYSUu3fovKFqcDt9p9VHVG6wOM,679 
+torch/include/ATen/ops/smooth_l1_loss_ops.h,sha256=Xu7Klz3bVBXAtpuaJozYbK5vLcSqHyvJdOkaoPX7nKY,2050 +torch/include/ATen/ops/soft_margin_loss.h,sha256=0rYtZYyFGiQXIq1fYJjyIPfz0LevM5SBLIUgRILFnMQ,1444 +torch/include/ATen/ops/soft_margin_loss_backward.h,sha256=tvcGQVudwXznOnloRCM2pRHuFRP6SW8nmZ3o8KrJ8uQ,1744 +torch/include/ATen/ops/soft_margin_loss_backward_compositeexplicitautograd_dispatch.h,sha256=mKz_T200YoVXHwVZm1EYe4W-Ggr1JntbpqZryBzN950,1226 +torch/include/ATen/ops/soft_margin_loss_backward_native.h,sha256=uN8A-lEVKbaW85dwvx_mtM5aBQNtOzzWSEfaAqULtEg,755 +torch/include/ATen/ops/soft_margin_loss_backward_ops.h,sha256=iuTKyAWTObRfNW6EBy-JyqnVkFsgsIobyMMSKbs_XXY,2256 +torch/include/ATen/ops/soft_margin_loss_compositeexplicitautograd_dispatch.h,sha256=IWQFxzXH4lpRClv65qSSE1EK0XZEPBV2QNpAxW7jZLw,1129 +torch/include/ATen/ops/soft_margin_loss_native.h,sha256=yEKreHilW5pIOEgargiwU82KpvvOkhSkcb0fqU2iMRQ,686 +torch/include/ATen/ops/soft_margin_loss_ops.h,sha256=AMd6huviPh_w-erho6ztcNIldp1ZSJYuCtcv3js-Z7I,1962 +torch/include/ATen/ops/softmax.h,sha256=6x1fmORpFKZZkv1LizoF9FHrO4aOnT0onTVsMBubsE0,1636 +torch/include/ATen/ops/softmax_compositeexplicitautograd_dispatch.h,sha256=-otz4faWijkzKNiSCJjB6QUHz1qkXMUeFFikDmUzDx8,988 +torch/include/ATen/ops/softmax_compositeimplicitautograd_dispatch.h,sha256=A-1Q6l8C1r5c8itbgXfslohXezEnnH595_J9jY6VpbA,958 +torch/include/ATen/ops/softmax_native.h,sha256=qgEjbrnemQD9iIYz2aQUm30OweyrB6UM3_6Hf_YCRJw,801 +torch/include/ATen/ops/softmax_ops.h,sha256=TOz4BSwwyuMnHXdND9UQwULj7J1KInIbr9ltTShrYG8,2734 +torch/include/ATen/ops/softplus.h,sha256=rp0txC2VO7CWKbgv1vFIeQKuVOoj8TOq2Cq6LYo65vc,1358 +torch/include/ATen/ops/softplus_backward.h,sha256=3cJZXRfIVBzPP3kIf1Hn40fIU-8RQfQiuulT3dDdEjA,1688 +torch/include/ATen/ops/softplus_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=yRyvBXIpd0TxdF7jjYtPtuP6f6DhcJK_jfDlaBnvjaw,888 +torch/include/ATen/ops/softplus_backward_cpu_dispatch.h,sha256=irVg4G8fiE6bKANPwE7laR_bQ1s2ajXO1TCxz1-W9Ck,1185 
+torch/include/ATen/ops/softplus_backward_cuda_dispatch.h,sha256=Em2nFwaFRtj0glqW9N0eTAapqi8hL4uSnkXNMTMV4ro,1187 +torch/include/ATen/ops/softplus_backward_meta.h,sha256=4IeldXiU5Bw7IPsya__dNefZjjLd_09-1m7IzTh4HsU,673 +torch/include/ATen/ops/softplus_backward_meta_dispatch.h,sha256=7h0a9RE6mIXwiyuG1Qidxna_qUKXlmM3KBahwDNVB-w,1187 +torch/include/ATen/ops/softplus_backward_native.h,sha256=mhhEfE7d2W48M64FjdUztxskVnYj717_csp60sqMWvA,723 +torch/include/ATen/ops/softplus_backward_ops.h,sha256=C1AqMy3cL4ekYiKZmvtOI6nzUgDDlEhOoVQlBJbyvz4,2268 +torch/include/ATen/ops/softplus_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AxsmNLiuVnwsNx5q0qW15VTu5qb5lRxeNdLH1LIzFWo,852 +torch/include/ATen/ops/softplus_cpu_dispatch.h,sha256=RR3_VIToYt9OWM9n3oi9MXMKX8DoM-EQo6cSM69IDv8,1058 +torch/include/ATen/ops/softplus_cuda_dispatch.h,sha256=pOtF5h-A9QmnyCkkOgC5bhqimw96GuIS5IlT04KVgHI,1060 +torch/include/ATen/ops/softplus_meta.h,sha256=KZL3Y1HPbyJka81b1XQ0ZqJtm4fXZJYaollX3HXBm1g,632 +torch/include/ATen/ops/softplus_meta_dispatch.h,sha256=zFKHcO4RG0-KFa8alMQuM9vpEfdctrT8v_9zweG66Cs,1060 +torch/include/ATen/ops/softplus_native.h,sha256=6YMv7QgiH6PEkWvhGPQ1GBncnF07-M8JGINn6uWcL9A,657 +torch/include/ATen/ops/softplus_ops.h,sha256=3ZQLfCoQo77YkhbOc8MSl5B8tjAhPhPVGSpZTndI6mY,1974 +torch/include/ATen/ops/softshrink.h,sha256=cDfRdQ4CzkPa1ttRsbEumUv_yFkQYydGClStNEF2Idc,1205 +torch/include/ATen/ops/softshrink_backward.h,sha256=Z_M1YkLLYWNTsPhSmBuvm1M0SEMyWnEWwzV61k4N50s,1540 +torch/include/ATen/ops/softshrink_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=3wp6BPR3KZD_5wBtv441LjL-OBB9oifbc3gEt2eTY24,861 +torch/include/ATen/ops/softshrink_backward_cpu_dispatch.h,sha256=bLAR1XXgeCw7rD_sahvpKF69olVHZrtcaWagQpp3ooQ,1104 +torch/include/ATen/ops/softshrink_backward_cuda_dispatch.h,sha256=34HDbvTWDXHSe--jDmSWh5ofNMzvGEH4Kp2cBIhORwQ,1106 +torch/include/ATen/ops/softshrink_backward_meta.h,sha256=1H7OyNsBWL63sgnuibXpWz0atxGw5Dv4wSZg8TVfAx4,646 
+torch/include/ATen/ops/softshrink_backward_meta_dispatch.h,sha256=RMrTtetoiE_Yb_tX9scUiwK7d8nLQO0y_L5ijIVAork,1106 +torch/include/ATen/ops/softshrink_backward_native.h,sha256=JTlca3O4ir5Z7_jmA6aJnL9iLr2lUp1Ra9axDuWfWKI,700 +torch/include/ATen/ops/softshrink_backward_ops.h,sha256=xx3oBlj-88azQAliorEBgpQ7WJCknm2CiqHxaRGoRZA,2090 +torch/include/ATen/ops/softshrink_compositeexplicitautogradnonfunctional_dispatch.h,sha256=s5FhbiA_3ZoBewO0Sk-kh3SROi6CbYuhjf1SfRI5vGM,824 +torch/include/ATen/ops/softshrink_cpu_dispatch.h,sha256=lt20fAFHALyCo2tleiMU_VihjRPcuouBRCnMs8VIFzU,975 +torch/include/ATen/ops/softshrink_cuda_dispatch.h,sha256=1YMOzyji6HWxejuF8e6L7YTrP6WHoHdBYmMiy0KwQYk,977 +torch/include/ATen/ops/softshrink_meta.h,sha256=NWda08SNHSwtd0WUFqoLINNIbzCQ_7ZjpfiG34c74G4,605 +torch/include/ATen/ops/softshrink_meta_dispatch.h,sha256=pGH6GMMP-sbvAxElBzqHkObdJOYA7WUisRru5CFnKt4,977 +torch/include/ATen/ops/softshrink_native.h,sha256=egReq5AnUnZGb3tPOO5JrDEfFB5Bj0FcEdKDPMLme_s,634 +torch/include/ATen/ops/softshrink_ops.h,sha256=h3hpYB4xgoFTq5X16z3tMS6tP9PwN_-HW1-MYpIBHe4,1794 +torch/include/ATen/ops/sort.h,sha256=kXaSBvhmhuRSeM3HxVIvUzZAuUYRBjRyJAe08d339no,5253 +torch/include/ATen/ops/sort_compositeexplicitautograd_dispatch.h,sha256=OLvtUB29TbaRQl1_YxyXDxJEFQaW3oZqE3qb45HwbXg,1152 +torch/include/ATen/ops/sort_compositeexplicitautogradnonfunctional_dispatch.h,sha256=7ez2nJVR9ehASlCCkromlQC0OJSf_YSiJCZPSFguPfM,882 +torch/include/ATen/ops/sort_compositeimplicitautograd_dispatch.h,sha256=SlBcdRExjLotUKtQgnuJpnEkLyXnXJgRTvEOgso5cQg,1696 +torch/include/ATen/ops/sort_cpu_dispatch.h,sha256=O25uhAS4BQZdELtPdKzubbCWFNKRf5GkMF0nphDFDcM,1198 +torch/include/ATen/ops/sort_cuda_dispatch.h,sha256=bpEkVLAa8mo6DqNREsKPLnFrWR6MHONfzOTZlR55nrI,1200 +torch/include/ATen/ops/sort_meta.h,sha256=JxhKBEctXN99LPdaJSDOebVj2TXQzJmqCirmGomuYVQ,640 +torch/include/ATen/ops/sort_meta_dispatch.h,sha256=E19d7kFL_fXSMjU6HLr1zkUJcy5g4iRvTFxYGdrlzak,1200 
+torch/include/ATen/ops/sort_native.h,sha256=D247gvJNGZ1pfw3So1m91i3hD7mDx4-RZ9SXT2CaizA,1754 +torch/include/ATen/ops/sort_ops.h,sha256=5hD8z6YZmNpAfOF9rDgC1fjpRl6zBwHa_Ja9mVy22Zo,7737 +torch/include/ATen/ops/sparse_bsc_tensor.h,sha256=bkNmReC3VrIgx--8XOMC3BKN5Z6bM4kvHPYk6b3YtL8,2970 +torch/include/ATen/ops/sparse_bsc_tensor_compositeimplicitautograd_dispatch.h,sha256=Oaq_2bhCcPXsOGDhfd09JEuwuKhp45D21DPQhDfDnFs,1626 +torch/include/ATen/ops/sparse_bsc_tensor_native.h,sha256=0G7JSJmWVsJsX5z309I9HNuBYXDqe7NsvalDJOLBkE8,1022 +torch/include/ATen/ops/sparse_bsc_tensor_ops.h,sha256=Dsw5yrgxkgFdcqqUFgacj6TUJAXkKFng1YGtQdv6GcQ,3170 +torch/include/ATen/ops/sparse_bsr_tensor.h,sha256=xIwNTmGnse_6Rs61tMGbouSmXbWsMHfJZ8pPO1jzu2A,2970 +torch/include/ATen/ops/sparse_bsr_tensor_compositeimplicitautograd_dispatch.h,sha256=ztfBuW00EU5D6UWEYC9srfHMx_M8GBqc1Se2V2ie7os,1626 +torch/include/ATen/ops/sparse_bsr_tensor_native.h,sha256=YciAwxn91wLW250-3T_ruoB_9bRhexviUjFg2sxd7r8,1022 +torch/include/ATen/ops/sparse_bsr_tensor_ops.h,sha256=8rWalVerirHBBCdWWCZtNLrHYzyDKtSgmnJzNc0POzU,3170 +torch/include/ATen/ops/sparse_compressed_tensor.h,sha256=rKl0X6-b2Wz4XYIlZFC3XpswG1BXLNU61eh5xju2QXs,6967 +torch/include/ATen/ops/sparse_compressed_tensor_compositeexplicitautograd_dispatch.h,sha256=Ua6jqejXfxUPFU7b1goUu1Bh1Kf6u4-Q6O8gMUaUObY,2218 +torch/include/ATen/ops/sparse_compressed_tensor_native.h,sha256=36gkG2SDo9aDE7AIByHfpGFRyvdiSzitdgjVRxg60k8,1052 +torch/include/ATen/ops/sparse_compressed_tensor_ops.h,sha256=yWEqC6TTd8Ceii2DrVodI3FHyheKl3DYUqNYbrRApeA,3287 +torch/include/ATen/ops/sparse_coo_tensor.h,sha256=8kppnEjNxfsF3SYDXDQRWdRDdNY8n00IqjSErdljwwE,4258 +torch/include/ATen/ops/sparse_coo_tensor_compositeexplicitautograd_dispatch.h,sha256=6gmDu71oxT_L139x66xJ7s2sBsTKz0nYrzrD5MdG-mg,1179 +torch/include/ATen/ops/sparse_coo_tensor_compositeimplicitautograd_dispatch.h,sha256=KhxI_NuAknTqPjCFGwufmpVE0FCYtgashGPRg_mtri8,1658 
+torch/include/ATen/ops/sparse_coo_tensor_native.h,sha256=U6kWUz0hJvdHrb_KTBfrr3hk-vovCJpa0_YIyVjfGLo,1360 +torch/include/ATen/ops/sparse_coo_tensor_ops.h,sha256=BL_nh57QmBpWptqueeeNhNsrHcGiCUwd2cjbmlGK1_U,4856 +torch/include/ATen/ops/sparse_csc_tensor.h,sha256=K8aU1WnhNPpG-xHJNlvHB3jAVCKqhsbpYUyzFXpGqNQ,2970 +torch/include/ATen/ops/sparse_csc_tensor_compositeimplicitautograd_dispatch.h,sha256=1RwKhdA6DbR3fTgB3iFu6VO5AvWO70koWG3V9T7k518,1626 +torch/include/ATen/ops/sparse_csc_tensor_native.h,sha256=KdhFWa8Vp90wvUscghYAJ1MPEwg304dsvs_WK9Ej1Uc,1022 +torch/include/ATen/ops/sparse_csc_tensor_ops.h,sha256=IK_WMQ_hK2VRUyyvw8KI8_1wpFjI-dIjnbZdZBZ2rMk,3170 +torch/include/ATen/ops/sparse_csr_tensor.h,sha256=v9LSLYT5UxdxEe6IUHCZSwYGNLR_AGwkJKyOnx3Q22Y,2970 +torch/include/ATen/ops/sparse_csr_tensor_compositeimplicitautograd_dispatch.h,sha256=tg5pdKL2iiApucrbPynYr0pF1HHl7mvAsvqqGcZoHUc,1626 +torch/include/ATen/ops/sparse_csr_tensor_native.h,sha256=pTHwkai1ZJAcs1Qx93IOSm3cnWpNtZlmj2XMWJLv9_k,1022 +torch/include/ATen/ops/sparse_csr_tensor_ops.h,sha256=NW54-8cxVvvLkb9DIC-9QQFh3ASJK4XucDt35OayeW4,3170 +torch/include/ATen/ops/sparse_dim.h,sha256=lNFvNVFV0aCLDeULqxgvu8zR2gvZu1qFu42Kf5l0tn4,484 +torch/include/ATen/ops/sparse_dim_compositeexplicitautograd_dispatch.h,sha256=gz-2bPdDY3EXY0b-Y_UGcVMVfIEkA6XoBwhm5TABTtE,765 +torch/include/ATen/ops/sparse_dim_native.h,sha256=f32Fv_e04mXR0JaljNraqOsE4r7D9nKwzoVOC4gI-Ro,613 +torch/include/ATen/ops/sparse_dim_ops.h,sha256=rUJnBEHQgb4RrLw2RaoZb0OCnKX-uy2zA2F7zes7sHk,967 +torch/include/ATen/ops/sparse_mask.h,sha256=YEL2ZKmE1Wn5jDiCoDRqOtqCpylRQxEW2gILDQfY0ag,992 +torch/include/ATen/ops/sparse_mask_compositeexplicitautograd_dispatch.h,sha256=lZZ8zflSDKJfLpXu_gdweD8WAC1Dx_sysbqpu161Rl4,927 +torch/include/ATen/ops/sparse_mask_native.h,sha256=7kv8HCtEr1b37ugxB_WyrVC-fiKLoRYI8QVdOEiUWTk,716 +torch/include/ATen/ops/sparse_mask_ops.h,sha256=u6I6w_lbYdBbW9TAql2rG6-5HcMgeX-gD-cJNIsO1V4,1786 
+torch/include/ATen/ops/sparse_resize.h,sha256=YN6-waWJ8rdoPhEP_yG3zBOxH4WKWt9U2ye4D4Yfjb0,1497 +torch/include/ATen/ops/sparse_resize_and_clear.h,sha256=lruCnJoaPLp8-HnZMFgc3EA7vnOV2Nl1IjUp0seuofA,1597 +torch/include/ATen/ops/sparse_resize_and_clear_compositeexplicitautograd_dispatch.h,sha256=Ls6kS5ml4uGfjC8K_A38ow6ICk7nNTWQCrnhD03DJEk,1179 +torch/include/ATen/ops/sparse_resize_and_clear_meta_dispatch.h,sha256=Km3K7YrJCKVTiJ0gwwqE2nwwLTBhECQcFGYHRg9zdcA,809 +torch/include/ATen/ops/sparse_resize_and_clear_native.h,sha256=4HmF_xA3kA9aKL6nDgHg4PVdF8saRmZFgrOYeU-7txQ,863 +torch/include/ATen/ops/sparse_resize_and_clear_ops.h,sha256=hoRh3_Qo02RYuw2cvRBa4k9WFq5DhfYieE3dFn5b9SY,2947 +torch/include/ATen/ops/sparse_resize_compositeexplicitautograd_dispatch.h,sha256=n_ZcqJ29w16qtM-k4Cpydy8AXe696IkrSfxNiXrxO68,1149 +torch/include/ATen/ops/sparse_resize_meta_dispatch.h,sha256=TIW_sMk1jfmUGdgJQGhLs8nk-zLUkVj68Rdgvrr8sfA,799 +torch/include/ATen/ops/sparse_resize_native.h,sha256=vzXoDtjqZetOddohZMiSnYlOaf3ubHjCuGWrCDM3A1Q,833 +torch/include/ATen/ops/sparse_resize_ops.h,sha256=xaB_SUGzP1fmFJ5QZVpsq9P5korOx8Kp_AGZI0MWB3Y,2857 +torch/include/ATen/ops/sparse_sampled_addmm.h,sha256=r0XleiNcWoQH7Yy2PyoRNknaFkJHYH5L63ZLmOx8tNc,1704 +torch/include/ATen/ops/sparse_sampled_addmm_native.h,sha256=_xV7o3UAg_0EkmHJQURCHn8ljuTVgQTV3II519CpKJw,1216 +torch/include/ATen/ops/sparse_sampled_addmm_ops.h,sha256=1PHsPSkKEBdWyqh6uLl3sxC41WOpnKezmTi9oTqrOiA,2355 +torch/include/ATen/ops/special_airy_ai.h,sha256=crpHdhRCA67t0aMeRbqxNVPAv2TmfyZj6-NdN9jnFfU,1067 +torch/include/ATen/ops/special_airy_ai_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wXLDK7Z1CclTtHXRXETveaiueJ9157ffmycygpScvQ4,796 +torch/include/ATen/ops/special_airy_ai_cpu_dispatch.h,sha256=5eQjksGWxSERuaWFRpjU31bFZsmTRZQBxMC0IHO0v4U,895 +torch/include/ATen/ops/special_airy_ai_cuda_dispatch.h,sha256=2TW-ozb54TT126PSh6oF_e1X_bH_t79z6Fm4RipvYTQ,897 
+torch/include/ATen/ops/special_airy_ai_meta.h,sha256=qyFfgkuli9ctnDOXfYdUw2pU7zA23woKNkF0MKo83Yc,581 +torch/include/ATen/ops/special_airy_ai_meta_dispatch.h,sha256=NmhGhErGzoVEo213-MT8hjnQ-Cm_svGG81-JHCOvp5w,897 +torch/include/ATen/ops/special_airy_ai_native.h,sha256=zTfeguwsfrJLsH3Nu6_PhmtrXENwiNyjIcyW8HC18mw,620 +torch/include/ATen/ops/special_airy_ai_ops.h,sha256=AIYe-2JeFHayAhhoe66l3CKv630FqhAS9GjF_23rh4Q,1626 +torch/include/ATen/ops/special_bessel_j0.h,sha256=YPR2XMIKhy5AEjQ_HW_PHUsaoYkpai38iLbJc8o40kc,1114 +torch/include/ATen/ops/special_bessel_j0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sUN_-xH6BJrL6zS4TgVstPSH0E3pwzo9MZNAdRWF6fQ,801 +torch/include/ATen/ops/special_bessel_j0_cpu_dispatch.h,sha256=md-EpgEyXeXiy_mvwyWT4yJDS19AqOjGpjVwmJJ48Kg,910 +torch/include/ATen/ops/special_bessel_j0_cuda_dispatch.h,sha256=Q7HboI7OlqdoCdybyuVMqtNP2XRV2Y2UnmZLG6sUaUo,912 +torch/include/ATen/ops/special_bessel_j0_meta.h,sha256=XvF5X77Wyfn8r_fETkL6OgIIovE981LCqJupFZbYfuM,586 +torch/include/ATen/ops/special_bessel_j0_meta_dispatch.h,sha256=4uQZkdZ072Iv32obwR3IqoBqZGG0Cik43tI4jMRfy54,912 +torch/include/ATen/ops/special_bessel_j0_native.h,sha256=VnSLoq16DZr2k3hb15y5kp-LaEypZigTcQiE7MS9oXU,629 +torch/include/ATen/ops/special_bessel_j0_ops.h,sha256=cy--dhlatCGaB78du1l01I-tP-eb2vaXgLcVUG_rfxI,1656 +torch/include/ATen/ops/special_bessel_j1.h,sha256=aEDQS-CvfXUwmuNMLxsYI93IaYsxXJIAE-2l_kSTRjg,1114 +torch/include/ATen/ops/special_bessel_j1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ErD3nUgCo-p6Mp4VCz45ATz-DlKhU8fuxhmZEyewvI8,801 +torch/include/ATen/ops/special_bessel_j1_cpu_dispatch.h,sha256=8TOzZvWYktdR2TZMd4LK26IiLRQgKhLNmYQFYiY05-8,910 +torch/include/ATen/ops/special_bessel_j1_cuda_dispatch.h,sha256=hqBs8c0fMmyeml0Jr9AapvDNe2qzynZL9RTSr3xGvMQ,912 +torch/include/ATen/ops/special_bessel_j1_meta.h,sha256=GtwGLVix5Zrbkt4Dg05bfIADt4pkNjBse2doly7vAfg,586 
+torch/include/ATen/ops/special_bessel_j1_meta_dispatch.h,sha256=yeICsiXvjKMibwalDoSne8ec9E3uMijJIAyLl8FdZOc,912 +torch/include/ATen/ops/special_bessel_j1_native.h,sha256=zSKL3erH0OYSXWb9hEdYdTA8ak2YYFlgMaATR326wS8,629 +torch/include/ATen/ops/special_bessel_j1_ops.h,sha256=JWmJAutuBk0bgmt7PqTEGAHs8osimQg6WeITAtZqHVg,1656 +torch/include/ATen/ops/special_bessel_y0.h,sha256=a_qgof4WlItEPNkO0OPpBVuFpGNyDsylOOOQv2WRDus,1114 +torch/include/ATen/ops/special_bessel_y0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=yi6XCUAj2a7WKU-aNabw6gk_sjRdkz_Gg-dwijd0n_w,801 +torch/include/ATen/ops/special_bessel_y0_cpu_dispatch.h,sha256=xy4oxo3wDUSeNWnm1X4Cq4EcZZY1Ex1Ybl_tzyfiW1U,910 +torch/include/ATen/ops/special_bessel_y0_cuda_dispatch.h,sha256=LbI3vgcJWlxI0NAdQbexwcYN-eBzFSQbUC1YIjFKoyk,912 +torch/include/ATen/ops/special_bessel_y0_meta.h,sha256=sIhT3qDwsYy2kbdcPYn9WkUXFJbjIIjv_4ce6_KfHNc,586 +torch/include/ATen/ops/special_bessel_y0_meta_dispatch.h,sha256=6EP2Q1rec2ncPr2f9KnicSI68aC5VcvbxwSlM8WC-Vg,912 +torch/include/ATen/ops/special_bessel_y0_native.h,sha256=mHtFsJaRxH9qAzWTZeAMa5mlOWSznHsyyaxWsDIJ8SY,629 +torch/include/ATen/ops/special_bessel_y0_ops.h,sha256=jrC5FsFWgmfVRSqyCDMlHQkNagsoJz_-7VTYnAfd-co,1656 +torch/include/ATen/ops/special_bessel_y1.h,sha256=zF2BPPLC5R0lqG7SOfL3GygNVXCOkrK3DcVeFmG6kRQ,1114 +torch/include/ATen/ops/special_bessel_y1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WIZS4xuLu5ouddWjpdxEia00so8BiNpaOlUXqm9dnDU,801 +torch/include/ATen/ops/special_bessel_y1_cpu_dispatch.h,sha256=k87LEe8Edev-dG_fp59c_d14FnWlD_yi6mmwvLH8niI,910 +torch/include/ATen/ops/special_bessel_y1_cuda_dispatch.h,sha256=2iNoCFgKh3L0DU7aX-Ig34eKG2qpoBLISuOX8z77BC0,912 +torch/include/ATen/ops/special_bessel_y1_meta.h,sha256=aWlgedSLjHElSk1tUp1SKgqZyJYT8xR0UxKg0s3O2tM,586 +torch/include/ATen/ops/special_bessel_y1_meta_dispatch.h,sha256=TI8SUHiOBuSLjevY95zeT1NnmCCMRRhKtlnbmdTml-0,912 
+torch/include/ATen/ops/special_bessel_y1_native.h,sha256=kYgzPqCZtA2VZ8_u1cVf4VVSW7qz7nBtdlUFURLRAfw,629 +torch/include/ATen/ops/special_bessel_y1_ops.h,sha256=BMo9LV0RmzcsE2BdrirFpnr7QU_ukNeCCSm8AHSedbg,1656 +torch/include/ATen/ops/special_chebyshev_polynomial_t.h,sha256=Yv1DYBxFZc0BlfpnmVAyEtUdm4NW4BvcPzYAW6OaVwI,3068 +torch/include/ATen/ops/special_chebyshev_polynomial_t_compositeexplicitautograd_dispatch.h,sha256=VpDsIQwFNz9o04cPtTZ2pPp7LbTIgbQ_rmxgVy4KQdM,1390 +torch/include/ATen/ops/special_chebyshev_polynomial_t_compositeexplicitautogradnonfunctional_dispatch.h,sha256=2uRZ8wcT-VRmiclvi7iRTIZzwODjUEn2Ml7F0mPzl1A,833 +torch/include/ATen/ops/special_chebyshev_polynomial_t_cpu_dispatch.h,sha256=Pwops8mIBXgfxvHO9g9TE4CA7JKLBB6rupzSH62rIlM,1006 +torch/include/ATen/ops/special_chebyshev_polynomial_t_cuda_dispatch.h,sha256=3GdWBz8i-WDyvLvim-oiv2zz9ijf7Bo40U6yPyn6wc4,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_t_meta.h,sha256=uIxDmN2W5JRjxIkSwo5f1m45QC_wV6EjPICRnlFM4Ek,618 +torch/include/ATen/ops/special_chebyshev_polynomial_t_meta_dispatch.h,sha256=3rpaIPQMlrDAYT0928BMWPgPLGpCpnCcGTDa3YRYdNs,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_t_native.h,sha256=7y7UoQX0AgJm0Ri6aLhpIBfiwIJbciiKVY79TUPGTfs,1123 +torch/include/ATen/ops/special_chebyshev_polynomial_t_ops.h,sha256=cMOUsI8YCSTJaDxx5Oh2uHkc_XDOWYLyEBdOM_UAreY,4830 +torch/include/ATen/ops/special_chebyshev_polynomial_u.h,sha256=Z-gt-0H8SY1WTLBOvt8o6bwkuQ8thLLRvVJSCO-Jfv4,3068 +torch/include/ATen/ops/special_chebyshev_polynomial_u_compositeexplicitautograd_dispatch.h,sha256=0heZjuv7fa0_5AVSsrmT-P6AJLCiqpvMYHmflp6mY-Y,1390 +torch/include/ATen/ops/special_chebyshev_polynomial_u_compositeexplicitautogradnonfunctional_dispatch.h,sha256=MBfUz24uIavssctq6hpI1-nu8GMYnIIxWxIs2WzJFPA,833 +torch/include/ATen/ops/special_chebyshev_polynomial_u_cpu_dispatch.h,sha256=j_7L8vpq0C99C7cuWaLL6Bpa9WSS51J24ZBD1bhX6XQ,1006 
+torch/include/ATen/ops/special_chebyshev_polynomial_u_cuda_dispatch.h,sha256=yvk6Ai46pk8DrABh6vsAXnV5dVvdFRyYqts6Th2FEyc,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_u_meta.h,sha256=VNK98xplHf8bdNFO_TkONJ8IOQAdxDbfYrNbt82CNkg,618 +torch/include/ATen/ops/special_chebyshev_polynomial_u_meta_dispatch.h,sha256=7JSc9wJBBM5Wnw0IasGJPrXaT-B5rUFKOSH8iss3AOw,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_u_native.h,sha256=_pGZIABNPT1gX32fNgKKxdrJsiP_xzjow_9meuhXcTE,1123 +torch/include/ATen/ops/special_chebyshev_polynomial_u_ops.h,sha256=bLKWwrqeyACCLugJ-g2oLEkuruLjj7B6J6wxqFMSFko,4830 +torch/include/ATen/ops/special_chebyshev_polynomial_v.h,sha256=b_36fN63BsziS71_bt2RdHQ042Ogudb3o7t2WHvl_hY,3068 +torch/include/ATen/ops/special_chebyshev_polynomial_v_compositeexplicitautograd_dispatch.h,sha256=6J2FsUG9MKVeATJbjKUtk8z8My8Ph_s3j9j3RxXCSzY,1390 +torch/include/ATen/ops/special_chebyshev_polynomial_v_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9cvNUDRc88CLFGJgbTPmB5VFqI3PA63r6V8N3O0TO00,833 +torch/include/ATen/ops/special_chebyshev_polynomial_v_cpu_dispatch.h,sha256=J7-BFd21sJu1WT5axGZtIttcx_kks55fmQCFeAOC2zA,1006 +torch/include/ATen/ops/special_chebyshev_polynomial_v_cuda_dispatch.h,sha256=U3YvcI9RAhQv7aDOUzvZna8m1Vdezpw3tU6u6RlpxoY,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_v_meta.h,sha256=XsAIA4IwlRVmNdQr7cFcxLjXzENuM_sg_IN6Ca1mcA0,618 +torch/include/ATen/ops/special_chebyshev_polynomial_v_meta_dispatch.h,sha256=uOtifp6hnOSb3ckBjqNUxA-hjs0RRxfgRyU5cgb-zrc,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_v_native.h,sha256=hKFOjZEQoFWCzr2HkLhg-1LDae5HnjI5JBXsQahFwgg,1123 +torch/include/ATen/ops/special_chebyshev_polynomial_v_ops.h,sha256=CV8SH3zVzAUpXpelUM1U8Y03pcRk9nogwXo1MIwf5M4,4830 +torch/include/ATen/ops/special_chebyshev_polynomial_w.h,sha256=xts3XSLr8x9llhM5aZ9eKJwEHbYnvetBTq4OiDfodwo,3068 
+torch/include/ATen/ops/special_chebyshev_polynomial_w_compositeexplicitautograd_dispatch.h,sha256=1uYSq3PS-9ey1SThcT-5qEdiDcrf-ecImvEsyZzf3cg,1390 +torch/include/ATen/ops/special_chebyshev_polynomial_w_compositeexplicitautogradnonfunctional_dispatch.h,sha256=u5QPrFza1uFoaT5OaloEFEHLJTjGrqxobHkOM63mfm0,833 +torch/include/ATen/ops/special_chebyshev_polynomial_w_cpu_dispatch.h,sha256=YEvt-BjUt2aS_cqqekfex0HNkwDOQ7vM4e426fYePZM,1006 +torch/include/ATen/ops/special_chebyshev_polynomial_w_cuda_dispatch.h,sha256=9BNxFogiA1iKlXdhkbEHIqPciA8B_o5iUKij2g95jo8,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_w_meta.h,sha256=rTzzsrL3vqc-uX5s2crjd05RoR4xnoS2MDTLwvcFM_g,618 +torch/include/ATen/ops/special_chebyshev_polynomial_w_meta_dispatch.h,sha256=QoBjRSr8ZP5i8nFIyOCdUt3dz9GLBKdXehj65hsnwpw,1008 +torch/include/ATen/ops/special_chebyshev_polynomial_w_native.h,sha256=sFLZV7Gbf3ta8exoFULa0siwsp7GeAns8YxqAuwBuLo,1123 +torch/include/ATen/ops/special_chebyshev_polynomial_w_ops.h,sha256=0HDmhkqyXi_7n6Lr9OV304fnJKsrgZygTRJb6p3nXDM,4830 +torch/include/ATen/ops/special_digamma.h,sha256=n8KQ4Cti2MBjXLHeTTD30Z2TJiIBxqaZLG2CMnUA7PY,1094 +torch/include/ATen/ops/special_digamma_compositeimplicitautograd_dispatch.h,sha256=6w44bSSTZmKE1ZDj3hDLW4hkMwRlnDMFeP-ewFOcl8g,948 +torch/include/ATen/ops/special_digamma_native.h,sha256=opRU0XoTJZ0RRvVTwKgCrOJ8ORod6-u2yZehP2LuIaE,572 +torch/include/ATen/ops/special_digamma_ops.h,sha256=zvyNnX6tj-R9vMi9MgjRA3WOPS-EpD4rmmtXpGBSCT8,1644 +torch/include/ATen/ops/special_entr.h,sha256=6zeLxsO6UIXVNaiKNZcvxM5DhAdi8Alvkp2d7eVgM7s,1064 +torch/include/ATen/ops/special_entr_compositeexplicitautogradnonfunctional_dispatch.h,sha256=KjDfuVMyVbkHAwN2bSyu2BCmbaYdD8tiG_OjT_P_x6g,796 +torch/include/ATen/ops/special_entr_cpu_dispatch.h,sha256=McR2Tw-PN0ls7ZM9RiN59USoxJnD2IIsfGZCyzwzrCI,895 +torch/include/ATen/ops/special_entr_cuda_dispatch.h,sha256=uTIkz7f9Md5faUax6Vbf_aX3vd4gycNxVcEwaEBhnWY,897 
+torch/include/ATen/ops/special_entr_meta.h,sha256=scE0b5WD4_oUlvmO1egybohia0Z3Y_Pr_FsUmYObbAM,581 +torch/include/ATen/ops/special_entr_meta_dispatch.h,sha256=qBJhu0Z-NTtoEhORfsjVO_gy8uXspvtIafFig3bPjhs,897 +torch/include/ATen/ops/special_entr_native.h,sha256=3nyD7Yyt00H6_Gq0IDWDl-FrA0ulELOpEqgnwj_40rw,614 +torch/include/ATen/ops/special_entr_ops.h,sha256=xVOKZWhRpQNqf1bWHldXPBQvVAp3OJjDNNhBvRDca5E,1626 +torch/include/ATen/ops/special_erf.h,sha256=x2wK92qr_QXHCNrlyRJoZSY5jX7pZdpAedcikxSO8ZU,1054 +torch/include/ATen/ops/special_erf_compositeimplicitautograd_dispatch.h,sha256=LXyIpJtkmL8sWHfvO4zyFbhysvzz-HDTaoyOACIdlzg,936 +torch/include/ATen/ops/special_erf_native.h,sha256=dBlUCq8lFXLZdFL7wlBJntZl5nlueoeD1_sSgRBc3BI,564 +torch/include/ATen/ops/special_erf_ops.h,sha256=uM5_oNMDyUx9hBGHBvRCxlfxqet1y4TtYcZDSpyMG94,1620 +torch/include/ATen/ops/special_erfc.h,sha256=ZZ6-DaPGQOX8yZ3qNKsUwE4HCg9g-mDBU4F1PbqHpNw,1064 +torch/include/ATen/ops/special_erfc_compositeimplicitautograd_dispatch.h,sha256=7VfX4Vi5s0HJhP1JBbeCaPpfd1ToMQ2CllbU8mMuZoY,939 +torch/include/ATen/ops/special_erfc_native.h,sha256=_QeQjuOVVv71RWEhdH1uvK7DxGP87A-8pGrMqWTtno8,566 +torch/include/ATen/ops/special_erfc_ops.h,sha256=vubcDnnx_6VuFANCDUo8CHyuBBfRTmbK8_dyVWebuW0,1626 +torch/include/ATen/ops/special_erfcx.h,sha256=DbaSx2s_VMxOBkdJA7fylA35SpcACYtxc22B-QgwCQc,1074 +torch/include/ATen/ops/special_erfcx_compositeexplicitautogradnonfunctional_dispatch.h,sha256=oUGC1DgJYkoP7bsHoaoErmaG2Rh0IJGSw7TkuVdbORM,797 +torch/include/ATen/ops/special_erfcx_cpu_dispatch.h,sha256=-PMyWWGc4qpjY2VsgmVsW2rN3NzjFMJlA24EOBNPbgk,898 +torch/include/ATen/ops/special_erfcx_cuda_dispatch.h,sha256=h64IJWxMneBUlrl1qBI260TcM9-oujtC5gJ-nTKji1I,900 +torch/include/ATen/ops/special_erfcx_meta.h,sha256=XQSbQX1G_bfpqRfF89m1qoY8unLvssVGPkhYc3SUJOU,582 +torch/include/ATen/ops/special_erfcx_meta_dispatch.h,sha256=YsnhB4hBJwdoRCUgoZ9GJHp9Zq7w0pkZaD1-TOFpTtY,900 
+torch/include/ATen/ops/special_erfcx_native.h,sha256=gt-o802TP3z9w0Pyri8RwD6zcrcWe-zYmh3xlJYEHs4,617 +torch/include/ATen/ops/special_erfcx_ops.h,sha256=cSKw_y-fAPBWI2ksgLe8SNTOh0lPM0sza-Izv_GhnTU,1632 +torch/include/ATen/ops/special_erfinv.h,sha256=QwyaSdtnlJwYW6jjRycrzrPK08XFharLfCJtJLwms-A,1084 +torch/include/ATen/ops/special_erfinv_compositeimplicitautograd_dispatch.h,sha256=RfLsNZLPvJFL_W82R-WqbREDQheGBxutpM2tLKCt-bM,945 +torch/include/ATen/ops/special_erfinv_native.h,sha256=CMGlaPfxuZ5KpDUcFQzdtaN1uS9NrZ9wqeKOtqtpDR4,570 +torch/include/ATen/ops/special_erfinv_ops.h,sha256=LKzCkmIxYOSMCkCOBClyXRDYRMk7y_NI3OWM3NBYtUg,1638 +torch/include/ATen/ops/special_exp2.h,sha256=c77Hlu2hU94TXOJo5NkZIjhPMaUIpGzNAA6UWXR5PRE,1064 +torch/include/ATen/ops/special_exp2_compositeimplicitautograd_dispatch.h,sha256=F0aiY7TNNLmvPFPF1kBI-9yw0MfNKAAYJ_iZJb_P1CI,939 +torch/include/ATen/ops/special_exp2_native.h,sha256=zE3Pn1cDgabbukQKMa1HWEaOTtDif8vemytbQfVFcs4,566 +torch/include/ATen/ops/special_exp2_ops.h,sha256=hES-uw9i4UNMSv2hkIISLeruKdb5EmVnjun5yZa95Ms,1626 +torch/include/ATen/ops/special_expit.h,sha256=2g7QKpIZXsmJ8wQb8aDjME37W-myHHWiDVvj9ZWmJ_I,1074 +torch/include/ATen/ops/special_expit_compositeimplicitautograd_dispatch.h,sha256=zLWyfpT6oltRy4hTiJCODeKw3F4sEl7O6QLlkdPhmRg,942 +torch/include/ATen/ops/special_expit_native.h,sha256=-YEvqNrRG47mikqPmvp-jqpb1DqA8A9TcxRenNfz2LE,568 +torch/include/ATen/ops/special_expit_ops.h,sha256=4ywvtPRCyTFbpeiuFA3AmwX_fI8-xaPONMqPgoQjT_8,1632 +torch/include/ATen/ops/special_expm1.h,sha256=WRoWj4SkGxYO_QxoarYEFySe0S-g5bzFOV4_YR8u_uA,1074 +torch/include/ATen/ops/special_expm1_compositeimplicitautograd_dispatch.h,sha256=jbGsPGS3jR9Dw7iHdLF_pwpkyI3JN4QcQvqzCDOE1G8,942 +torch/include/ATen/ops/special_expm1_native.h,sha256=l3Nyy8CqNjSuUgaOqqxaIJFMUvwGJR7hmvJ0X6HGxkA,568 +torch/include/ATen/ops/special_expm1_ops.h,sha256=Ic9IUX5-IvheDCfhGNbAzl1UHicKQeuBxdyApsNiwtM,1632 
+torch/include/ATen/ops/special_gammainc.h,sha256=n5F-ymzQ02wkD7JM4QLYsud4s0LC6VNegav7BjvwZQI,1245 +torch/include/ATen/ops/special_gammainc_compositeimplicitautograd_dispatch.h,sha256=UR3bqb2Ns6VYkHEJQTdM31uRgNXNDWEgFIH7aJPJxuw,1029 +torch/include/ATen/ops/special_gammainc_native.h,sha256=cJPW1GkAZiVhCzgQcJkJh0I0Qt2RqAdgkhT9sdglcr4,626 +torch/include/ATen/ops/special_gammainc_ops.h,sha256=YFoYTS-Vui2IXs3TSDtI97GShGK62Hy6QQlVOh_75Uo,1822 +torch/include/ATen/ops/special_gammaincc.h,sha256=RIrGHJd8q-EvNUizZSMG_LtXJS670EvzfLSQrrJtpZM,1255 +torch/include/ATen/ops/special_gammaincc_compositeimplicitautograd_dispatch.h,sha256=WjrBNFqp5eMRJMcsGkTf5PfwKYnh7jyVEs7RmKc24Rg,1032 +torch/include/ATen/ops/special_gammaincc_native.h,sha256=9YvSpC8XuBCJBc8ua2B6_n0fA95glLV-8hGfCWyFbyE,628 +torch/include/ATen/ops/special_gammaincc_ops.h,sha256=15v-RjiX_HMKSmA6OoHGpfMqEjEKo2scQcMIMDPS9rU,1828 +torch/include/ATen/ops/special_gammaln.h,sha256=zr9hvcI4ZnUCwZgCnr0Dz4pw5GGYpqk3Ke1T8cifw7E,1094 +torch/include/ATen/ops/special_gammaln_compositeimplicitautograd_dispatch.h,sha256=hAn1wR_BMq-a8NzzClfSKgaCpw_kjo1LQGEtMyk-8Lc,948 +torch/include/ATen/ops/special_gammaln_native.h,sha256=mwnrZhJVrvhDjHVKvuC28iWA9zJCGDaqUo3d9iObSi4,572 +torch/include/ATen/ops/special_gammaln_ops.h,sha256=scQvLM18aOXzB5AZugfebq7Dy5QkECmdw9MclOgv7FU,1644 +torch/include/ATen/ops/special_hermite_polynomial_h.h,sha256=6lKSiCgAW97G44du-78EeGf1KRfSFF9s7fNYEhcCKAg,3012 +torch/include/ATen/ops/special_hermite_polynomial_h_compositeexplicitautograd_dispatch.h,sha256=AanUrIkI37hSFJTv10akpZmhHxKqGn_0HsK3h-s6SV4,1378 +torch/include/ATen/ops/special_hermite_polynomial_h_compositeexplicitautogradnonfunctional_dispatch.h,sha256=swBvyPyyLRgoA7CsP3p3he2c5T8-ZIupTO8674pbJB0,831 +torch/include/ATen/ops/special_hermite_polynomial_h_cpu_dispatch.h,sha256=aHvWfOMs6yuooiZy1D0ibSnXiLZVg1ZVvsg0_uV4B2k,1000 +torch/include/ATen/ops/special_hermite_polynomial_h_cuda_dispatch.h,sha256=iG6l52b3uKWMUm33dOgBF0FFgfpdi7YcHuLZcZNyjz8,1002 
+torch/include/ATen/ops/special_hermite_polynomial_h_meta.h,sha256=lNqYosJhIN3FO9NXYMT9RkmUjtjMf2W0EMKbKl68faA,616 +torch/include/ATen/ops/special_hermite_polynomial_h_meta_dispatch.h,sha256=gXkYp_tbQc_G5MHf3-DktLl8mr2HSVUZ4Vkl954Ircc,1002 +torch/include/ATen/ops/special_hermite_polynomial_h_native.h,sha256=spv6nBiKRwRYDCuiMFGn6F2p4hWmcdnoIjw_ipjINp4,1109 +torch/include/ATen/ops/special_hermite_polynomial_h_ops.h,sha256=l9G-1UuNOWLPDg9RjqZu87Fey_QJCCc1I7xaI-g15zk,4794 +torch/include/ATen/ops/special_hermite_polynomial_he.h,sha256=kDTVIDjKuXTelf1I0lw3827VEWlEM9ExDf_IQ8kx1V4,3040 +torch/include/ATen/ops/special_hermite_polynomial_he_compositeexplicitautograd_dispatch.h,sha256=A316dOAobAqpM6DHX8vYkn-UL1bR964IVXUGLTUVFpQ,1384 +torch/include/ATen/ops/special_hermite_polynomial_he_compositeexplicitautogradnonfunctional_dispatch.h,sha256=S_If2JhnUk95N3pFflFSFYixYO_wluYXF4VetvNyuhQ,832 +torch/include/ATen/ops/special_hermite_polynomial_he_cpu_dispatch.h,sha256=EAO8DuqY7mNku7MlGD0WRgJA_tl6oK5jEsUsP3rXZNU,1003 +torch/include/ATen/ops/special_hermite_polynomial_he_cuda_dispatch.h,sha256=z08zVsqCjdwEqxt6yP4ZCpzzebXaVill_eTrbTQw8kM,1005 +torch/include/ATen/ops/special_hermite_polynomial_he_meta.h,sha256=2GwXhw687LgqQtOTH_DpyvJhHZ_D57lct-S-oozHsAU,617 +torch/include/ATen/ops/special_hermite_polynomial_he_meta_dispatch.h,sha256=IKHgF0NnGFI0g7m81VIBs3Ow7mA6Pqaxa7S9c0yAttM,1005 +torch/include/ATen/ops/special_hermite_polynomial_he_native.h,sha256=ak9nPuraPehVsZ7rXjJV7v1dIUB9luaj3tZV6M6o2vY,1116 +torch/include/ATen/ops/special_hermite_polynomial_he_ops.h,sha256=FTrLv8U7BXWNVwxeK9FBN0hROpneh-97Ldpp5LEMmCE,4812 +torch/include/ATen/ops/special_i0.h,sha256=33RjGbp5HiuKVM-k5wu85AjlkKar1QQeuP82jsSy6GY,1044 +torch/include/ATen/ops/special_i0_compositeimplicitautograd_dispatch.h,sha256=ibY7ZlRGRZZ65j7WpN2ayj6wjEb2mfp2NXVoP29m2Ls,933 +torch/include/ATen/ops/special_i0_native.h,sha256=JN3m7TieWRUiWGMmGJPA2IDT988Q0EHhf9DeoscX_ug,562 
+torch/include/ATen/ops/special_i0_ops.h,sha256=cuIFuamyGK402XKJ75dsKEGyldGU1R2mDyjPPUkxyXI,1614 +torch/include/ATen/ops/special_i0e.h,sha256=fAB9Ye-V-wx1tRvy27RIYqU8UlltFW_uUFKcYR-k3eg,1054 +torch/include/ATen/ops/special_i0e_compositeexplicitautogradnonfunctional_dispatch.h,sha256=NSQBbBZMMw7qtku-SGq3OKeAZtr8G8WeNuLBN6hAbrk,795 +torch/include/ATen/ops/special_i0e_cpu_dispatch.h,sha256=PZYXMweF5rHdNV56LKtYyIhHzQ-d123DOUMVvCgZ0i8,892 +torch/include/ATen/ops/special_i0e_cuda_dispatch.h,sha256=-5xBTZnY-Qq714DWuItlqQgOCuSc4jIYzDvJgqfws70,894 +torch/include/ATen/ops/special_i0e_meta.h,sha256=xVnJ5gX5UuMdeU1GCm_0tBZPv0sshlN5MdLzfwY75EM,580 +torch/include/ATen/ops/special_i0e_meta_dispatch.h,sha256=ntwsVqswnn85egNYDVgPU_pEkNWAtNIDrPgaQOWlL4k,894 +torch/include/ATen/ops/special_i0e_native.h,sha256=0tBG7zcN4XjzWUZkVELABVlVbkOX55mBpYciDoDRWGQ,611 +torch/include/ATen/ops/special_i0e_ops.h,sha256=EQTREZ-Y7tnd6gXkjMpNB-tPbmAH_C_T5Wr6MZ4Eal4,1620 +torch/include/ATen/ops/special_i1.h,sha256=40SvqhzqPqA3eMhaJVevUQ0ix_k6ZHUnr0dH2CYNtho,1044 +torch/include/ATen/ops/special_i1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=fQxWBf1SGPf8nNY2M5yagDETxXeIjGkwUJd4IvdUzDA,794 +torch/include/ATen/ops/special_i1_cpu_dispatch.h,sha256=i4E72MJwNH617AOJcu4xUNqSJJwR32hA6RXpLBkmhVs,889 +torch/include/ATen/ops/special_i1_cuda_dispatch.h,sha256=5s4xuDgy7sscUnwtIPZukrcB9KORrj4iVZTj1Kjxxfk,891 +torch/include/ATen/ops/special_i1_meta.h,sha256=aHHW4C6yivWp-FEfWnVfB20lWTu8MNKPSE0Jl1HAjD4,579 +torch/include/ATen/ops/special_i1_meta_dispatch.h,sha256=HeD0y25IzkJ51K16PQqlKQ5njfSwMRx7XvdYH0LySvs,891 +torch/include/ATen/ops/special_i1_native.h,sha256=wg5cS2ikvsdLkQqTI2QXc3ocXyFNX7ONEcryaxZaiNQ,608 +torch/include/ATen/ops/special_i1_ops.h,sha256=7Jv716TUciZ-hlfkjph92DhFHWL8DhbW_QXueFybOtE,1614 +torch/include/ATen/ops/special_i1e.h,sha256=l3lld0fgwYLUW7WuuKrGhUHlC6xSEUJ7SFU04PH8aKw,1054 
+torch/include/ATen/ops/special_i1e_compositeexplicitautogradnonfunctional_dispatch.h,sha256=o05RBzmqTnWMQs5ONUUBiCrWUz_T4YvJihoQWInsC7Q,795 +torch/include/ATen/ops/special_i1e_cpu_dispatch.h,sha256=ibSYEnHh7G-MvopZMljLKQy8tpzgSZ3-ZCELTXPooAo,892 +torch/include/ATen/ops/special_i1e_cuda_dispatch.h,sha256=n_Me5OC126v09hW4lrzI1djyAlqp0CchnECh8a5mR5Y,894 +torch/include/ATen/ops/special_i1e_meta.h,sha256=OiPc7unPxDxVePVVf6e-J16mMEzlPNFu7Kx8o0VFOKA,580 +torch/include/ATen/ops/special_i1e_meta_dispatch.h,sha256=i0JPXilT0kpxzILbdUzJrXDA7Z8RzOkh7nr81lOhK0w,894 +torch/include/ATen/ops/special_i1e_native.h,sha256=keFQJSwXhFwBNwXTKahD_K-E5u78Ubs60D-2GretJDk,611 +torch/include/ATen/ops/special_i1e_ops.h,sha256=Ralx8u0c9kFGZ5vRy7HgyrTi0JQHbv-ID8G2dB6VPn8,1620 +torch/include/ATen/ops/special_laguerre_polynomial_l.h,sha256=HFtY7NtFenkESdKdGbG89BqiN2NkkFy5svrs96JTTqY,3040 +torch/include/ATen/ops/special_laguerre_polynomial_l_compositeexplicitautograd_dispatch.h,sha256=oL8oD-J6rBm1bMClvOFWRXtuUXMQPbsfhn3NXSErwv4,1384 +torch/include/ATen/ops/special_laguerre_polynomial_l_compositeexplicitautogradnonfunctional_dispatch.h,sha256=P2AYbfKeZoasu9IA5LnQXYMvKr4mcn4hSSrD9YhfTXA,832 +torch/include/ATen/ops/special_laguerre_polynomial_l_cpu_dispatch.h,sha256=8qWQhqj7r2vrbj4potQdc8xyGKhCzgCUqba-DeNcyC8,1003 +torch/include/ATen/ops/special_laguerre_polynomial_l_cuda_dispatch.h,sha256=xSUPWtXWQkUWh623zTcY-8q3QVW8D7koGmlIc0Pzx-s,1005 +torch/include/ATen/ops/special_laguerre_polynomial_l_meta.h,sha256=9vtp6U-8z7a2J0IIeXQQmtgbgfbN2j7fZwJKN-jrVuM,617 +torch/include/ATen/ops/special_laguerre_polynomial_l_meta_dispatch.h,sha256=PU-mKJD_HbTwLXZP37dPraUziXr7V7b6j5uQO8Q4Dbw,1005 +torch/include/ATen/ops/special_laguerre_polynomial_l_native.h,sha256=_JFxxouopvY_pOplikicwhjtxYM5r6XfuP8VUsnQJ6w,1116 +torch/include/ATen/ops/special_laguerre_polynomial_l_ops.h,sha256=nEHZDgRnXeQVxKeVjffxr4XXkBo-Qmczs-l_AJvXJmY,4812 
+torch/include/ATen/ops/special_legendre_polynomial_p.h,sha256=LS8XzpnPT4lUKCvIJnST837pdCctAZnTxVIVtqd-634,3040 +torch/include/ATen/ops/special_legendre_polynomial_p_compositeexplicitautograd_dispatch.h,sha256=v4k_IW3h5mIVXC-Yld3gVAY0ZVSSzorYUEmcCw8zUmo,1384 +torch/include/ATen/ops/special_legendre_polynomial_p_compositeexplicitautogradnonfunctional_dispatch.h,sha256=WJ4NXiY-HODQcIPIeKKIqeCPGUSe-rtKCTG7eOoGJE0,832 +torch/include/ATen/ops/special_legendre_polynomial_p_cpu_dispatch.h,sha256=rMzkXojw_UFEbtv37ljvw39uSwCm_eneOUTSW8_ZPS8,1003 +torch/include/ATen/ops/special_legendre_polynomial_p_cuda_dispatch.h,sha256=6_aBcam1NOo0Rffz-2Onljww770_ifYYEh38tEsTRMc,1005 +torch/include/ATen/ops/special_legendre_polynomial_p_meta.h,sha256=DDyZOM3OhHZwGON4Gc_7GWklyhleBSoWPCppxdd-_Ss,617 +torch/include/ATen/ops/special_legendre_polynomial_p_meta_dispatch.h,sha256=F31HVELU-CO4jkvo0ia_REk_p1Ah24hHMsvgoF46eu4,1005 +torch/include/ATen/ops/special_legendre_polynomial_p_native.h,sha256=uATt9JOeqcjEWhxRw2t_ExzdFVwEsQ1_i3F8K8uKI4Q,1116 +torch/include/ATen/ops/special_legendre_polynomial_p_ops.h,sha256=u6voIVfiA5l5fif_8H8bbS7RTHIOJCJJ_JCkWZUuFE4,4812 +torch/include/ATen/ops/special_log1p.h,sha256=3DKC_0hxVpZ1-WqWjQVH1Y2SLeR-bv4psO5vQF0sx-0,1074 +torch/include/ATen/ops/special_log1p_compositeimplicitautograd_dispatch.h,sha256=dA5Md7NFpvtf9_Eq2S6Z9ZHG4BbGMp2mTOrtfv0WOCo,942 +torch/include/ATen/ops/special_log1p_native.h,sha256=fE77vhedrebUsoVLHUBDCLCxP7c32sn1tgiyeRq2CMg,568 +torch/include/ATen/ops/special_log1p_ops.h,sha256=ZaDyVd5p5twXVmIescoEiJXVIKOsHpLKRhtWNSfVdYQ,1632 +torch/include/ATen/ops/special_log_ndtr.h,sha256=PGjeExKUekmx0r2WE4m3Sfc5m4fFUVfekUQMwUeMA5U,1104 +torch/include/ATen/ops/special_log_ndtr_compositeexplicitautogradnonfunctional_dispatch.h,sha256=iiv-Xu799BLI1CcYa-xBmNh87WmM9YBYQlvxx22cz1k,800 +torch/include/ATen/ops/special_log_ndtr_cpu_dispatch.h,sha256=aZ-t2T-N3-IKF47DPQGKE0Yg65l-z09ds7aRJahEN2o,907 
+torch/include/ATen/ops/special_log_ndtr_cuda_dispatch.h,sha256=ll7Pxu5LEo4IsYAvHiG5wNe7nRSJSxiAtboO4iagr1Q,909 +torch/include/ATen/ops/special_log_ndtr_meta.h,sha256=mRGnzDR8XaMfXpbPjr8qiE8NZlh7W3WXk2iBhjMsDmQ,585 +torch/include/ATen/ops/special_log_ndtr_meta_dispatch.h,sha256=p-cV0kYYDijY4J1dmWjtKEdAey60AsndLeWH5pL7VNc,909 +torch/include/ATen/ops/special_log_ndtr_native.h,sha256=U6Bc1Imgi0LNZmRHmNtCiNRkCXH2MhQzI4Ew4DOSyoc,626 +torch/include/ATen/ops/special_log_ndtr_ops.h,sha256=VA5Ev0Y-PPunDfyPGDdWOQPvz1t13z0-svB0Ktj66zk,1650 +torch/include/ATen/ops/special_log_softmax.h,sha256=twnxCG2c83UrHUEXJ7HOUvIhkJJeOKaI2TqFvHoPeoc,781 +torch/include/ATen/ops/special_log_softmax_compositeimplicitautograd_dispatch.h,sha256=OGLtHls428RQA0Pftp5csLjJnhd2JnDsTO-BxM66xvA,844 +torch/include/ATen/ops/special_log_softmax_native.h,sha256=K0C1hfGwyMe53ax9Ibfv_RuFqbh3JpahUsSNiMLhWME,556 +torch/include/ATen/ops/special_log_softmax_ops.h,sha256=9nGtQulQZF74u3lS786X9XCZqqz18ZFyYJzhcDiB3gE,1188 +torch/include/ATen/ops/special_logit.h,sha256=VEyp0vCuU5eSEnOnhBMSlAZRuxEjq_RUtivyitKXAxk,1257 +torch/include/ATen/ops/special_logit_compositeimplicitautograd_dispatch.h,sha256=vKqtq_hG87tN8H_xMTupuVnfooBgT1ga0VPxT3naSeE,1059 +torch/include/ATen/ops/special_logit_native.h,sha256=iICOoXImUc74CjSH_2szp9G07tdDOnFtDMELDysuHrs,641 +torch/include/ATen/ops/special_logit_ops.h,sha256=wi4Qp9rh4dQrUUS8Hc2zwFJORSsYtHRXAKdLdQpDZ5M,1832 +torch/include/ATen/ops/special_logsumexp.h,sha256=C5Iyx9oQJ9I-mG9z1oEUl-Gg9VicutqTzQ99_eHRvnw,1369 +torch/include/ATen/ops/special_logsumexp_compositeimplicitautograd_dispatch.h,sha256=mNTT1HyXqJ0v9CGT387VGXuERWo_ZOgAaGSt6rbUydw,1071 +torch/include/ATen/ops/special_logsumexp_native.h,sha256=vxrbJyniKCOuWUQEkFFLpI2ZsqPkgtf5e4Byz-ISQpY,652 +torch/include/ATen/ops/special_logsumexp_ops.h,sha256=KwxWCg8W37ocQ8ZvrRzwi-e6ICenu2h3yluGAY7mhkE,1906 +torch/include/ATen/ops/special_modified_bessel_i0.h,sha256=7z1FXUOZjPr5-OuSIT4AKFDWfw4_emDxTc_g8mzE7Rs,1204 
+torch/include/ATen/ops/special_modified_bessel_i0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=pFTxQmlG8pJC5wjAVnCD2hF96-LxD9VhfYzncPFdNLg,810 +torch/include/ATen/ops/special_modified_bessel_i0_cpu_dispatch.h,sha256=OBkfCgQ-NHvj-F4xwBogc5kWLfZmg7iKfX84thPxVpg,937 +torch/include/ATen/ops/special_modified_bessel_i0_cuda_dispatch.h,sha256=ZD6Gc5h0xd2KfPrQu2dhB3lBItRroyDYc03KXiX_SOY,939 +torch/include/ATen/ops/special_modified_bessel_i0_meta.h,sha256=KuZr4Aw1KNSSNPgEENYAiGkfu0tve7IpLOlCHU4LCyA,595 +torch/include/ATen/ops/special_modified_bessel_i0_meta_dispatch.h,sha256=afJwqTFPtmST8QP15q0uBie5LuCrxi0Uf7vTKUZ9z0U,939 +torch/include/ATen/ops/special_modified_bessel_i0_native.h,sha256=EqCqFgpx1SgTZ7ni9hmUrWACdJ-Sw1m8gzKSsUlc_YA,656 +torch/include/ATen/ops/special_modified_bessel_i0_ops.h,sha256=0-dA6QjwpRS_Z8Z5DbE_10aiUmtO3dQ4yMyL49SN7s4,1710 +torch/include/ATen/ops/special_modified_bessel_i1.h,sha256=-Zxg113SNylJn3ljZf8ynxEoRn51NZ2cf3RKiwn7Usk,1204 +torch/include/ATen/ops/special_modified_bessel_i1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wM70WeVPkJ6u_izSY-YHTeYkNRsZN7ghmJ_Qgw4krkw,810 +torch/include/ATen/ops/special_modified_bessel_i1_cpu_dispatch.h,sha256=w-6Cr8smBL9DcjILWiwxEpeR0vtpRi5kYPC3SeRdkVY,937 +torch/include/ATen/ops/special_modified_bessel_i1_cuda_dispatch.h,sha256=UOpJamzlZR8zxJXHrsN2YKAi8KZBBxncTvAjGq5mBxo,939 +torch/include/ATen/ops/special_modified_bessel_i1_meta.h,sha256=CzW6MnEkHVVY1Ipxngliluw3A7qsK3Z-IeAmvkhfvEQ,595 +torch/include/ATen/ops/special_modified_bessel_i1_meta_dispatch.h,sha256=C-_cMGprsHLXNMaPg0xOHLyHP4a7Q4yzWMAsUqNQxnY,939 +torch/include/ATen/ops/special_modified_bessel_i1_native.h,sha256=QHh06fmNiUkK3ivH06ebDB5E1iQiySIDSkLX7ce1GXs,656 +torch/include/ATen/ops/special_modified_bessel_i1_ops.h,sha256=gH0X72KMqEDMf2bRDcjfrwNO7aKHMafK9fZECiiwP_A,1710 +torch/include/ATen/ops/special_modified_bessel_k0.h,sha256=Z9LaDkLEaaJee54Lm4zRZIOuPHX9asKX4CsglE6VQbU,1204 
+torch/include/ATen/ops/special_modified_bessel_k0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=BYumZpNsKhBrXX_J3eMifksgroqksjwnkSec10VPqJI,810 +torch/include/ATen/ops/special_modified_bessel_k0_cpu_dispatch.h,sha256=cE8X1_CwSFgWTO0pUqkHC0L5aNGD-JKFSWbQBCIZLPg,937 +torch/include/ATen/ops/special_modified_bessel_k0_cuda_dispatch.h,sha256=BMacjV3y9H5IY81d42d9SL-gVcas3HALmemIqxWgQ9A,939 +torch/include/ATen/ops/special_modified_bessel_k0_meta.h,sha256=R0HSRmtqJm3UhWAb155fdvTo3TuufsgMq1lKsm9RDcI,595 +torch/include/ATen/ops/special_modified_bessel_k0_meta_dispatch.h,sha256=DgSCw1RtI4P7EFRrwYucRy4DXKZ07fWHts-C97x5qsU,939 +torch/include/ATen/ops/special_modified_bessel_k0_native.h,sha256=gL5W29KX0DCEHQXqTY88xrJNm6sA9NG9Nug_iujkgY4,656 +torch/include/ATen/ops/special_modified_bessel_k0_ops.h,sha256=VieDG8hQwvO7nsqv535KmvqlwhmsGfCs3r3oAqkHiQA,1710 +torch/include/ATen/ops/special_modified_bessel_k1.h,sha256=DPPNMhoCMe5iFATJxXlqcc3ulp-WrrdcBREnxlTAuNQ,1204 +torch/include/ATen/ops/special_modified_bessel_k1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=iKZaHnz44a25gWntfGs7NxDc3a4BQL0XmoERC02CRjY,810 +torch/include/ATen/ops/special_modified_bessel_k1_cpu_dispatch.h,sha256=dOLY08GTl629mUEiRTPl_iJO2Hc4TmMu4j_mX3hR_Tg,937 +torch/include/ATen/ops/special_modified_bessel_k1_cuda_dispatch.h,sha256=XKAPyZU2eE0VEQXdGQ5CPpH0twKZV2XzkyeVENoGZXk,939 +torch/include/ATen/ops/special_modified_bessel_k1_meta.h,sha256=3c1r8NjvR0444FR5RtycYrmNTj0ck9xxYRFMYwefiY8,595 +torch/include/ATen/ops/special_modified_bessel_k1_meta_dispatch.h,sha256=6IJ0EyYaXDwZupjgU6vnnEbrRQl7e-Q51IOkXRMtxyg,939 +torch/include/ATen/ops/special_modified_bessel_k1_native.h,sha256=XduDoH810lZlDgpyeqto1CT4_UYMIWaGqTIpT0NUWzs,656 +torch/include/ATen/ops/special_modified_bessel_k1_ops.h,sha256=Zd6DmwM6wi7H1-EqHUeOtr0PeFgGjmjQkWL2PTsI7vM,1710 +torch/include/ATen/ops/special_multigammaln.h,sha256=t0CZUTgObvSgJzGJ4LXNgLQ9trtXLGw5VH2ChrN8Ivk,1207 
+torch/include/ATen/ops/special_multigammaln_compositeimplicitautograd_dispatch.h,sha256=82J93BYLEfBwmbqgSMnFghLX8DMk0T2u0wm_hq0mz-g,996 +torch/include/ATen/ops/special_multigammaln_native.h,sha256=xd-TzigqNXPc6S-nYScO9QCfLGQAJqCnlxHhuBUcwvw,604 +torch/include/ATen/ops/special_multigammaln_ops.h,sha256=Et-xQdBBvpXKZUIRviXwOVgGRwrBhTYXGK72IcZWR0I,1750 +torch/include/ATen/ops/special_ndtr.h,sha256=-fwmnhC69OxrSIGWzGRN7XbTOLmgkGu6m-HxP9YGV8U,1064 +torch/include/ATen/ops/special_ndtr_compositeimplicitautograd_dispatch.h,sha256=H82QwdT47klwN0toZ6bPtyQHFdpO_jWCpYVywACRzrA,939 +torch/include/ATen/ops/special_ndtr_native.h,sha256=xsEP1XBw73f62JHPx0Ld5LLZHj507q9LcXx0A6H9AkM,566 +torch/include/ATen/ops/special_ndtr_ops.h,sha256=lY5xcactJW_OfmmPJeOhXCRhbBiL01BVBw8S6_2_AUI,1626 +torch/include/ATen/ops/special_ndtri.h,sha256=x_9WNthWyh19z8pTAZkXfyY6zE37ozpAtvnWgzF-lh0,1074 +torch/include/ATen/ops/special_ndtri_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ENAJDN9CjseR0VgQFLywB8a94r20_LeNIsihKRqS0u0,797 +torch/include/ATen/ops/special_ndtri_cpu_dispatch.h,sha256=bM_BCYc949KX-D5eLsQws2ymflbbOvaISmEBAzNIA6I,898 +torch/include/ATen/ops/special_ndtri_cuda_dispatch.h,sha256=f1MFG28KQOXpkuDxKGjaVmtnhHGiyxniq2Qo09HYttU,900 +torch/include/ATen/ops/special_ndtri_meta.h,sha256=bKufo9HkLDaHtYCJKaFWv1kxFFV75OuUDOFPeklTQjU,582 +torch/include/ATen/ops/special_ndtri_meta_dispatch.h,sha256=x6pXJRL1UkZkXRIoJO4Tw6JWpULPkQLcVkFDuw3Cw_I,900 +torch/include/ATen/ops/special_ndtri_native.h,sha256=WwvrxAaU4IFPiddokRQS_Q40UvsnarL8oe-YzHfDKAk,617 +torch/include/ATen/ops/special_ndtri_ops.h,sha256=xR8MRHX7JoROzjQS15_BrpQy0EKO46jb8wiiy3dsOak,1632 +torch/include/ATen/ops/special_polygamma.h,sha256=DGFfDynx4PDgAurmjYreqxohXNaGROth5IixnnFfbnY,1177 +torch/include/ATen/ops/special_polygamma_compositeimplicitautograd_dispatch.h,sha256=URzm656vMhQ0cEV6jnK988gAb76F7OkveQE-QsELV5I,987 +torch/include/ATen/ops/special_polygamma_native.h,sha256=2SIoW5MRBCedv0TR5nngPSJouQC8R0XcnVAlcAXsbMA,598 
+torch/include/ATen/ops/special_polygamma_ops.h,sha256=QybAlH-13uugMkaQ75HICpWdo7uz6XSViJDGEjbL8Ho,1732 +torch/include/ATen/ops/special_psi.h,sha256=zyMg3YTEJx0Xt08gpU52OkQD4SEvoAmF_PNimopdsDc,1054 +torch/include/ATen/ops/special_psi_compositeimplicitautograd_dispatch.h,sha256=fV2_XWltrtP-QOC2biahvv4YVPev9VFN_eI2uAskoms,936 +torch/include/ATen/ops/special_psi_native.h,sha256=du_OjamGGtgNw3gzxN0_xzyDYTCY3I0vBnUi6jXblDc,564 +torch/include/ATen/ops/special_psi_ops.h,sha256=81QQq0Ph7HeK_cYCDf92KgU6RgXy9qDC2-wI1wYcXyA,1620 +torch/include/ATen/ops/special_round.h,sha256=feXSVwKwszlL8StZEvp5DCFkoCV8FtaO4HhIlMan7h0,1213 +torch/include/ATen/ops/special_round_compositeimplicitautograd_dispatch.h,sha256=sXOLVpyyJ9aGYKDH3QlsyzYM-YujyOZSfYOrp_cPtUQ,1000 +torch/include/ATen/ops/special_round_native.h,sha256=EqKL9t7b2fI3edwI4IGmb7sNwSiV-Xsenua0zyHWocE,606 +torch/include/ATen/ops/special_round_ops.h,sha256=aFbXqeFC7wZ2PO3o5i3T5f-7uKTkZGbRwv2FrVi_uxs,1757 +torch/include/ATen/ops/special_scaled_modified_bessel_k0.h,sha256=AJFZh8LewOgmP-ItS5q-7HZ2MfGPtnOpAQJ4TgcrBLo,1247 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=RVFjNrgqHV1OUgeJLheVcKc-1i6XELYH8u_HR3-8Ewo,814 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_cpu_dispatch.h,sha256=67CKI6AU3pdHE1Q1KgIknBFB8khk4LCIHtFmqOkrLSA,949 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_cuda_dispatch.h,sha256=JRc29qO6gMHQxWPOJswm9IIaOFSd78WcVBdWUOxKrXw,951 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_meta.h,sha256=4YQfS18M7YBYMKPq_cLGl2u97M8m6q6KrXYb6Rr2Pjc,599 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_meta_dispatch.h,sha256=CH8pvStEioadt1rzmChjQatl4Smzmz8vHxjffF3IGQw,951 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_native.h,sha256=wm9ZbNN7RsDb9anjApzXMZ-sZcpzxHL2LSB9ZGHGgAA,674 +torch/include/ATen/ops/special_scaled_modified_bessel_k0_ops.h,sha256=PN9Au9i31DrMLJTI00EGVe5v_OIQuDS0JYMsm3LhHX0,1734 
+torch/include/ATen/ops/special_scaled_modified_bessel_k1.h,sha256=FUMJomTsWa5xGVyiyehgDG4efyMeP6GzNHDWzF-g_8Q,1247 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ZnQ8DxoZW3sfdE-iZg6iTiQVHvpSsQZpAt6BrDevO-k,814 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_cpu_dispatch.h,sha256=uiB4nmbEBvF3xFj1g9O9aaosYNs9v1HStOJdqkQGbzY,949 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_cuda_dispatch.h,sha256=AQ-F41GWwijoMGAUz1ESa-ClVCz5AZmYDhYhxka4tb4,951 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_meta.h,sha256=OIEQYqX5dwNXsQ7GmuecF_MZtrMywSrDxpl8hwcqMGs,599 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_meta_dispatch.h,sha256=DeBURyTBajm0dgXy8ybjKOQXJJCFHiajuxkrejqqQLo,951 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_native.h,sha256=rRxpwhvxF3mmC3-4Fs-0BztY2Zf0_N64jcO2Eo-01K8,674 +torch/include/ATen/ops/special_scaled_modified_bessel_k1_ops.h,sha256=17XsMlWCdmS0OKHqzJ9rbd1vHhv6TXKSzoXcnf0C3Ek,1734 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t.h,sha256=VAfQGiXkwrcaItprtuive9nRqvwyTi_r1tW14Y7JoEE,3292 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_compositeexplicitautograd_dispatch.h,sha256=6xazY4mCNaB8N5FL__jMJLaZbtH2-igmFbBTW__yMOo,1438 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_compositeexplicitautogradnonfunctional_dispatch.h,sha256=nZTdl2AAQWxlw7eZrNQZolc2nTeiQogH8xA133xCAPQ,841 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_cpu_dispatch.h,sha256=m_uPktXJzxp12TpOqaFYAUu-mOXt34YvDhpTYHDd_4g,1030 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_cuda_dispatch.h,sha256=E-mdsglRgIQeI5o-C-Jh7LTfJccV8lq0gh0XAuPzdyQ,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_meta.h,sha256=oRKZ9MEA_KutMT5ZsxybK-GcvwtqxbEuFTj50U2TpGM,626 
+torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_meta_dispatch.h,sha256=fuCarhRt0LrpUs4gDDN8LZ2LxP7HXkJIBeaeyVpBupY,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_native.h,sha256=oTf_6ZmDrF6vDKIoAVWbNv6nUL0IHBe2gWgawhO6RL4,1179 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_t_ops.h,sha256=CY5D5T7JkqFFnIde6-tA-ac52vkB36k0XLN7wnToEQk,4974 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u.h,sha256=uah7AMxEKRtE9ILf5Hudgsuz3kO0DqMEgkVEq6ubVWE,3292 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_compositeexplicitautograd_dispatch.h,sha256=q6ZDPq87qBHd_kIAnkgH2m2GlLfvkOwfOhv4AeEQI5o,1438 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_compositeexplicitautogradnonfunctional_dispatch.h,sha256=H1cJmN63ef7krFKYeNB4CXq1502TAeBGbp0h5xcqUU8,841 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_cpu_dispatch.h,sha256=5PysWKeTbh34L2PIxpF7ut1WMwR2HC9TKJ-AwaGFWH8,1030 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_cuda_dispatch.h,sha256=vefstgoAY3rLduRsJZLC0OMHefS8G3gqUwy5vyYFoTA,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_meta.h,sha256=-jY70Jj_I4umiFVsJ6lAQAEF4Afzk4w_5CbSW7uqXiU,626 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_meta_dispatch.h,sha256=6xf-klwCMubqJd7TZJmYRCXRDuCX6pLtz_w5d6oAQq4,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_native.h,sha256=Xkmfcs_PhGud6gt2OFDhzhFTLXCgfxPHHoYHJHmBUX4,1179 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_ops.h,sha256=Kl9T7btuYmQP0_QrLxdiQUG7wpiBDT9jX9GXHSAiWuY,4974 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v.h,sha256=5YPfnRwWUzQ1Q23Pq7Qq_Yqh033tqXG1WCpMB4SIERY,3292 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_compositeexplicitautograd_dispatch.h,sha256=sOpTiUvCOsBRzDbDh-lBZvKAqhBVjbmRozzwJrgHV50,1438 
+torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_compositeexplicitautogradnonfunctional_dispatch.h,sha256=N87bt3LEf9gyzJ-RTHCUTPQUexeCjwhQZKr9uAFnoes,841 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_cpu_dispatch.h,sha256=7uzy9rIrtMbiq99L1i67CIVFevYFxr5UTism-DR1Y9A,1030 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_cuda_dispatch.h,sha256=wlY0h1C7igIHnWD7LJss0mUmOpSh8WoqRNJTr1kpErY,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_meta.h,sha256=DLq6hBOq_z5VldPYsNN8jtOCiDdrz_owMUJCRxMA720,626 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_meta_dispatch.h,sha256=IjmVgD6qvGYrncyDyhEJal7PDhovS3lLYsK3gHvg_Gw,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_native.h,sha256=9QK05UglLR7dsW7ytybvyZVk5Z2OL6Vb7ijXBVknkiI,1179 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_v_ops.h,sha256=hc58yTC0Q5680ChlO7Lv8YbGl351bcPj2mToPwNZUp4,4974 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w.h,sha256=6t8e9_e-h6KC0gwCnFZTTPqv0ODpkDfFP2NimkT9ZY4,3292 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_compositeexplicitautograd_dispatch.h,sha256=qoWodfM9DfmzjEKUE84lF6YTCCCpEPVa3JCz1J7e_P4,1438 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_compositeexplicitautogradnonfunctional_dispatch.h,sha256=trk_qFc1l31k8WiUOYb_slgS0Jn62XaVzUWna9KFMH4,841 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_cpu_dispatch.h,sha256=7k3JzOTdAuc5_yEZ2mCLEALLJU_d6mdHTNTKkZdiJpY,1030 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_cuda_dispatch.h,sha256=hkGprVCTc_LIVGGHfqLNc7kioRiLMkqrw5MD_WOXMdI,1032 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_meta.h,sha256=q_FrR2keGiUbtgQqr8bV56XxIyh2ub35zYBAMIES2RY,626 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_meta_dispatch.h,sha256=1ahY_gKUyuI3wtsQEzrMQQCu6jA7BRibn0laIxjFTiw,1032 
+torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_native.h,sha256=9VTQYhcHR1dAq1Xo9XM6giylBEeK39ef5lmRAeEuiyw,1179 +torch/include/ATen/ops/special_shifted_chebyshev_polynomial_w_ops.h,sha256=xEi-OZCMz-CUdRB9ADuHAyjkjkp4kOfuqMEr1Q6ZE1g,4974 +torch/include/ATen/ops/special_sinc.h,sha256=V0J3KuhdjXiKoNKOD15Re6T-NPRuNWQwSVPJ3gtjl-I,1064 +torch/include/ATen/ops/special_sinc_compositeimplicitautograd_dispatch.h,sha256=-92rIAhFtCSwofQNa9HWSw2WgXE9kPsrQU03c1rULF8,939 +torch/include/ATen/ops/special_sinc_native.h,sha256=d0sTYFMc9iuW1tUKfmAMDkaDNxzrNfDWYf5pqdHTRFU,566 +torch/include/ATen/ops/special_sinc_ops.h,sha256=YV_2ZdBqMcv7DpJ0C0X21ACV7fL8abeVu8u9qsV43FA,1626 +torch/include/ATen/ops/special_softmax.h,sha256=Md6u3VkAqRrCC5FbuEQZk7siGF1ReOnDdDPt1oOw5c8,762 +torch/include/ATen/ops/special_softmax_compositeimplicitautograd_dispatch.h,sha256=DK7PavCzVgDkNtjIPf-G2IAAiaceqaTxKmJzXiTIohE,840 +torch/include/ATen/ops/special_softmax_native.h,sha256=-Zz_fx-VPCNfI9hSOvi_79Uose3MwIlAgkx9lGm6-Ng,552 +torch/include/ATen/ops/special_softmax_ops.h,sha256=kWuZ_mTNLnMb8aHsqOWw-B1xzBGzn3GWUqn8EWBAuDw,1173 +torch/include/ATen/ops/special_spherical_bessel_j0.h,sha256=VApzFvnkBtZ6Tg-pt88w35v_3UNuDhzkOqqefDLOLiI,1187 +torch/include/ATen/ops/special_spherical_bessel_j0_compositeexplicitautogradnonfunctional_dispatch.h,sha256=wdGw9ThRcla9vNgNf0M2vn_cjaDxoU5lq20qPUs91Ks,808 +torch/include/ATen/ops/special_spherical_bessel_j0_cpu_dispatch.h,sha256=LZqxP_ytkHZQr0As-PXnIjojscydhpsqcE_AocUMHIM,931 +torch/include/ATen/ops/special_spherical_bessel_j0_cuda_dispatch.h,sha256=s_b49esmXDVuiSBTxspI2M2y5ddtIjV9mWxih20o65g,933 +torch/include/ATen/ops/special_spherical_bessel_j0_meta.h,sha256=afKVxFoBgMM-riy9XxNUqnQJTw1FsGpdo--_BhaB7gE,593 +torch/include/ATen/ops/special_spherical_bessel_j0_meta_dispatch.h,sha256=UrKP5GGlqlsMDnrdJ0dTSgvBk8b5LRUT8Aal7F59MBc,933 +torch/include/ATen/ops/special_spherical_bessel_j0_native.h,sha256=O5FSM9Pv1ot2nEBDVGgGwLXvjeQYr3oIb-W73RnyU04,656 
+torch/include/ATen/ops/special_spherical_bessel_j0_ops.h,sha256=1CSyxK9Q_7EF1oOj0cx9V_no0dKtr_rfTka5FLrArHE,1698 +torch/include/ATen/ops/special_xlog1py.h,sha256=abd92trsQ9MZkJyw3VniRESR7L-ZxfXKFeiHZCLap5c,2879 +torch/include/ATen/ops/special_xlog1py_compositeexplicitautograd_dispatch.h,sha256=_gDw2kWKn4upryrgw9vOGT8FLgraxtOf5-ZI7wL7MRc,1342 +torch/include/ATen/ops/special_xlog1py_compositeexplicitautogradnonfunctional_dispatch.h,sha256=jxmxbeBWUZyFGaNMAlEhiV6DbgQIXjjPqng0dQkvBgM,825 +torch/include/ATen/ops/special_xlog1py_cpu_dispatch.h,sha256=S3G8lT6TRFjRiRTjwbQdPJlW_6ytAHf2yRC4hTVVpMs,982 +torch/include/ATen/ops/special_xlog1py_cuda_dispatch.h,sha256=yeqY-LOPFXPH7eLHXGSEz3RryazYgXI5NeofG-GT31E,984 +torch/include/ATen/ops/special_xlog1py_meta.h,sha256=P0091E3UYhtq5c3f9_6YTqT8gBwN4s26oZ7OFF93UL4,610 +torch/include/ATen/ops/special_xlog1py_meta_dispatch.h,sha256=Fj4QClUHtvPhmxkaRMzBQfbu0dvQHZN7ONLnN-4gqEo,984 +torch/include/ATen/ops/special_xlog1py_native.h,sha256=p6LmyQNgFUNA5c9PIqRPVY48C8ikR7eXHfZR8H4-97I,1053 +torch/include/ATen/ops/special_xlog1py_ops.h,sha256=Y5mi0Wvv68J1isuKRLvKRv2s8O5FU00pG6twqwmPLLY,4728 +torch/include/ATen/ops/special_xlogy.h,sha256=MCbyOA1KwREeQodfkY_F3Hb92gOZ79-Tau7N9n_40is,2823 +torch/include/ATen/ops/special_xlogy_compositeimplicitautograd_dispatch.h,sha256=BKjKr0lkc6Tva0JTdzlGlx6inXkKWaYKRtu5ucxZ2WY,1640 +torch/include/ATen/ops/special_xlogy_native.h,sha256=CiXDdaVV_Wt2lb2bTTMKE_qR7dvNCJvhw1-3wGC_NYw,1016 +torch/include/ATen/ops/special_xlogy_ops.h,sha256=_cWDUo6HlRiTm3caug4V_0RHkIuT_syUUh8EaJwoALs,4692 +torch/include/ATen/ops/special_zeta.h,sha256=D3aGaJi6wthJPTDtv9aYGccTGOWN6gHm8_BnDJvpi3o,2795 +torch/include/ATen/ops/special_zeta_compositeexplicitautograd_dispatch.h,sha256=ehMK90ElYBTqmC1rabkLsK6TIGH8_4dZ_d9gXoVLMF0,1324 +torch/include/ATen/ops/special_zeta_compositeexplicitautogradnonfunctional_dispatch.h,sha256=BrOByt3V3pxs7dPg41B5H3uJdFsiqG9RTvZeiHcuEqo,822 
+torch/include/ATen/ops/special_zeta_cpu_dispatch.h,sha256=F9rNT_iWm0DDIsbtMqqpmoEtmFwxtnPwSKlgPacligc,973 +torch/include/ATen/ops/special_zeta_cuda_dispatch.h,sha256=_nRfDkaHmPPaHTKY_cNSr60lkuQh6ydQkZX-dA2M1WA,975 +torch/include/ATen/ops/special_zeta_meta.h,sha256=lOJEZ7PBd3_e8Pe7tLlUpj9o828mKJvU0gOlCgeuktI,607 +torch/include/ATen/ops/special_zeta_meta_dispatch.h,sha256=X52eMKbIW19vkkcFnfnYlG_p9gQ5K3XJZnBiBj-P30U,975 +torch/include/ATen/ops/special_zeta_native.h,sha256=G6h_DwqpsAiSLFULIk4kYNryGEFJAPHSGmxTLmnXEfw,1032 +torch/include/ATen/ops/special_zeta_ops.h,sha256=U1lPu4oavu_JND0v39MvD_EysiskSHhEjJ9uLZJFE0g,4674 +torch/include/ATen/ops/split.h,sha256=-LLhIX9OuQ9LmLr3ls9q9j8TMwP2jy5qIz5y7Fo4Cyo,2706 +torch/include/ATen/ops/split_compositeexplicitautograd_dispatch.h,sha256=IoO99_TxuXApxAA34yqo4UwJrz66fF32QSa81-50MLE,927 +torch/include/ATen/ops/split_compositeimplicitautograd_dispatch.h,sha256=yShvDGMSibNucwcyJaMl0Z45poWFlDz77thn-G1i3QE,943 +torch/include/ATen/ops/split_copy.h,sha256=jqUnftcuzWfxthRB4r_Q5bvMPPHpIdFE-Y_qTasbMA0,3954 +torch/include/ATen/ops/split_copy_compositeexplicitautograd_dispatch.h,sha256=iePmmIZybJDIg1z3DFLO8Dx-k4TsT2fLne_3zhylJ2Y,1174 +torch/include/ATen/ops/split_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=1Z_ZngoFAPAqvahJuvhzEAYKvqUSiYaBb3sNpmNWecc,963 +torch/include/ATen/ops/split_copy_native.h,sha256=Vx8nwITzJ85UpRbQTCIubAlkszoN3ewTWCSF2_0Riwk,664 +torch/include/ATen/ops/split_copy_ops.h,sha256=_h9q5cNLTYh6sPw9wAzfnMtRZi-6mr13YIC3hq-qa0o,1930 +torch/include/ATen/ops/split_native.h,sha256=7PUmF9km2HS179k1Q6JhSVwNQAtU7SwCJhPpVwOTTm8,647 +torch/include/ATen/ops/split_ops.h,sha256=DHadIIvL0NOi1mWRl-zNZQenglCgNQ6FR2qf7pn83MM,1925 +torch/include/ATen/ops/split_with_sizes.h,sha256=va_U58DnupoOW6l7E_afy42Ju9O91IZIyKIul_s5JXk,1728 +torch/include/ATen/ops/split_with_sizes_compositeexplicitautograd_dispatch.h,sha256=ZLqTTZeLViBbeFfMde3tboq_LN_DM_YorYGmAV0L2XM,967 
+torch/include/ATen/ops/split_with_sizes_copy.h,sha256=oQN7G5-clJO1mpVqpwFcL8r7lTAQGNY277WEQgu3-k8,4463 +torch/include/ATen/ops/split_with_sizes_copy_compositeexplicitautograd_dispatch.h,sha256=h8-c2MPRMEgJKMxB2DxkMxvBDItB5arIFhge9AhrpOc,1254 +torch/include/ATen/ops/split_with_sizes_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=voL3MYIKgAW0aOb9BKoQX17weTprRc5tWkdKVJwR00Y,1003 +torch/include/ATen/ops/split_with_sizes_copy_cuda_dispatch.h,sha256=gLuYeUlzigRs6Ris9TvjblyJd6WlSs6FAIxpS1AIwbA,1212 +torch/include/ATen/ops/split_with_sizes_copy_native.h,sha256=gEfnEnjNviGdhUdRgBf1L9qj_wL3caP8uEHDhjO_yLU,824 +torch/include/ATen/ops/split_with_sizes_copy_ops.h,sha256=rT5kIAjoloz8hpNOwT1zlc2FyfzyglMbRGbv14vuSNo,2013 +torch/include/ATen/ops/split_with_sizes_native.h,sha256=vTXuC-0_f_uYaSchzCzU0zinmhpTOTX6HUOtBc0RruU,675 +torch/include/ATen/ops/split_with_sizes_ops.h,sha256=njXSWS7viLFWDJLlDW2DUWeNV9-wcP6E3T_ZfjSheSk,1210 +torch/include/ATen/ops/sqrt.h,sha256=CQewl1_u42xcfV1lf9mAWeCw6v9NXMTLY6wGRkTRjo8,1120 +torch/include/ATen/ops/sqrt_compositeexplicitautogradnonfunctional_dispatch.h,sha256=97jxEw3IBLKh7rAWcVQM4cFD7DYbQSPBouJn6uBZt6c,837 +torch/include/ATen/ops/sqrt_cpu_dispatch.h,sha256=FIzuC9ibPlGlRukNNXHYXwHr4uGhIuJIC3e5oc0MflA,920 +torch/include/ATen/ops/sqrt_cuda_dispatch.h,sha256=AG_H9LxQ22aS-YI9CaAwMdxvOIH-sk2oLT7mkOpkU4I,922 +torch/include/ATen/ops/sqrt_meta.h,sha256=FWc18tOxrB4OO7qDNUd4vDOeXpDxjzWQaRlQDASGbEI,573 +torch/include/ATen/ops/sqrt_meta_dispatch.h,sha256=BXqcvxE3iUMNkgelIAUU0UWcfXnMMyXnOFmcqRd_4PE,922 +torch/include/ATen/ops/sqrt_native.h,sha256=fa4zX0N04AWl7sCSghU3jtjiqES8nDqMfyttPQqWwms,998 +torch/include/ATen/ops/sqrt_ops.h,sha256=PYz_jShPifsIuOvfnXyTFumL2FRBpWGFOjRyRIgnGeo,2104 +torch/include/ATen/ops/square.h,sha256=j9X77BxTReN6aVwnHgT9I7e9e9GkEZXqRN55J1w15Gc,1146 +torch/include/ATen/ops/square_compositeimplicitautograd_dispatch.h,sha256=ArFyRehnUpDPKiX1cyyjno_6Tyecl6SC4XegaUSR2VY,972 
+torch/include/ATen/ops/square_native.h,sha256=7G8VETBMhWDOIUoVgn23BwwM3hANh0RlPCkaUjNXbZU,605 +torch/include/ATen/ops/square_ops.h,sha256=UDDcO3lEzP7rQGx2GQTKm8blmzd_mUZBOnw3mWAIg9k,2122 +torch/include/ATen/ops/squeeze.h,sha256=4j3W882opW4fSrmjQ7_XWjcUAkO2Z9fVIRO1I08fOSc,1189 +torch/include/ATen/ops/squeeze_compositeexplicitautograd_dispatch.h,sha256=JedZgFF9yVWNAk5y50Ry92u5MBpkVHswD5ZujA-roTw,1099 +torch/include/ATen/ops/squeeze_compositeimplicitautograd_dispatch.h,sha256=S9SO_l3wPUVxsrUVq6U_cIwCZonl40hAe6FNk_UeNAU,851 +torch/include/ATen/ops/squeeze_copy.h,sha256=exgIE2yDuA7g4Ls_tytqQARCL8lQtIAXiy1ubh0bmDA,2468 +torch/include/ATen/ops/squeeze_copy_compositeexplicitautograd_dispatch.h,sha256=XOOhc9zI2HTk01FwjEtS1cXPWfHlT57Cqqq1c0pGRpo,1285 +torch/include/ATen/ops/squeeze_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=qsoDUAwDFa6Tb6Vh9IYW5iOBp6kM2PgBAZ8i4Fn0KFQ,950 +torch/include/ATen/ops/squeeze_copy_native.h,sha256=JlK7tYEgklM8132hDzWEWMhVrMy_9RmqAQMK_WT9dAQ,940 +torch/include/ATen/ops/squeeze_copy_ops.h,sha256=S9Xy2-18lLkfnbZsRjVF238Ztb_VpT8geC-bWsF_26g,4290 +torch/include/ATen/ops/squeeze_native.h,sha256=MSuCtw8G9kXtW6p9UiQDwh8_JiQP_sayFJgVRvrjka0,1409 +torch/include/ATen/ops/squeeze_ops.h,sha256=Cm6tPBW7mDPhjufbXnAeH6OQiYfMt_lpHBWGo-kGydI,5186 +torch/include/ATen/ops/sspaddmm.h,sha256=P_oppLxrbCqBBoWU4RTReTnv7c8vex3280oPIPXdbck,1584 +torch/include/ATen/ops/sspaddmm_compositeimplicitautograd_dispatch.h,sha256=nr_I9hBUFBQmfZOvwWeH9zLuQyBntS3slyqgcUqB8JY,871 +torch/include/ATen/ops/sspaddmm_cpu_dispatch.h,sha256=nAn1kOw6aJaflPQADEzUEiHtTRMxgKdbLCs6vxwsX28,1033 +torch/include/ATen/ops/sspaddmm_cuda_dispatch.h,sha256=qQrd39YejwtWGJnHD0VRTv1YWLFHgvWsgUpL9ylb9Yk,1035 +torch/include/ATen/ops/sspaddmm_native.h,sha256=6Km5-E7004oHuwr2c8cdtixm6w10J0ZgkUaGj8ygT_8,1349 +torch/include/ATen/ops/sspaddmm_ops.h,sha256=ao_4BU-8hQbmR0_552nHOt8XYHas414O-0o0UUez7Lo,2283 +torch/include/ATen/ops/stack.h,sha256=OwcuNcdQPVEk1bdwCsoNhtlR9kmXgw3SXjiVu22PL3A,1106 
+torch/include/ATen/ops/stack_compositeexplicitautograd_dispatch.h,sha256=OXt2k3jhQ4jnVGNOl8MbEMQYqXNwA__R6NjcHjIst-U,958 +torch/include/ATen/ops/stack_native.h,sha256=QE6WoL_82j-AT-mAu0irZIhHRghKLlX49kcFPL4zZFc,578 +torch/include/ATen/ops/stack_ops.h,sha256=adoswbfBko6mD9hSLQzxIt0hoaif6sGVqNe9BlTSIAM,1674 +torch/include/ATen/ops/std.h,sha256=1GmX7sL0xG1p_HD19nSTZQ3yZnwizpR7Mwlqxq9q4fs,4848 +torch/include/ATen/ops/std_compositeimplicitautograd_dispatch.h,sha256=WhKbemRRHqqWk6ZMKFLA2peKQ9PHnCImXOb4ZLHYElU,2021 +torch/include/ATen/ops/std_cpu_dispatch.h,sha256=91LWqGLXL4stVSiWJNgCiC-fVogJcgT5iraq90Ybayo,1213 +torch/include/ATen/ops/std_cuda_dispatch.h,sha256=-DiKXpgGulYlPrVRtUjB5YwuZ8zibfXwL0phAkhDCZE,1215 +torch/include/ATen/ops/std_mean.h,sha256=J8vHj10A5w9R9EeJzIjCicrPB1HpxgR3fRk6ZxjwvFk,3194 +torch/include/ATen/ops/std_mean_compositeexplicitautograd_dispatch.h,sha256=VCpaG6XBA7kUd_nHk0P_Q4fy0Qlf4Y_RtBdbcVGTLak,1183 +torch/include/ATen/ops/std_mean_compositeimplicitautograd_dispatch.h,sha256=0t2CCz_BRz4vw2-lm74cNf65DAmRpPNGM8P3SxeJ8Ko,1273 +torch/include/ATen/ops/std_mean_cpu_dispatch.h,sha256=Rjd4e1_wDVt1nH12JogCPzxJITes6YLs0TzERJRQqsY,874 +torch/include/ATen/ops/std_mean_cuda_dispatch.h,sha256=7DHVQB0VmevNgu4dlp2kHE4CPYJQ34k223yCll47J1Y,876 +torch/include/ATen/ops/std_mean_native.h,sha256=Hjhy0NPJa8-XfmPxMsdJFf_4fb6JuHjHtgsEVAphhCM,1437 +torch/include/ATen/ops/std_mean_ops.h,sha256=oMWjeJVvxFexHo_UHTTAkcSWXBSQiC3Z47Ae06JMOqM,5871 +torch/include/ATen/ops/std_native.h,sha256=EY5BR-P8ic1qFdLDTxs2cH3r7v9G6dzLclVDXs5V-AA,2012 +torch/include/ATen/ops/std_ops.h,sha256=_c7cHnPMiS-w2POpu_ZW9U5BJ4sL9X-709mxJUXw0qY,7778 +torch/include/ATen/ops/stft.h,sha256=p5AzPnT8oIdiW_pUiC8gT5y_LXxz-mobyApvSKhR8ts,1851 +torch/include/ATen/ops/stft_compositeimplicitautograd_dispatch.h,sha256=U82lmwFe7mNGV5NDhK3maKs4u1h_ZmH4YSVEWaYkCiw,1408 +torch/include/ATen/ops/stft_native.h,sha256=p-4OxhpJxJJZCcrDXK-xzm-XJQNuVtpKRlPi6-BienQ,1159 
+torch/include/ATen/ops/stft_ops.h,sha256=ooCGybrqK1E9kswNMQMrMZGHxjWwYKtrVMctRiMfleg,3130 +torch/include/ATen/ops/stride.h,sha256=rEcnMc_5V99CFnDSsCKG5CPyv_d3M1g7qUPgvUDKR2Q,834 +torch/include/ATen/ops/stride_compositeimplicitautograd_dispatch.h,sha256=PN5dIRTcMD-TajrInu1eREZxjdwEEkE5pCfXJS47R4s,842 +torch/include/ATen/ops/stride_native.h,sha256=hxZpCBdZ47UAQUQg4eChAKQl7wgdR19R5drzNuXNKjQ,554 +torch/include/ATen/ops/stride_ops.h,sha256=WMy1K7q5CgV0j3ePGUxB4IoUbA_7rPnpMlofRU3ryKs,1614 +torch/include/ATen/ops/sub.h,sha256=e-cx29zils0IpoDm4-06wFJfyW2B0THc2p1qruJqz5I,2115 +torch/include/ATen/ops/sub_compositeexplicitautograd_dispatch.h,sha256=6okzax7hRp3u7e6DzUg-ZoG_jEUvB2P94CKdfkaW0Ug,1174 +torch/include/ATen/ops/sub_compositeexplicitautogradnonfunctional_dispatch.h,sha256=OxvbKUWEyGxA1fKAq6Nl_M_a9_bzwTGFAgQe42YIjFY,943 +torch/include/ATen/ops/sub_cpu_dispatch.h,sha256=z338yo7XiXwDw1ho7R18dgS9aA0P5fNGfEPMP3s-kD4,1130 +torch/include/ATen/ops/sub_cuda_dispatch.h,sha256=Wmo_-nNh6xa4QvPSGoSWJf9Z2ReGrjwvKyHT5dxk_qc,1132 +torch/include/ATen/ops/sub_meta.h,sha256=rMgvt5xhXtDURDaLeloEfSNLG81oONCi0H9TGAeoZaQ,631 +torch/include/ATen/ops/sub_meta_dispatch.h,sha256=5HtfxiQj1pfoE2WZFLkUIRgG_3u-X6Iod_SFLsDOQ5E,1132 +torch/include/ATen/ops/sub_native.h,sha256=cjpu5Ee78cnbAL_sYpLKptjRs2jL4ukM-NEkj9zl5do,1583 +torch/include/ATen/ops/sub_ops.h,sha256=kVSXsQSs8jnCeIxjMjLRjWHW84gEyGg9f6uMGE3JxnA,4907 +torch/include/ATen/ops/subtract.h,sha256=VLqIMwGQE7Qd7m27A_xOwG0-WZ2r3jiwdivlt6Mrl7Y,1586 +torch/include/ATen/ops/subtract_compositeimplicitautograd_dispatch.h,sha256=9wHS4xcrqPU8tU7zQcD4_T0hhuiyEbJkB5uquKplJbE,1411 +torch/include/ATen/ops/subtract_native.h,sha256=g35e24XRCMhIzVys2KD0WyfK7XQ4GvN2nfg0JY7gJFk,988 +torch/include/ATen/ops/subtract_ops.h,sha256=YgFz6qqISkd0ta6BpLk6hDaEQThDicHH2mSSui2mLRg,4173 +torch/include/ATen/ops/sum.h,sha256=IK_Q2YFUnDyUw_stIz9I1JrN2HYum6D3HKxukBUYDkk,3384 
+torch/include/ATen/ops/sum_compositeexplicitautograd_dispatch.h,sha256=IP9abaFWHQM8pk5nL2kCvQ9YpQsDrQhFUZEdaxMbIys,1059 +torch/include/ATen/ops/sum_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XUFiDFN3PwsGzNvzGpGgQ3KxW5vXJG0hilLs0WDJKS8,890 +torch/include/ATen/ops/sum_compositeimplicitautograd_dispatch.h,sha256=cEAQUg2ymOSiRrJl5fc6A8BlFzEmIqPZv_YJ_NuuoDo,1176 +torch/include/ATen/ops/sum_cpu_dispatch.h,sha256=YC4p_IgfR7KN1fHcQ-y1gYZpeb7OS9HTi0TmuMfMlJE,1156 +torch/include/ATen/ops/sum_cuda_dispatch.h,sha256=dLb5T4gtrupSaacM9T-8tUMOCpDagbQ0hP4ftfOslxI,1158 +torch/include/ATen/ops/sum_meta.h,sha256=afe2Y7BrSkQHG37iqNcYKXxvQNOvmeJ_vOhnipI5w8A,666 +torch/include/ATen/ops/sum_meta_dispatch.h,sha256=sCUq_nqWWrYa8ksexfJQcYlNoAGoXuk52vpeyRN1O_E,1158 +torch/include/ATen/ops/sum_native.h,sha256=EA_XvjXDQFxDNsgrDiT0b3ccMY3zn7ELuyN3vjWOf7M,1925 +torch/include/ATen/ops/sum_ops.h,sha256=9XefDcXFd_2dklwbi7L3dbVthuBt_TZDHbdsxfgIdmU,5367 +torch/include/ATen/ops/sum_to_size.h,sha256=0_RScitsYnuqYdgn6sHFIecfuRywye_mwCb4vWUA6-E,994 +torch/include/ATen/ops/sum_to_size_compositeimplicitautograd_dispatch.h,sha256=qcgFze61T-FTH-xR8GflgnxDysSHbyn5bhuF3sRYMrc,883 +torch/include/ATen/ops/sum_to_size_native.h,sha256=TU90_5drKIQO5QqEH_CfY5Xob37F_V8ujXTiceIhsvM,514 +torch/include/ATen/ops/sum_to_size_ops.h,sha256=DoR68dMx2-KbrSQbGaJGOjCIHd5DSDl2HyJNnEBXpMs,1070 +torch/include/ATen/ops/svd.h,sha256=eIX6XiYpPB8V3BcSOjDv6u1m1vJ_d4JD1YHk74ga7gk,1562 +torch/include/ATen/ops/svd_compositeimplicitautograd_dispatch.h,sha256=o7YTegUFrNQb6keDLJQlhU2YgNLkjn55nfO4OSBRgTA,1192 +torch/include/ATen/ops/svd_native.h,sha256=-nuHzhzDKofsjnqQ3iz2SfLUuQAT7i0W6ayYtnBnGNY,720 +torch/include/ATen/ops/svd_ops.h,sha256=NhMjg74VnektOVHPT1G1-EmM7kWyUvbDPvdpv0Y7yNw,2176 +torch/include/ATen/ops/swapaxes.h,sha256=21zn8echFpbps-iEpOuzOmANQMwqK4tSXXD19-xAZvM,694 +torch/include/ATen/ops/swapaxes_compositeimplicitautograd_dispatch.h,sha256=QKsdSRsNHmNLZMNM648kl4JIhtblxEy9qUCWiU0Wods,879 
+torch/include/ATen/ops/swapaxes_native.h,sha256=Xj3-3UFJyLzigCOcG_bJRNmMAIY2_ezE0LC1QuagDpc,591 +torch/include/ATen/ops/swapaxes_ops.h,sha256=nHQ3CD6mr_vDvAqpDtdyEYXpAvQz67oPlaM9XrjV9bQ,1717 +torch/include/ATen/ops/swapdims.h,sha256=bpJP_Yhy1BWpWSpmoqj23LTZup-t_HAmmx4U0_5DHAg,688 +torch/include/ATen/ops/swapdims_compositeimplicitautograd_dispatch.h,sha256=D0IefFV_MtyAO3vvRsnbBUFu9daGc-Mt3sDIyhpo41E,875 +torch/include/ATen/ops/swapdims_native.h,sha256=MECkqOlF0OzAimDTWzv7lIwTBEyCtu6fVwtoxapG7CA,587 +torch/include/ATen/ops/swapdims_ops.h,sha256=QLQj9WjEeObaKx3V1z7HKHuwFvojmWHHl-cNRGC2izI,1705 +torch/include/ATen/ops/sym_constrain_range.h,sha256=TXi701ESUkSQYR5ycMJozJXVua7nnMhy6RwtyI9vOSY,789 +torch/include/ATen/ops/sym_constrain_range_compositeexplicitautograd_dispatch.h,sha256=zh4KdvzWMmek1t9OZq9u-M23Jvxj83ezqBgk7AzxZSI,861 +torch/include/ATen/ops/sym_constrain_range_for_size.h,sha256=DM7hvM8Us4tQCJmcPC_GEkHAP3Vjp8OacGooYuBAi2A,825 +torch/include/ATen/ops/sym_constrain_range_for_size_compositeexplicitautograd_dispatch.h,sha256=tkU62YCRhLEq4WPAuylAHyWSzWD51UlhcABVgsHHZbg,870 +torch/include/ATen/ops/sym_constrain_range_for_size_native.h,sha256=6kHBqJLN6yRQqkBe7VSJ72IcvjtA6xQ8oqYVQYElBHs,582 +torch/include/ATen/ops/sym_constrain_range_for_size_ops.h,sha256=cwBwtKr6lDiucu5BW4auy4_jJPCnoUKiRt_pj8hc4aY,1216 +torch/include/ATen/ops/sym_constrain_range_native.h,sha256=r-u-ZBHxhZs3Nthe6Zu9LcThKV4DNYFaCo5DfYvbW_o,573 +torch/include/ATen/ops/sym_constrain_range_ops.h,sha256=Re8XL-IzEcQ07swgWyEC2KxfbKaE51fmPUYdRzMpDr4,1189 +torch/include/ATen/ops/sym_numel.h,sha256=rIKWiZWUvbf1L4jdMeF7a2U-XnlwBMuxw3PaSojDvAw,638 +torch/include/ATen/ops/sym_numel_compositeimplicitautograd_dispatch.h,sha256=tXTb3_7AIwh0kcLzbKzEuiNbTOaq_j1Kk_xhsNaDAJA,768 +torch/include/ATen/ops/sym_numel_native.h,sha256=XqoILOm1VXTo1B-kaaUAPipny2HWV9R2WBrsSN-Vy1Q,480 +torch/include/ATen/ops/sym_numel_ops.h,sha256=ZX9kJT7Rz5NjQ7-_zEZSmwJd0TIj1btRZlXZEnBaEIc,979 
+torch/include/ATen/ops/sym_size.h,sha256=8MXGPM2yRBknhlFFcanoDB8sZxJg9D5q2WEpkLO7a8A,669 +torch/include/ATen/ops/sym_size_compositeimplicitautograd_dispatch.h,sha256=BL6mxnuGiOQ05MHhNKLVcNSPHugjuNJZ_I6WwACuLz8,780 +torch/include/ATen/ops/sym_size_native.h,sha256=H0TXww8MaXRtOEomo-Hn41Ej96xK_2I-txxqD_D0m7s,492 +torch/include/ATen/ops/sym_size_ops.h,sha256=Fk3ALHIVSV4Cxpioe24bmgZCTeP2Uy1MKATQdt9V7UI,1031 +torch/include/ATen/ops/sym_storage_offset.h,sha256=BB0c90GgIt_KuLC_HbfpIGqJ8fLqzHkBHLAUdk_aIdQ,674 +torch/include/ATen/ops/sym_storage_offset_compositeimplicitautograd_dispatch.h,sha256=itfuP6Tt093Zrofo3boMiLhS3W4jNkg5q49PxJfryMU,777 +torch/include/ATen/ops/sym_storage_offset_native.h,sha256=ZKAa_Uzrgxxin1KVn5QrBq1rTBihxLAzsNasXruCpeo,489 +torch/include/ATen/ops/sym_storage_offset_ops.h,sha256=7mhdeH8eTv8OX2iuRDcC6XzYNiSU21febSSXMWLfGeU,1006 +torch/include/ATen/ops/sym_stride.h,sha256=CBKxLfe3kxR1WJL9d9kNrdwOQS3gZ_dm5SA0oTnvZKQ,677 +torch/include/ATen/ops/sym_stride_compositeimplicitautograd_dispatch.h,sha256=z320JcS739xpzhKp45x5gGX8P-RPD0U5yAqnDCNF4pM,782 +torch/include/ATen/ops/sym_stride_native.h,sha256=yU9AE-MYbIT0cF2vpXl7VKgMbw5qGZJiQYC6lzVCsZQ,494 +torch/include/ATen/ops/sym_stride_ops.h,sha256=ZuNBlqPv7uxUk5yDbk5BYhafxkOZBmMHtZATTMIZ-Uw,1037 +torch/include/ATen/ops/t.h,sha256=6BSSn329hTcmAhRcChyJEFmSPPgmtq289KumuMG0uLM,600 +torch/include/ATen/ops/t_compositeexplicitautograd_dispatch.h,sha256=zNsOwQ5j1KDg2w1HL2Z4dXWsQQzz1CjOBaA6wNq2oEw,805 +torch/include/ATen/ops/t_copy.h,sha256=5Q4F2PXxqBs3gQH87KYJwTwVecFB1Vp5wTAqZo1Bod4,1004 +torch/include/ATen/ops/t_copy_compositeexplicitautograd_dispatch.h,sha256=JmlOfgw8-Q077uwpeK-Qr-l-zLj746wf_GwXU5ykEqM,867 +torch/include/ATen/ops/t_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Tbn5mQk7oks1TQs9P7g977GDweM3MA2JYEKjnB4ztd4,790 +torch/include/ATen/ops/t_copy_native.h,sha256=r_NsrNli1pIZ6cL38LcZoKBUdLFU8qcdFLZ6U7o42qI,554 
+torch/include/ATen/ops/t_copy_ops.h,sha256=SmjcK6eY8yu-TBm3PyrfttOGS_Ny9m1nbsPZ3Q4KFJQ,1590 +torch/include/ATen/ops/t_native.h,sha256=QGipuSEE4w6vJ5ha5iGKBqO1xNudd7y4Njj4ENN37z0,517 +torch/include/ATen/ops/t_ops.h,sha256=MqVQkBP81lbB2Y7zWOPxKjFcbWPf7Ax04KsqePw778k,1475 +torch/include/ATen/ops/take.h,sha256=XZVrBRMF0uhvc1yX7r8E5t_w-1_wlFZETrJ4S858lzg,1125 +torch/include/ATen/ops/take_along_dim.h,sha256=zMTjO28CZnW6PozOY7oQ8qrkQ7clZn65zXYUEFPOo-Y,1423 +torch/include/ATen/ops/take_along_dim_compositeimplicitautograd_dispatch.h,sha256=p6KUh4nZJJFkZGMyfONIRtjXatC4rLZDjkF_846eeFE,1149 +torch/include/ATen/ops/take_along_dim_native.h,sha256=GBpx2DEM2Rflev0cAipvvzZR1gCOS_bWN8vzSXD5ORI,701 +torch/include/ATen/ops/take_along_dim_ops.h,sha256=M5b1lJhOzSuOt6tgrZI3DtyUF4CrlrQJUFIMxQl-s58,2024 +torch/include/ATen/ops/take_cpu_dispatch.h,sha256=hbO7KFlRMYRrW6Z2utqVYJgaOAY-RPfl0c1M4YMm-rA,949 +torch/include/ATen/ops/take_cuda_dispatch.h,sha256=qFDsLb8TP2rQs8mawSUMWLeFm0IgPkGpE7CUuaPIfDc,951 +torch/include/ATen/ops/take_native.h,sha256=1zE2NR2WFKymOap-Ucz_lN0a-15V5z_j7QvpGgvPUQA,602 +torch/include/ATen/ops/take_ops.h,sha256=tckaYmHI1FAwmt8GXvjnOYtD21O-s9BAqhuSeVVwAlM,1750 +torch/include/ATen/ops/tan.h,sha256=NyK-s5kOP6a4QIR7PkoDx6ZCdEdrZRfQLxWoG6NxnlI,1107 +torch/include/ATen/ops/tan_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9msSFgcRjrg8bX6bNI5kCV6baqM0-A6C6H969OR0Lqs,835 +torch/include/ATen/ops/tan_cpu_dispatch.h,sha256=DimMQnXgY-7pND9M75hivgWEZX8K1ApIJYxXSM9Ue8A,916 +torch/include/ATen/ops/tan_cuda_dispatch.h,sha256=uY_V5HawHgc4ftTPpHtwN3pMEOP6rPQDosiUNLdS_i4,918 +torch/include/ATen/ops/tan_meta.h,sha256=3VKfaVE1wmhzkZi49PnzPtu__hc7qvhyd2Dpgdq7QHQ,572 +torch/include/ATen/ops/tan_meta_dispatch.h,sha256=ZHlw9zOpcjvJ0CGDflTwnhbGsY_MzNCMaZkckcrZDto,918 +torch/include/ATen/ops/tan_native.h,sha256=PlGInfGDyp-PqsnpMta-RCVQdFzk1sgWoshiQ9pxoUw,989 +torch/include/ATen/ops/tan_ops.h,sha256=vdU8LtsE1JuPL2kjtFLu2NY8lWXC0WhkBKM-YEul9hc,2095 
+torch/include/ATen/ops/tanh.h,sha256=fmt-c03APVzPoI1F9gqK_VPnpt9m2yLmkeL-s8PeqcY,1120 +torch/include/ATen/ops/tanh_backward.h,sha256=2XSfcWGI2rW-KBMhjvwgFlKf4MgzakJcinGNlIStWUM,1357 +torch/include/ATen/ops/tanh_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=cewqqytrQiBMcumo4dIS19Pz-CcDTLuwRMPKd1SZDIw,831 +torch/include/ATen/ops/tanh_backward_cpu_dispatch.h,sha256=bTCDJx_2SfU3SBw37eRkerFIQoxFsr9nABzBG9etGyw,1014 +torch/include/ATen/ops/tanh_backward_cuda_dispatch.h,sha256=87yeDFFt_xrco8MecdA55JhYnFMPnpDr3rhNaZ3JiO0,1016 +torch/include/ATen/ops/tanh_backward_meta.h,sha256=q4cl9-PoG1iIsBywwUHXdbd17XhAK_ShRgOACCWBWWM,616 +torch/include/ATen/ops/tanh_backward_meta_dispatch.h,sha256=c5hHwffvqGr8pxGIIBCwIvE0v5ZdXWtTnfpsJ5gAexg,1016 +torch/include/ATen/ops/tanh_backward_native.h,sha256=jDhIebc4DOZ0jgs0FLS4LUw1GXo5-ZRMQL_kaRT7Bh0,658 +torch/include/ATen/ops/tanh_backward_ops.h,sha256=zVEtG9Vp2xTNzJp8dz2UWt_u-QRNe66Zk4-awKTt_GU,1894 +torch/include/ATen/ops/tanh_compositeexplicitautogradnonfunctional_dispatch.h,sha256=OQVmUti_spPJKlM5JmiXBvOTTJk6sDBboVm7n9Ufv3c,837 +torch/include/ATen/ops/tanh_cpu_dispatch.h,sha256=khq7PUKQ1THXtSJ0J-89jDHoCcUD-swCwAfQTIci5QA,920 +torch/include/ATen/ops/tanh_cuda_dispatch.h,sha256=yrcfdE38ouc1qkOXg1cjWUM-aKAHEPcmVNDPCd8xS2I,922 +torch/include/ATen/ops/tanh_meta.h,sha256=bkbrycbCJId9Go7NmdujPfWBlkzh-at9K5CKVlA_XdU,573 +torch/include/ATen/ops/tanh_meta_dispatch.h,sha256=c9uDlsNwtm-qtf6XIwxdWoyGye5tH3PhTIrJRg7IPKw,922 +torch/include/ATen/ops/tanh_native.h,sha256=EW0txBCAMM5rJt3BHD3oocj432YsjF1RU-_p7Ym1LQE,1306 +torch/include/ATen/ops/tanh_ops.h,sha256=IhWfUvfrLkCGTu7FjNUC62RBjiNX1QzRDyPiwcez4MU,2104 +torch/include/ATen/ops/tensor.h,sha256=isoes7wQlV4GjiBAgES1eu_hDJ5Z7Nn1eDIo-vJTBnY,1631 +torch/include/ATen/ops/tensor_split.h,sha256=duKkMWBXURJNaQbZFhZ5N6QMjgO9r6nf1KMuFTuxYug,3203 +torch/include/ATen/ops/tensor_split_compositeimplicitautograd_dispatch.h,sha256=H5JK7evExo6-K1LJRsox4fWUBFHg8Y_uIHfEe09kWmY,1315 
+torch/include/ATen/ops/tensor_split_native.h,sha256=H4b5Pa-rfgkBtQe3kBCieviky57SxiLiBXoFRmLnhco,821 +torch/include/ATen/ops/tensor_split_ops.h,sha256=0HMJGNvwwiugZsEG2DGfzX0mwqHBzwRPj3RLWmwGXww,2848 +torch/include/ATen/ops/tensordot.h,sha256=aOYrYDPPWoXOcO3nkE2TJT7hLuKeeYzSZtiEMt15a8M,1514 +torch/include/ATen/ops/tensordot_compositeimplicitautograd_dispatch.h,sha256=GKHo5YJ6o4nCJYKxewCITKpRbv-GH9EhEwcssXAv5X4,1173 +torch/include/ATen/ops/tensordot_native.h,sha256=aANql91t3dWUrqQopDfhtZUsaLgncdTN99LwUaGduQQ,722 +torch/include/ATen/ops/tensordot_ops.h,sha256=xwOsAsz288xmk-aEGOX0yzlBlhEBmoPZHGGYFFki-eQ,2138 +torch/include/ATen/ops/thnn_conv2d.h,sha256=F5dkWCVvI_6l8KlB0AtX0odGS0j1w8e0RA6Zffx7dGA,6627 +torch/include/ATen/ops/thnn_conv2d_compositeimplicitautograd_dispatch.h,sha256=W652Ye1hYBsL1pBGBMgIBkYSCl6X9Ex8GD5ZLsJpGVg,2181 +torch/include/ATen/ops/thnn_conv2d_native.h,sha256=s9K2wGprx-IeiUq5nR0lTwzlrX_KsGfp9rqDV2cr1j4,865 +torch/include/ATen/ops/thnn_conv2d_ops.h,sha256=0URCrh9fjltVWMcD2E9lV0wBbUu28BS76sxWCjqbKVM,2692 +torch/include/ATen/ops/threshold.h,sha256=CPitb1jjgeAzg0bC_WU0H2ZMR5okAoi5LfSvD9nEYWw,1609 +torch/include/ATen/ops/threshold_backward.h,sha256=DaXGGCt2nDuSYsKkVFTUuN1C57PHBw33_EsekxRBjmI,1566 +torch/include/ATen/ops/threshold_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=plJpzm1z4Vb6Y6LEFRhFuBRpKZhh4sJu71uBs8uKM3g,864 +torch/include/ATen/ops/threshold_backward_cpu_dispatch.h,sha256=3RRYQvqNz0pGBKIpm5GLlDfPm9tYdQEXqpwyWEHpw0M,1113 +torch/include/ATen/ops/threshold_backward_cuda_dispatch.h,sha256=XYavrW3lmvvVr0r_7B56-EhFKrqvHWgrr6X9Oqljafc,1115 +torch/include/ATen/ops/threshold_backward_meta.h,sha256=F9wNQ_zb4kRhOmn0ZnySDTcMKZiVJmnJlh0SdDHHK44,649 +torch/include/ATen/ops/threshold_backward_meta_dispatch.h,sha256=Io856UNegr-z9rZofa9ul0SVMGHprgfINFefKpXNaLo,1115 +torch/include/ATen/ops/threshold_backward_native.h,sha256=fRwk7ksYzT2GlcRyuwAEc0tcQazBa6n5aAscB0OjvZg,1591 
+torch/include/ATen/ops/threshold_backward_ops.h,sha256=9AA-V5Qegh5fxLtUSS1Kih8T79Uyx85tnSgpFW6UxUc,2108 +torch/include/ATen/ops/threshold_compositeexplicitautogradnonfunctional_dispatch.h,sha256=jI63c68gBq8bfVtw_ycGKPUGhN8DIeWiqrVTPvQRyW8,959 +torch/include/ATen/ops/threshold_cpu_dispatch.h,sha256=rHiezQrfSYXnJn9n68hpVpu__zfxjl-6DWeSY2DvVmc,1164 +torch/include/ATen/ops/threshold_cuda_dispatch.h,sha256=k47K89InvDrRAOHe3yQ39OxOjn1DIpukBrz5zzi5A24,1166 +torch/include/ATen/ops/threshold_meta.h,sha256=VTdeFgFH_k8tVtXboy_o5Px1FJGZldzJ-TRqHCFXt2o,634 +torch/include/ATen/ops/threshold_meta_dispatch.h,sha256=4hCfmDbySXTjcvz1NmZVFWE0DdFcM5HcIt--ImF8pHU,1166 +torch/include/ATen/ops/threshold_native.h,sha256=fj5srmnUx4Iff_1xeONVB30e9ZrVdPDYJEfRCvrB9Cs,788 +torch/include/ATen/ops/threshold_ops.h,sha256=WRhObMxfCsKb1K9TL9aqwoO65pjazNOGrr_bW70BPKc,2701 +torch/include/ATen/ops/tile.h,sha256=NL-5nDq2TVBSlYSo53EjY2_3sIA6ZZytum8H1AysZsM,1338 +torch/include/ATen/ops/tile_compositeimplicitautograd_dispatch.h,sha256=x2Ibfd3Xz2YWbxrJljyuI3hTpdJ7Pq-HRpOznFi5Nes,869 +torch/include/ATen/ops/tile_native.h,sha256=reTWCYCm4y0xAb5qTmAUNeSN854j3g2tkI_Ra4nyAqs,507 +torch/include/ATen/ops/tile_ops.h,sha256=zkhuW1i2z8v3u0U4LFyOMIIUAUGBq2BlBFwkQ6IuOF4,1049 +torch/include/ATen/ops/to.h,sha256=1b7MPiNlLgEmdT9xZzHPx-ijZKnm1JvHwY9DifA6rcA,476 +torch/include/ATen/ops/to_compositeimplicitautograd_dispatch.h,sha256=r2KeSPtakZVsWeKBqXu1v3rs9OHHro8a1upc-d5qD80,1727 +torch/include/ATen/ops/to_dense.h,sha256=NNFUXXADpZotf4DF6TdCjImAXmFJAG6rekQUiTwFj90,482 +torch/include/ATen/ops/to_dense_backward.h,sha256=UyEhAAsPAPX85mfyU0TFWlyfwVBHxz2ebqe__Ah9x6Q,792 +torch/include/ATen/ops/to_dense_backward_compositeimplicitautograd_dispatch.h,sha256=Wo9FBXmMIT_MGtN4krfxnT4R2QpMdf4Zv5Fm4XNqDxc,851 +torch/include/ATen/ops/to_dense_backward_native.h,sha256=vnvY3M8oRQ21GNisMGlW-r9kA6uHUZMpAsYqYLA2GTc,563 +torch/include/ATen/ops/to_dense_backward_ops.h,sha256=ZdQIRiGjO1YrtoHsDtjwrgSw690daZSv4ucYGRsSqms,1203 
+torch/include/ATen/ops/to_dense_compositeimplicitautograd_dispatch.h,sha256=wQJUOfatuqr9neebNb3UPKDN3OjI_NJ49W2ETETV_qc,870 +torch/include/ATen/ops/to_dense_native.h,sha256=FiK27JUTLWTELeGIooCOSmHhO_-2NJam99TtG15enhc,582 +torch/include/ATen/ops/to_dense_ops.h,sha256=noTAt6WA3wYVAB9amHfuILzJbdqFa4lsGkxVUBH8nRg,1228 +torch/include/ATen/ops/to_mkldnn.h,sha256=amxq0N9k7C96zkYKPz4LWyW_xmKwu8r_IbivUA-1R3c,1045 +torch/include/ATen/ops/to_mkldnn_backward.h,sha256=1lT6LdH_2nlNtYGLdUxJ7-LSF01A4g_MDD3OIXuVaKM,709 +torch/include/ATen/ops/to_mkldnn_backward_compositeimplicitautograd_dispatch.h,sha256=o1U7p_HbsENzQVP_PpmA5ubxRWXo-UpTP8W794QHH7s,802 +torch/include/ATen/ops/to_mkldnn_backward_native.h,sha256=C63XoRi_ncto8aI_PgeWhY2AiPvJXV_Vsc4h16t5XdM,514 +torch/include/ATen/ops/to_mkldnn_backward_ops.h,sha256=zedYIO6QZXPp0nYymjPxoWQC3rKZamYlKU1pZHxb6Rg,1089 +torch/include/ATen/ops/to_mkldnn_compositeexplicitautograd_dispatch.h,sha256=XINlXwQ_SoQu5KFr-ZCPynpJPN1YaW1HJzYSXwN3ELQ,966 +torch/include/ATen/ops/to_mkldnn_cpu_dispatch.h,sha256=1sV3JVcp4MHRIBkUTpT6hvHTm0xuabZNpzBOgn28C0k,777 +torch/include/ATen/ops/to_mkldnn_native.h,sha256=R-m5fLQIPPmDXMgeLR2sn3-uboySBmf6OkwP-QIL5r4,659 +torch/include/ATen/ops/to_mkldnn_ops.h,sha256=-9yPVrQEvBWYnok9DciXbmEnkvGD_Xv4Xuu0FJ5yNJQ,1878 +torch/include/ATen/ops/to_native.h,sha256=XEUypKBPb1ddGClPiIRRC_l0QMEas2ld6l9G7VzUZWs,1292 +torch/include/ATen/ops/to_ops.h,sha256=pJyPrpgVm39_KlXjB3_ZLGwu1xDLrizxF0Uqc-Vt-8M,4561 +torch/include/ATen/ops/to_padded_tensor.h,sha256=1ekKYPmClmAauj2ZbbG0Ki7yQXT0bZkK-acE8ZyRzXY,4354 +torch/include/ATen/ops/to_padded_tensor_compositeexplicitautograd_dispatch.h,sha256=i8nAK-OA5xqKHmNVU7sT2MbTvpjNRhkK3nyzgrZ_Nh4,1326 +torch/include/ATen/ops/to_padded_tensor_native.h,sha256=G_jehco7KVoxQDplGf2_EAuvPl2Ana-28FCq8H81bso,876 +torch/include/ATen/ops/to_padded_tensor_ops.h,sha256=_9JDLrYzs3Xr64qcCZVc5nuKIwFXGP0gVCsxz1_Demg,2032 +torch/include/ATen/ops/to_sparse.h,sha256=Z9cZ-Wev4jlR8gcBkdTqnxnhfXNOHS2yieCKHWW90j0,483 
+torch/include/ATen/ops/to_sparse_bsc.h,sha256=Ofap-bclCNRQj17c9nkqI8LCDmQ-JR5Z1jlXiVbk2r4,487 +torch/include/ATen/ops/to_sparse_bsc_compositeimplicitautograd_dispatch.h,sha256=BI0PUXycKmaVhFEpe-oSWETYtPinzGECUWEtYj30N4s,849 +torch/include/ATen/ops/to_sparse_bsc_native.h,sha256=3rm6r1RCUted9u2GZkdVQtUWmRI9pVsnO8U5aWQU4bE,561 +torch/include/ATen/ops/to_sparse_bsc_ops.h,sha256=Eagope9SLxPQfR-kbEgEZcSth2bTBbbA9dLkfwHDs7w,1196 +torch/include/ATen/ops/to_sparse_bsr.h,sha256=pwPOVBQwuddbURWUIVFnPb8jgpEtdxIfzbl5b-Z4Erc,487 +torch/include/ATen/ops/to_sparse_bsr_compositeimplicitautograd_dispatch.h,sha256=9bAip_9OMsdlQeugNnY179i9wl6TA4e3Ie7P_a5unG4,849 +torch/include/ATen/ops/to_sparse_bsr_native.h,sha256=Pe4767kwF0JbqNg0bbOkDzjC2mUe8gMc8eX8EEz2LXo,561 +torch/include/ATen/ops/to_sparse_bsr_ops.h,sha256=HTuBV6o7tyQroqDMD1dDg3tbqDpeI31e44VYJQJm2bY,1196 +torch/include/ATen/ops/to_sparse_compositeimplicitautograd_dispatch.h,sha256=hMGgKAnDH1NZO2gVQH0M5z4TTkW9MZxKCArbvnLsnZU,996 +torch/include/ATen/ops/to_sparse_csc.h,sha256=QlNL4I9aPdzRNq6T8e_bpEoOdBSc66kSrTtAj2sYoHo,487 +torch/include/ATen/ops/to_sparse_csc_compositeimplicitautograd_dispatch.h,sha256=sjRNS4-HcOwfmTgXrEp7Z1Vy0x809rQfl-rD6KxYwsg,822 +torch/include/ATen/ops/to_sparse_csc_native.h,sha256=rTlwHPJ1iSd9Kt6ceIl6uCEU6n5pBiL6FHS9zBNdTwA,534 +torch/include/ATen/ops/to_sparse_csc_ops.h,sha256=cgfpkopMevejEJZULeT815Xb2iYrf65ywgm-1ej-lBI,1107 +torch/include/ATen/ops/to_sparse_csr.h,sha256=fuygqtxoRkVh4j4kPh2aAVn9JDDhrzwQgK8k2U9b3rQ,487 +torch/include/ATen/ops/to_sparse_csr_compositeimplicitautograd_dispatch.h,sha256=0YpYHSKzhIXe8gjpwG7uUflK7TH5vzo9RinDjuEuomM,822 +torch/include/ATen/ops/to_sparse_csr_native.h,sha256=pmwG2dBeaReXUOU1NRhxKvfPe2e7ljv6BlIIe8FfEPs,534 +torch/include/ATen/ops/to_sparse_csr_ops.h,sha256=pXxJsmTosv069CRuogcmXdVMMYBKJlEqYPtqYIxntSs,1107 +torch/include/ATen/ops/to_sparse_native.h,sha256=e9759H1f8_xk2kfKBHhjAxnfas7aYaIXaKBrw80Zc0U,708 
+torch/include/ATen/ops/to_sparse_ops.h,sha256=dOHCTPJdsqNeuD1uva4yhYSuzkbhsgLaKQPRYwD0uR4,1978 +torch/include/ATen/ops/topk.h,sha256=hGNwQWupbqHaF2BKiGASzwXPLAIJv8QqjzFAA8UWQgw,5130 +torch/include/ATen/ops/topk_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Qu0OyAdcsi7Ks4bmOM93_aOrfJ03amuShm2RqKLcI_k,1029 +torch/include/ATen/ops/topk_cpu_dispatch.h,sha256=ERn1_1XT0uAPbmK1lWlgdyvOj-Hd6_190nRgA4WHx6c,1725 +torch/include/ATen/ops/topk_cuda_dispatch.h,sha256=CD6MfzITyCeasLUBDCXzJlyRHKdInCNM0E7QGmAX_FY,1727 +torch/include/ATen/ops/topk_meta.h,sha256=kGjRLNc6h6a6PgIZp9E2Dx_H17nlPNgUlQUml29b7b8,624 +torch/include/ATen/ops/topk_meta_dispatch.h,sha256=XCFU7buxPpBvL5pAtkHyRr9GkCzBTWig_VjP03734kg,1727 +torch/include/ATen/ops/topk_native.h,sha256=KxMXnhbiJGGuNwGBOo27eNLMojOdEQ_iMzgOlwMOTXM,1055 +torch/include/ATen/ops/topk_ops.h,sha256=OVlMEXCgUOZLuWFw-C5Ra5GArarNbIoucWSJIMVljqA,2290 +torch/include/ATen/ops/trace.h,sha256=46ZudggXXeaYACyN8Q5i-jsShfVBsEoUFRd5INtvMEU,994 +torch/include/ATen/ops/trace_backward.h,sha256=uxtWan5O9cRTk9Wa8ScGCH1SSQhy5OPKHhPzHuBgfDc,1458 +torch/include/ATen/ops/trace_backward_compositeimplicitautograd_dispatch.h,sha256=tWr4wy15EycscUG1zc67fxftS1dMqw6CNkXOqIvdbpw,891 +torch/include/ATen/ops/trace_backward_native.h,sha256=A31-OnZaf9ZOJxfaOijxi-ZoPqKXg683fcxhaNFbjEQ,518 +torch/include/ATen/ops/trace_backward_ops.h,sha256=JMFandPNxtKUKJ8eJX9xeDYx2_ZbS8vdeb6KR1HJNMg,1082 +torch/include/ATen/ops/trace_compositeexplicitautograd_dispatch.h,sha256=F5a6MKncE7PD2pe2adeoMQ8wAyYfn7Rjwf3OPwOmgbM,865 +torch/include/ATen/ops/trace_cpu_dispatch.h,sha256=JXw0FbDz4QYdluTWpWIuOeS86zits71-T2xImJn-ujE,719 +torch/include/ATen/ops/trace_cuda_dispatch.h,sha256=s1JLCXg53uWtpfvz1nayHb7_NY9kfs6Fa9pkMFLGi-g,721 +torch/include/ATen/ops/trace_native.h,sha256=XyyYIBudHyoY-yImsr6jA-lYxzc5mxi9qd_gGRu4aeQ,614 +torch/include/ATen/ops/trace_ops.h,sha256=P3-IlZvjpwwx31wm5JI1JPmAD6BTLyh2X7WZ_DYls1k,1584 
+torch/include/ATen/ops/transpose.h,sha256=BAX5XOX7v26h1gq-MbgWXXleo_tHNzyWrdiWb5cLnzs,942 +torch/include/ATen/ops/transpose_compositeexplicitautograd_dispatch.h,sha256=agl0kg8AFsNUCam_VNjFJUP0acOTmLhC9ibOP5O8WZg,877 +torch/include/ATen/ops/transpose_compositeimplicitautograd_dispatch.h,sha256=HDXuYGWsSldkOukUY8Jg5ldFKv0l8DBdbrkWhCxF4sw,803 +torch/include/ATen/ops/transpose_copy.h,sha256=WNGbRj_yRRmTag5zNvHhswtsyiliuM5Eg4NYspsq3Y0,1288 +torch/include/ATen/ops/transpose_copy_compositeexplicitautograd_dispatch.h,sha256=5j80UW3aOQTbObPFi5YKK6ElrRT-V9cpCi5qdYtfbM8,939 +torch/include/ATen/ops/transpose_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=XNWCxj8jCzhf3rX0tjlrlHn_TbHckzwH4pHBIkhcPLg,826 +torch/include/ATen/ops/transpose_copy_native.h,sha256=aG5mpyMAEKNLHgK6uv7PYo6SCL2byg6RX1G0xmPodkc,634 +torch/include/ATen/ops/transpose_copy_ops.h,sha256=nUC6D7yTEWFrmAa9-surUCVBpJvNLFqIsDbjaY5XOpE,1849 +torch/include/ATen/ops/transpose_native.h,sha256=r36GvXGIayJk2py-lqRjzUlWToXfBrTB0eLbZbM5b_E,774 +torch/include/ATen/ops/transpose_ops.h,sha256=6bkYie968JwSo9pLcqrijkbz_UZ8MUHKEqROQf_oTHo,2419 +torch/include/ATen/ops/trapezoid.h,sha256=GjRvwJ3IvOpKsMTU8UskNN-DTcQIiE8bJxfatJo_cWo,911 +torch/include/ATen/ops/trapezoid_compositeimplicitautograd_dispatch.h,sha256=q8UX2QQ2Ad6z46_Rzvu7f8bjed6-3jUa1CbufrObrvA,897 +torch/include/ATen/ops/trapezoid_native.h,sha256=ZNuieZTrjW-oEb7r3EayjfkME6_1mWa6IlzP6GXd6M4,609 +torch/include/ATen/ops/trapezoid_ops.h,sha256=UMZSCFa_q1IJDZWsxPgWE42TzB6-pNMdALp2PgM6wkg,1766 +torch/include/ATen/ops/trapz.h,sha256=Zd8mg7jA5rHm1zQDLnqF1j8VAvex9UpySPpnuvJjA1A,870 +torch/include/ATen/ops/trapz_compositeimplicitautograd_dispatch.h,sha256=wI6ESJPpPf_y3Z-Bdx9VteUyISKJxG3dmU28mTVKc10,877 +torch/include/ATen/ops/trapz_native.h,sha256=nYZnsUUpwmg27PZv8ZCBG1ZikfBjNGHTthc9WSO39rM,589 +torch/include/ATen/ops/trapz_ops.h,sha256=G7QdPGZjes52Kt6S7hQBynpL6sfiWLLKTD86WonjQqI,1705 
+torch/include/ATen/ops/triangular_solve.h,sha256=MILGS2dbOD9IcYi6r0mmC_Z7eGJ-aj7Z9hU9XC5PaRM,1928 +torch/include/ATen/ops/triangular_solve_compositeexplicitautogradnonfunctional_dispatch.h,sha256=54BpzwCgBVYaCocIOnfpzoHGSxoeoeY5sxqEmA4PHNY,912 +torch/include/ATen/ops/triangular_solve_cpu_dispatch.h,sha256=4mDBERpVsAd87vv7zmPj38fvhI8oH71PvxnpdqozmEM,1258 +torch/include/ATen/ops/triangular_solve_cuda_dispatch.h,sha256=YGfgPUfS-ShS8mVX6Mgp0us3HTala9o7KfLjQMyBGhE,1260 +torch/include/ATen/ops/triangular_solve_meta.h,sha256=iV9FGMJqvtRjJ0DyHO9V1q3pmSQdlFqCAaPErpKPAFs,655 +torch/include/ATen/ops/triangular_solve_meta_dispatch.h,sha256=HsmQ_mvLK4TYiDJ6AzbGe-U-kcev6MW76ZZRUl4HVmE,1260 +torch/include/ATen/ops/triangular_solve_native.h,sha256=PReHzF_uD7nICyhhGysCv1zP9IoMz6yaPVvqw1MSE-Y,1145 +torch/include/ATen/ops/triangular_solve_ops.h,sha256=_jFSl6Hbvc3v3wEAGrxbbWABtvMpD-93OCx1RV1NYJk,2440 +torch/include/ATen/ops/tril.h,sha256=FFD7xsVKfw0MFw0svfNUW2yGtY7lmGLu7cXoYIoSghg,1120 +torch/include/ATen/ops/tril_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5-ohybdX3JN90qOoOeDGCnVmMZHuu6_5-9Z6ujwYU30,877 +torch/include/ATen/ops/tril_cpu_dispatch.h,sha256=j_cPObg-KxVFPU6Y-vIKSj0dozihDnRrV7B3jgqqjHc,998 +torch/include/ATen/ops/tril_cuda_dispatch.h,sha256=miRBJvkWAqXD_TJc29cPccMPu-VDeF6uu6LUSVr4wbI,1000 +torch/include/ATen/ops/tril_indices.h,sha256=fyPPZPQE77i6mHKeg6cQKoze0f1No2JjIcq41yLbOEA,1937 +torch/include/ATen/ops/tril_indices_compositeexplicitautograd_dispatch.h,sha256=UgI7ZUKrj4gfcTtZKisRtOPfGnksOxjaQNlk368jLcc,915 +torch/include/ATen/ops/tril_indices_cpu_dispatch.h,sha256=4WsHAA28nj8uQbgX0lGzzQ-onybP8mC_1mLnJNhPywQ,1004 +torch/include/ATen/ops/tril_indices_cuda_dispatch.h,sha256=WX5prJTtKgiy6TQd2nnRi1ao3wSwq5wyvS-fWVJuyFI,1006 +torch/include/ATen/ops/tril_indices_native.h,sha256=OlRCrUQXibaLZQLwrTLftHh0EV5v73rg-5MCGBmiWqc,1004 +torch/include/ATen/ops/tril_indices_ops.h,sha256=KtGed23MwbP1gsrftdi_V6BEkz7DFQQ-HVMSArcp4zo,2242 
+torch/include/ATen/ops/tril_meta.h,sha256=mDKUZWd51R6S7EAYR7Is_grRsjqFT_0GrMy2MrqGgPs,591 +torch/include/ATen/ops/tril_meta_dispatch.h,sha256=PrOhBhmmnHj57L3DJK8aXMf6jwrDSYp39VtAHfBGztQ,1000 +torch/include/ATen/ops/tril_native.h,sha256=TL2251QL7QIrn9jqFBM7oHcJtShwhSY3NkibILJkadQ,764 +torch/include/ATen/ops/tril_ops.h,sha256=FZqlKS5ozvLTFrcY4RYk-Rk0GWKSJ7-sV2q_ZlNqWXI,2287 +torch/include/ATen/ops/triplet_margin_loss.h,sha256=xx4xVhiWt-jaqpuqZbAdfcr02nu-P4z56If0Nw-c0ys,1005 +torch/include/ATen/ops/triplet_margin_loss_compositeimplicitautograd_dispatch.h,sha256=EMaoyps0TftBzAgskERCPXzYrks9LmGR02cxsBjaSuc,942 +torch/include/ATen/ops/triplet_margin_loss_native.h,sha256=Fowmf9LVxCZ03X6Sj2RXMqGnTVIF2FZldrJrKV3BBsY,654 +torch/include/ATen/ops/triplet_margin_loss_ops.h,sha256=6Z3gauIedKrICmOAaX4DzQQEj1hbRlr-5XBSUIdiYDc,1458 +torch/include/ATen/ops/triu.h,sha256=ZDNIMZIcOMUXKAaWvU-tPf7YEaOAxfvM7ZWDeJIrifM,1120 +torch/include/ATen/ops/triu_compositeexplicitautogradnonfunctional_dispatch.h,sha256=PX9h-oZr-IeA5-SCyd50CqcHOs0RCEjPdxbglUIWNh0,877 +torch/include/ATen/ops/triu_cpu_dispatch.h,sha256=Nd4WAAsCxgo6iW_nybTuUmbAtpgBbF3H3ksogxbNG6w,998 +torch/include/ATen/ops/triu_cuda_dispatch.h,sha256=WP0LKqNl0j_S-p9X53c3UjVUqOxfuNA673ouNfSM92Y,1000 +torch/include/ATen/ops/triu_indices.h,sha256=vCXhuziLzHb_4AtgO8C8keqN7q7dRNgwL8xrF7gmegM,1937 +torch/include/ATen/ops/triu_indices_compositeexplicitautograd_dispatch.h,sha256=Q0aLJMcY8QNG1bYbVLkKUPuXQX3E-foqOf1YuXn1umA,915 +torch/include/ATen/ops/triu_indices_cpu_dispatch.h,sha256=JRsDpIhNb1XS3jrLjCZkRDQtH5ESiEQDkc9ZTSpLagg,1004 +torch/include/ATen/ops/triu_indices_cuda_dispatch.h,sha256=Bqv70mDTbPbW_ew3Y0xwbTxpzZyugZArZxP_QU_gSPU,1006 +torch/include/ATen/ops/triu_indices_native.h,sha256=RTgpNnPOS5AaTi8SqbNEtRmz31qJQIJJ2jegpSf5-XQ,1004 +torch/include/ATen/ops/triu_indices_ops.h,sha256=gDD_-enUV7MQ84IHdMAA_yEZz9KTRoejigN0PR0m7WI,2242 +torch/include/ATen/ops/triu_meta.h,sha256=PGzjAAYflV4FotAsPelb2HNtkjvVPUCPF5m7N8DYqgQ,591 
+torch/include/ATen/ops/triu_meta_dispatch.h,sha256=zYdPTQ3dJWM0q7JcfWW1HKVpOJlqGtmisHjjVMEgPbI,1000 +torch/include/ATen/ops/triu_native.h,sha256=eIM84lbvH3YJAtBMudaRGYT8StIc1cwCOkM40OpUgKs,764 +torch/include/ATen/ops/triu_ops.h,sha256=5Fv53MtNBRPos7hHMM24biCweAcfBg0xm4U-EI36sM0,2287 +torch/include/ATen/ops/true_divide.h,sha256=SW7fhVeP6sRZHxUGxJIIG-Y1xSTydZS4U_F00p8ZLRE,1420 +torch/include/ATen/ops/true_divide_compositeimplicitautograd_dispatch.h,sha256=zWjpyEZbtEpfWk7ccj5FH8Q00U1Nb66PiHBunX1ZYLw,1263 +torch/include/ATen/ops/true_divide_native.h,sha256=-SQI6yFBP4Tty_wS5R_IAoin62l6xmfS0-cK-EowEF4,865 +torch/include/ATen/ops/true_divide_ops.h,sha256=sds1tD52MH7QC6EevBZxXzOrGLbY93cRrCuMglHp7sE,3772 +torch/include/ATen/ops/trunc.h,sha256=WlyFsBj2elKTfAynmAgY0veaI5qeLwJqw4149Wg5bKU,1133 +torch/include/ATen/ops/trunc_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dCO_GcChs0manJdXYG3KlV6SdApOOhvKj2NYp03Ecqw,839 +torch/include/ATen/ops/trunc_cpu_dispatch.h,sha256=7x287ZQuYusJI-GdJeLu_NOjQ5KwGCgIOUEPdBxqYos,924 +torch/include/ATen/ops/trunc_cuda_dispatch.h,sha256=2KiQVNSCQ77QcAck5fdOsL4p1QfoYrIrYzC_smc0upk,926 +torch/include/ATen/ops/trunc_meta.h,sha256=MRZCp4P0fZnIamOsuouw4jpKc7RLN3v5bRk4qN9Yhi8,574 +torch/include/ATen/ops/trunc_meta_dispatch.h,sha256=h1ynWuXUj0jZukKd3czkxuc5o-DmjfnjfAu56MeCzeQ,926 +torch/include/ATen/ops/trunc_native.h,sha256=TX-PPC4lqFRIEBTjVNub9FJbrjtUpU_rfga1srfC9UE,1007 +torch/include/ATen/ops/trunc_ops.h,sha256=9o8dYlUJGpFbmOqslZXm4XbDBg_g83jZAYUkBgNu0xM,2113 +torch/include/ATen/ops/type_as.h,sha256=P7T4JSLSmVxkKYSjJOELw7loEUYUbtoMogouplYZKCA,481 +torch/include/ATen/ops/type_as_compositeimplicitautograd_dispatch.h,sha256=CWUeUszFQE7v0_E3bA0qmhH94ZNtfbkBe8x6Et0I2gk,791 +torch/include/ATen/ops/type_as_native.h,sha256=nOwP3BvyewB6LC0kzB-XNEP7yQDjsTkWXAC-s99NhYU,503 +torch/include/ATen/ops/type_as_ops.h,sha256=Cwb4P3BnP7QaVb5I18dUJzBpUDKoTGV8FPH0geA9u8g,1056 
+torch/include/ATen/ops/unbind.h,sha256=F28nAZ65Ne3UFrEOXA1P19GPGrvJeJ3HKvR_y_5Kn3s,895 +torch/include/ATen/ops/unbind_compositeexplicitautograd_dispatch.h,sha256=P5-8u0Hw_MalXZ1LzEQCnVuF13fL9PVqD_ZGryKK5Nk,794 +torch/include/ATen/ops/unbind_compositeimplicitautograd_dispatch.h,sha256=wKmUNYKWbty4rnXxlpkgupeVI0AYPjyCEIF682ucCLs,796 +torch/include/ATen/ops/unbind_copy.h,sha256=iNvp777B5HSHM727bUhGQztuKCIDOQucmeCAWQMAedY,1162 +torch/include/ATen/ops/unbind_copy_compositeexplicitautograd_dispatch.h,sha256=tZcnH7E_Rne2WwO-wZvuXod1gwYNnY9OzaJGbsK0OOQ,893 +torch/include/ATen/ops/unbind_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=BPbMKVtyBGozDLypoJSZ4YT3B0TYkXe8KJr9kPZ0AT4,825 +torch/include/ATen/ops/unbind_copy_native.h,sha256=U4QUiGk9Conx89U_oO8YYn1LqZ2zwUVDaHDFG7dV-fU,609 +torch/include/ATen/ops/unbind_copy_ops.h,sha256=JnEfSgUJ5EQVd2GpdvaJPSJ06h-RapJY1jnxcSU3RP8,1758 +torch/include/ATen/ops/unbind_native.h,sha256=gc4K1-gbhwIiTq4FZNFrXoTSYLIFiOXiKvWydIHJvu4,689 +torch/include/ATen/ops/unbind_ops.h,sha256=zdHxDshziLPW4ukER7cKZlOO-g1xRGuADHaVG3oo7b4,1756 +torch/include/ATen/ops/unflatten.h,sha256=YHII2FmYxnxhqpjjba7M0_QUfROHZdff5o5X0P5iIJA,2778 +torch/include/ATen/ops/unflatten_compositeimplicitautograd_dispatch.h,sha256=zHf393MQjxFSlIYna-UxDi_6z7-sx6qxLCQj7k4xlG8,1158 +torch/include/ATen/ops/unflatten_dense_tensors.h,sha256=E7cL7oivqdv7p87n5Acar4tNgCGrubW8VLeVHtpHecg,750 +torch/include/ATen/ops/unflatten_dense_tensors_compositeimplicitautograd_dispatch.h,sha256=cUSLTgLf47XSILcdecH2hWXx0hB7llik-5PY2Ar8Z90,820 +torch/include/ATen/ops/unflatten_dense_tensors_native.h,sha256=KWpTW71s4U14fhm_tSb7TM3r2XLIlQaWCCjcdX6G00I,532 +torch/include/ATen/ops/unflatten_dense_tensors_ops.h,sha256=1y2g41DONP_FkCw6yOc29k3IDwCEnTOJPsAShhroWIA,1147 +torch/include/ATen/ops/unflatten_native.h,sha256=0v0F3FFuLW6iLo1QLF6QGqTJ8Yej08_xxIzAetTeDtg,665 +torch/include/ATen/ops/unflatten_ops.h,sha256=gSIBZnVNbJRhlnoMhKPYCfJCIyuRjK3asEmlsJ9gcGs,1930 
+torch/include/ATen/ops/unfold.h,sha256=5pvZarqo7yxNnVPFjWv30k7BgrZuy8kTJC9M2rIl1bI,480 +torch/include/ATen/ops/unfold_backward.h,sha256=xdA7ChGtUv6saUBNsYkSEkUdLdoIUSzPWSneVLYWD5A,4947 +torch/include/ATen/ops/unfold_backward_compositeexplicitautograd_dispatch.h,sha256=s21Jla9nR7AByxPwXA2TpPWa3kSZDuKexQsaREH-rEo,1374 +torch/include/ATen/ops/unfold_backward_cpu_dispatch.h,sha256=Cv4Sd5OPcEBovk7cKpvA5aFM2vjRZQ_Thgh3-PNP1Oo,949 +torch/include/ATen/ops/unfold_backward_cuda_dispatch.h,sha256=5CmCi4YmNeHvOoOtkB7yP7IIncUGYTo-rAW7joyHihA,951 +torch/include/ATen/ops/unfold_backward_native.h,sha256=tlVLVd1Lf7Ds4OPUoEPEJfH6IhOpzzFnw7Sya0T58nk,729 +torch/include/ATen/ops/unfold_backward_ops.h,sha256=nAqa4yPi_p4zXhVZ3ByaKNeIbk8BFjBrlAX2PWFkncc,2156 +torch/include/ATen/ops/unfold_copy.h,sha256=wZqzVAANze-W1qPVTG8AWVJL2X_pWKCWDmu7Iw3DsLs,1369 +torch/include/ATen/ops/unfold_copy_compositeexplicitautograd_dispatch.h,sha256=ZeVZzMP-Ic18S1lxjtH87mfqULiUckew3lODT8e6c24,971 +torch/include/ATen/ops/unfold_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=-LZwhut8sqMT6KgTHomKxgVBStqiFMcrpeOkx2BW9xA,842 +torch/include/ATen/ops/unfold_copy_native.h,sha256=Rp-9J7uO2GLiFRCvtFk_TiMSIJuahHOTjv7Yjwslk9M,658 +torch/include/ATen/ops/unfold_copy_ops.h,sha256=BjyS1WbnGMZ_3oeVWsXWLpP52_AmL_wtHu0nO0EnmkU,1932 +torch/include/ATen/ops/unfold_cpu_dispatch.h,sha256=_bfZWQWfJQ8tAT9OpM4rihJ-LvYjSnRWAMe_IlYrnvc,767 +torch/include/ATen/ops/unfold_cuda_dispatch.h,sha256=bB615aoWn898Okd3MLj9w4pqZjqWBhf4rtELvCYCP2A,769 +torch/include/ATen/ops/unfold_meta_dispatch.h,sha256=xQP1zG-31Yy9Y22_jHUHu-gBIUaeutQbSFUAfbUXcIM,769 +torch/include/ATen/ops/unfold_native.h,sha256=BasInAcwALJdQaRv8UaWAJd9g1Ggot6SwY51O8VJOgY,523 +torch/include/ATen/ops/unfold_ops.h,sha256=-u6058GVn6JGkHsI9t-GJlatDnsWU_usQ41406wbZ7M,1129 +torch/include/ATen/ops/uniform.h,sha256=3JYDqC2p3iNxlm5f1DaAtf_ZADWi4c7LGDjtNUqHxE0,1475 
+torch/include/ATen/ops/uniform_compositeexplicitautograd_dispatch.h,sha256=Fi8lVle-xd7MgudIS8m6IN5P1vYvNtGMvje46ULGrJ8,1160 +torch/include/ATen/ops/uniform_cpu_dispatch.h,sha256=LEEppc_fwpkHlqeQxAV8yBFU7G2QErVMhRemDC2ewCc,803 +torch/include/ATen/ops/uniform_cuda_dispatch.h,sha256=C-5Fzxoz_3QYe2AM-9MvihvsY9Xt1jngjE74jB6rj2I,805 +torch/include/ATen/ops/uniform_meta_dispatch.h,sha256=3yeQV2vb162MOw-pdbXB2OtkzMV0zZIDtLlVxCNGz3Y,805 +torch/include/ATen/ops/uniform_native.h,sha256=o3rhBO4cMD9cTZc4rezEP9dZjk7ewnyTx_wxgE8kUVk,986 +torch/include/ATen/ops/uniform_ops.h,sha256=PsxH_Xr7vb8K7lpR91Gc40soZVzgH7IXj8H7PNTu4EY,2836 +torch/include/ATen/ops/unique_consecutive.h,sha256=sW-5SMCTn73d3nrqkUxLCKdWk9-_HA5pVytbHQpR0aI,2063 +torch/include/ATen/ops/unique_consecutive_compositeexplicitautograd_dispatch.h,sha256=iV2011uyNa-2rZIeixbnxNf2fTJSwz5BqKNvoLCPXhg,1218 +torch/include/ATen/ops/unique_consecutive_cpu_dispatch.h,sha256=TA4ars7KrDMTr-2BjEdmifI1pa1bW3cymXQ7uEUh1sw,866 +torch/include/ATen/ops/unique_consecutive_cuda_dispatch.h,sha256=YwVRs_SiOO2sq-TxsjGHVidcZNZy2ZYlwG7jzObJntA,868 +torch/include/ATen/ops/unique_consecutive_native.h,sha256=3GcSAhFttLwOc5YXL5I-Ck8adwr9aNaSOFdF6MgNHdk,1071 +torch/include/ATen/ops/unique_consecutive_ops.h,sha256=Ssaea_rJdgpZ9XQ60Gnh3q5Wlt2TsJBC75qCiUs1MN8,2571 +torch/include/ATen/ops/unique_dim.h,sha256=NBZREuxVt7BuYM0Qg-xT8G5tUGU4pQRMGVYkxjtQoKQ,2011 +torch/include/ATen/ops/unique_dim_compositeexplicitautograd_dispatch.h,sha256=mGvoKTDt91GrTz4tbwndAWq5NtzXbjcBolqf_6HLhrY,1184 +torch/include/ATen/ops/unique_dim_consecutive.h,sha256=oWFkW0WU_5cwflYGv2LWXxwE_Rt-lLTwoptL4_sLi6M,2004 +torch/include/ATen/ops/unique_dim_consecutive_compositeexplicitautograd_dispatch.h,sha256=fhDAveSIwKj7Jf2U3akTUpp5YhDdggQwCrof8RgaOAw,1177 +torch/include/ATen/ops/unique_dim_consecutive_cpu_dispatch.h,sha256=nLPzCmjdXHWrskuLfFHHf-luFDHNgmHfBBzH-yxSlDI,838 
+torch/include/ATen/ops/unique_dim_consecutive_cuda_dispatch.h,sha256=Oy4vN83UYw6fY0ufd_hxUSYN-ZMLFOuS8Km1sazC3D4,840 +torch/include/ATen/ops/unique_dim_consecutive_native.h,sha256=HOVAaf9IuW70GPZ41nOZ2-LvLtU4MD_Rw4RFYTQ54mk,1002 +torch/include/ATen/ops/unique_dim_consecutive_ops.h,sha256=Axgxc3NESLfOVTg7abGm9ZM-_j3yv-dcPoZsO4iSEAo,2481 +torch/include/ATen/ops/unique_dim_cpu_dispatch.h,sha256=YhfPIoL_2AJ2djZBElHEruKQu_nSdIRtU3BL4UpP84Q,844 +torch/include/ATen/ops/unique_dim_cuda_dispatch.h,sha256=wm4zKSt_s3pEMtFyEep_TQDj-2-xGSn1aQ7hDPFdf0g,846 +torch/include/ATen/ops/unique_dim_native.h,sha256=KLvrFcCCqFjeY-lNO5gk2YyUidyzRJlwAxf3puMZeoE,1015 +torch/include/ATen/ops/unique_dim_ops.h,sha256=R0gJz2e80ErQv6xbXzgEde92h0XqrgorZAPbG21wDD4,2509 +torch/include/ATen/ops/unsafe_chunk.h,sha256=27iQBXyHK4-ZK7KmrDcgMntCTTF9Nu3J1Z1QG29O6Xw,722 +torch/include/ATen/ops/unsafe_chunk_compositeimplicitautograd_dispatch.h,sha256=pGS7-Ry2a7G1fkVO_TpKYV9brG-1kwH6wWrt0rWxDRc,816 +torch/include/ATen/ops/unsafe_chunk_native.h,sha256=NU2sjV1jxDC1xAYOyIjp5fpYsdWPbUVFQcOZ5CjEjxY,528 +torch/include/ATen/ops/unsafe_chunk_ops.h,sha256=i2YvZZ8sqs9QNoN0wQrRZRUq_fFaU7wObzo0xTdgCEs,1131 +torch/include/ATen/ops/unsafe_split.h,sha256=OWL7zBRtzerf20nzdYMTZ7ZQvP0Xti3DRGuIL9Pr2vs,4016 +torch/include/ATen/ops/unsafe_split_compositeexplicitautograd_dispatch.h,sha256=RrpF52NWxPQsFD6bfk-7I9mVUxBKOj_ItvZ9jw_3t_k,1413 +torch/include/ATen/ops/unsafe_split_native.h,sha256=GZk6V7HvU6Et3NkeTmYOY9U9SdLOfBHt9wPqb7588kU,661 +torch/include/ATen/ops/unsafe_split_ops.h,sha256=8PHAIfhDzEVBzGoUCgmtEDs6TeMubmRgd8OO8mtr8_k,1942 +torch/include/ATen/ops/unsafe_split_with_sizes.h,sha256=N4IPPnzGqVZOXmdA_G8VRDMThWL2ncexkt5aNjLITac,4525 +torch/include/ATen/ops/unsafe_split_with_sizes_compositeexplicitautograd_dispatch.h,sha256=06Bb0pwSFzqWLUeRmSWte2QvItAWKEpqenWOcSu22OM,1533 +torch/include/ATen/ops/unsafe_split_with_sizes_native.h,sha256=8uC7VeZo5A_6r1huFxjynl-vshhSXxE92Rvoed8ju2U,694 
+torch/include/ATen/ops/unsafe_split_with_sizes_ops.h,sha256=ZOdxXR17SPmbWnpnkZJzAzX0Gs0GC2f9auku_ozy6C4,2025 +torch/include/ATen/ops/unsqueeze.h,sha256=A1hkUEhs0BRUxcCza-uPZn3q5NhR5lg1Is0uNyLOhFU,659 +torch/include/ATen/ops/unsqueeze_compositeexplicitautograd_dispatch.h,sha256=7lDGAOB3oT4Ws5A116arqp5AbXxIRXGkrbVI1cZN9YI,847 +torch/include/ATen/ops/unsqueeze_copy.h,sha256=o2sM8b0E7DIeUxjUYReLGWb7tqmiQAY38f-g6Ps-PzA,1165 +torch/include/ATen/ops/unsqueeze_copy_compositeexplicitautograd_dispatch.h,sha256=kYx0Dtzvdx26dxK07SQbptCeRRwv_db4JR0n1p4tYHw,909 +torch/include/ATen/ops/unsqueeze_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=FCD0pzttX8xpG4LFHpGw5e0Ml2QxY3wPSKaD1s9bkrA,811 +torch/include/ATen/ops/unsqueeze_copy_native.h,sha256=ALwfYkSzbp8YMLlw9KiK-yK_LLDUWqYi5Kd14t76S0M,596 +torch/include/ATen/ops/unsqueeze_copy_ops.h,sha256=Uh7lr8nz9pIBK6lEZKJ3cg_wH5pHJXtZP1X9u1Y6omo,1726 +torch/include/ATen/ops/unsqueeze_native.h,sha256=F6j3zk34uOl2LRqA-QTo70Swg_ixjh8upobTmyM5SHk,793 +torch/include/ATen/ops/unsqueeze_ops.h,sha256=DdYwwAIditYyg2wY8D44BGy-XC-9IZNO0_EAqD8LEss,1611 +torch/include/ATen/ops/upsample_bicubic2d.h,sha256=JGfhZkujjKYfg4_glb0RhIobJQa5q9gGpvB5FIfC1MQ,7955 +torch/include/ATen/ops/upsample_bicubic2d_backward.h,sha256=AntlQrUKMtbHG2_zmbJPI7E_9Ki7DDFBovcILO64B6g,7691 +torch/include/ATen/ops/upsample_bicubic2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=dLN10xguGKbEY1Wq99Zs95PKNl7wcRFahRq5BX3WNSc,1265 +torch/include/ATen/ops/upsample_bicubic2d_backward_cpu_dispatch.h,sha256=ek8SvOUZ7ARHChMG7vHHi1sYCZQKwjImSBlISMRT-oc,2319 +torch/include/ATen/ops/upsample_bicubic2d_backward_cuda_dispatch.h,sha256=tQCW1okclFFbP0Sj2aX5BQhLXXSxn8I79gpf59bWag4,2321 +torch/include/ATen/ops/upsample_bicubic2d_backward_meta.h,sha256=xg1tw-B6_8ijHC2QCxJwckWe8aynpnonevF7fNbkF7w,760 +torch/include/ATen/ops/upsample_bicubic2d_backward_meta_dispatch.h,sha256=t4wcPddspu0B2bXJNPeIIVrJgiJRXsRN1H4jFRjz5Jk,2321 
+torch/include/ATen/ops/upsample_bicubic2d_backward_native.h,sha256=K2tx8zMjQ7qXlhhmGoR178yHK9zHvgVdJJcPcsMLgu0,1193 +torch/include/ATen/ops/upsample_bicubic2d_backward_ops.h,sha256=xL2G4psbWCNoDT9c0jRZ7yGIej8yDU8t9dXxqvu6O6s,2826 +torch/include/ATen/ops/upsample_bicubic2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=YQGrpKSVGipVCMeMW7bWxzF-qo4CEbuSwsMOlR_Ynag,1173 +torch/include/ATen/ops/upsample_bicubic2d_compositeimplicitautograd_dispatch.h,sha256=msHeZWZxQA0x0eJLIabzjULzNZZ0bkYXhjX79giE61M,1074 +torch/include/ATen/ops/upsample_bicubic2d_cpu_dispatch.h,sha256=CtpdVyjVgZqQy0KkuD6wpGfIxUjBgTLTzfBBTUr3CxI,2015 +torch/include/ATen/ops/upsample_bicubic2d_cuda_dispatch.h,sha256=cqoohvqdyTmOi8FVYR-Bl90JXtnM-lf1MG0-HkVdMCM,2017 +torch/include/ATen/ops/upsample_bicubic2d_meta.h,sha256=2rtT8-CAGOWZ36Lff-W4on3y3bOJM1LZOGCM9s52mck,710 +torch/include/ATen/ops/upsample_bicubic2d_meta_dispatch.h,sha256=gWKB8Eztw0Sn8uelAuC_DeVhaxHuZqtkz3SCzUD5758,2017 +torch/include/ATen/ops/upsample_bicubic2d_native.h,sha256=W8SdBw8j9op-3UCz6RayRHpPeS-2Uaj1fedT-p-Lwz8,1229 +torch/include/ATen/ops/upsample_bicubic2d_ops.h,sha256=7JmuJMAQL6ZXlTeHBpaxneMjZ7JnR6Ask1XeM0RniaU,3423 +torch/include/ATen/ops/upsample_bilinear2d.h,sha256=8x0Gpgo3xVxFyaOH6eI4QV2ecmqi8kHGaSHS0yS6mcM,7996 +torch/include/ATen/ops/upsample_bilinear2d_backward.h,sha256=AyHUtepVexGSMQf2SgIx-i0XJ3h7sTgNtddwbtpnmUw,7722 +torch/include/ATen/ops/upsample_bilinear2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=sG5ZlCVMucdjq-RkQAD8DyL0CoczeR5RyTTZX44dMyA,1267 +torch/include/ATen/ops/upsample_bilinear2d_backward_cpu_dispatch.h,sha256=TM2p8KxeW0YIcPfwA23XNZhvkq2NG-Kcmasg5JwfyCk,2325 +torch/include/ATen/ops/upsample_bilinear2d_backward_cuda_dispatch.h,sha256=i8MM9qu3HZb1cdiYjUHKGH7R-C3sZDgrttBquU6_vGY,2327 +torch/include/ATen/ops/upsample_bilinear2d_backward_meta.h,sha256=ELnV0-Ebwv3OKQ6zIb8JJT4MZ9kD9wR9meoUBFfyee4,761 
+torch/include/ATen/ops/upsample_bilinear2d_backward_meta_dispatch.h,sha256=zfQ3iPjO5RoEcUsHbFiWmpRWq086x2nMmnAr9rX7GC8,2327 +torch/include/ATen/ops/upsample_bilinear2d_backward_native.h,sha256=L44HjS0Ta5hNtlbKbAGy8W4BOE8ABZm9cNv-rercaMM,1198 +torch/include/ATen/ops/upsample_bilinear2d_backward_ops.h,sha256=wOnAUFxdIkTdmU9qXlnEwJwZz9LfGZndFZc4lqjtjf0,2832 +torch/include/ATen/ops/upsample_bilinear2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=D_oNMKjQ09EvqWWHC9KC32EWUR8lJ2H6-Y-zhluvDm4,1175 +torch/include/ATen/ops/upsample_bilinear2d_compositeimplicitautograd_dispatch.h,sha256=WtSQs5sCNfo4XRCSnAlibyXnm3Y0m9KqzHAx4S2054g,1076 +torch/include/ATen/ops/upsample_bilinear2d_cpu_dispatch.h,sha256=hx2_mSwjsosbipvQ2sZ1FTcyV-AKCoimZiYt-rmTbFg,2021 +torch/include/ATen/ops/upsample_bilinear2d_cuda_dispatch.h,sha256=2w-1bX_OxRPbp_Gu6V0kaWVVtVVLphrwuapwAb5rYk8,2023 +torch/include/ATen/ops/upsample_bilinear2d_meta.h,sha256=w_K2jaQqnNh7JqBwHJU0xhAVNNZ3ryx2-1onNXRkkwI,711 +torch/include/ATen/ops/upsample_bilinear2d_meta_dispatch.h,sha256=BcczS1bkv9wJFbUqmLAPPTZ_HRUUeO_zelrCrB-b9sc,2023 +torch/include/ATen/ops/upsample_bilinear2d_native.h,sha256=m2saxlBKgwciSIS0wrgrccA5Qt7VEi62Z2oIOjsJjR8,1463 +torch/include/ATen/ops/upsample_bilinear2d_ops.h,sha256=oNChz9B7eI02h-uHPA9oefo12cP9Qvn_KObFYmrwD5k,3432 +torch/include/ATen/ops/upsample_linear1d.h,sha256=N2oqyPsLdO0zxC2nu8UohIYccXGcA_84Xn5kbDgzxWI,7074 +torch/include/ATen/ops/upsample_linear1d_backward.h,sha256=MykX4eVRH730L4dXBXVxDxXMTm2tyckLMVkXbBg2Rq0,6820 +torch/include/ATen/ops/upsample_linear1d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=xDwaqDgZv-99LIKvW7AqE3YEBu6PygdxZBNO4y_qEJI,1161 +torch/include/ATen/ops/upsample_linear1d_backward_cpu_dispatch.h,sha256=efpmAcvOnHH3l54XB-rTShh46nyhmHvFNnqag_Fdc44,2037 +torch/include/ATen/ops/upsample_linear1d_backward_cuda_dispatch.h,sha256=2EOky4bMpeN5MSRgbP3osKsaTTnCpr3UMSIVbV6DYKc,2039 
+torch/include/ATen/ops/upsample_linear1d_backward_meta.h,sha256=n8x9ispJcc0-rPG-F4CYMfstKPlM7rnnco-676YY0CU,723 +torch/include/ATen/ops/upsample_linear1d_backward_meta_dispatch.h,sha256=JuguFDfbeHlk20uOPCCdXFa-c169W_ZvDrvDX_SLT8s,2039 +torch/include/ATen/ops/upsample_linear1d_backward_native.h,sha256=qUIk9sHVklsTJ333NlSKhNZtkBHX70cSuT7pYWAI8j0,1116 +torch/include/ATen/ops/upsample_linear1d_backward_ops.h,sha256=UUVzjdfEp4K6o5Ai57UNP_V5NeSkXbs2ZjozLTsQE_8,2578 +torch/include/ATen/ops/upsample_linear1d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=ldXj03z_Lvg1T3rK5cBMc1WM3gEWUxo_NfnjAke0cPU,1069 +torch/include/ATen/ops/upsample_linear1d_compositeimplicitautograd_dispatch.h,sha256=mFBdFYKiYCmIJGqVnRADGKGWccZXMklR2H5HYXLA68o,1072 +torch/include/ATen/ops/upsample_linear1d_cpu_dispatch.h,sha256=XzNVVySpBiS-jhNaphptKmSpSm3vxM4ES9ZSskfJGnk,1733 +torch/include/ATen/ops/upsample_linear1d_cuda_dispatch.h,sha256=4piemFWnKDhOoWu0sQVxWppUOrnlTlgkd_585XyJBwo,1735 +torch/include/ATen/ops/upsample_linear1d_meta.h,sha256=QZPEcI9kmsNhOHP3D_39nppa08U9mv3aHQuOTS-OQ84,673 +torch/include/ATen/ops/upsample_linear1d_meta_dispatch.h,sha256=4b3uIHRhB0EB87OArZvcvNe80vEnL1BPBYTHaXrQPEM,1735 +torch/include/ATen/ops/upsample_linear1d_native.h,sha256=0mYL-q3uBhsnOAORx2xirNAK-67cO9uKzPHt9NjcnEc,1151 +torch/include/ATen/ops/upsample_linear1d_ops.h,sha256=U8b_LCB5hODzYVNiqZ21DrCKnyxdXxO0RonCHm2TCio,3172 +torch/include/ATen/ops/upsample_nearest1d.h,sha256=Pfa74Czue4Iip1f2D-VMTWG1huaS_6z_cACvVpV3zJM,6395 +torch/include/ATen/ops/upsample_nearest1d_backward.h,sha256=5nIV_PeHwKzwGQbNDve__o5pcHV-leqxrKJLJ2Q0TE8,6311 +torch/include/ATen/ops/upsample_nearest1d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=s-pRUoHc8xUYGb8H_MeR1HkdAP6uqtFhkHRFfjdKPQk,1123 +torch/include/ATen/ops/upsample_nearest1d_backward_cpu_dispatch.h,sha256=rcuOLcDVt4l1PlAuuPnwmIDqecFMldeR0Y07TGmzwZg,1923 
+torch/include/ATen/ops/upsample_nearest1d_backward_cuda_dispatch.h,sha256=n5aO9rZXJUvQoaZ8TXy2kckNfL_ykBQl6ddumlzr4Qo,1925 +torch/include/ATen/ops/upsample_nearest1d_backward_meta.h,sha256=huV0_5LSY--wKL0gNdpWR6jGZqsbs1wxtJbpo7HfzXc,704 +torch/include/ATen/ops/upsample_nearest1d_backward_meta_dispatch.h,sha256=3OcEZL3AShPG2tPk5aQVRYRVxgf6wy8ab-yInI11jiU,1925 +torch/include/ATen/ops/upsample_nearest1d_backward_native.h,sha256=33UIcU2O4tGS5OGy2rMpDh71yBJe_4FLrmk8yrbh2aU,1081 +torch/include/ATen/ops/upsample_nearest1d_backward_ops.h,sha256=GIP2cqaplhjd_JHz1AM37fjAC2Pfb4Cqo4mZPhB5f-4,2452 +torch/include/ATen/ops/upsample_nearest1d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=5eTJNoafaSlEvq3-HhWKGxSZ_wY3I2NgIXOCzS3yhNk,1031 +torch/include/ATen/ops/upsample_nearest1d_compositeimplicitautograd_dispatch.h,sha256=d2N5Bb8I8-P_G98xsTdl_KuH1xB0ka2hn_AXz_8EHaM,1034 +torch/include/ATen/ops/upsample_nearest1d_cpu_dispatch.h,sha256=obvt6jmEw3Nn14uunyrtxKEzunmH-zTgu9n2FVCaOKw,1619 +torch/include/ATen/ops/upsample_nearest1d_cuda_dispatch.h,sha256=NX95MxEoeZp4avVVxhlrc3G0OMxQWdCeKkm2PDBWDe8,1621 +torch/include/ATen/ops/upsample_nearest1d_meta.h,sha256=gut5DLrovQk_wyb3S8wEkksh35e8YSV2T-85miBmZvs,654 +torch/include/ATen/ops/upsample_nearest1d_meta_dispatch.h,sha256=lsi0knwK6de39Zo4sRDFbKIWLDIfPV0wYzs-Xz0yCRI,1621 +torch/include/ATen/ops/upsample_nearest1d_native.h,sha256=nACWY2q3hQXNesxUIwqV_PoQT5WS0eHC-ADfEDdmq7g,1097 +torch/include/ATen/ops/upsample_nearest1d_ops.h,sha256=z8_nMAn9KoBgDDHGnNDEt8KFJU1sqMc0rtt97OfiOvo,2983 +torch/include/ATen/ops/upsample_nearest2d.h,sha256=9WpBNIGG-dSp9JQpyDBkUbO_pRVErZno5ka0SacAKiM,7235 +torch/include/ATen/ops/upsample_nearest2d_backward.h,sha256=cgRgJJ4ngB7Xjo0ims8stF-y8h7K-lr9K_ZeVw0SnfI,7151 +torch/include/ATen/ops/upsample_nearest2d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=NwLpCZftg1BnIfMriZrvKHBk4UvsFUZ2G7p9DYRFWz8,1225 
+torch/include/ATen/ops/upsample_nearest2d_backward_cpu_dispatch.h,sha256=btGri4XhpOAWxu6kY_oi_gUCwV3NRq_NPk6B6pSoCd0,2199 +torch/include/ATen/ops/upsample_nearest2d_backward_cuda_dispatch.h,sha256=71XLubnhtGPaSpERBFQ8LyJ0NR8Rb_meCyQ2vwuYAks,2201 +torch/include/ATen/ops/upsample_nearest2d_backward_meta.h,sha256=6yIOs4mYA9YxQF0Wc6T16XZ2BaF6yFEPJIipP_yc4Bo,740 +torch/include/ATen/ops/upsample_nearest2d_backward_meta_dispatch.h,sha256=pOHZFwOnp67owY2H-pl5XZGPo8-hBFkFwmwUkYK4XKs,2201 +torch/include/ATen/ops/upsample_nearest2d_backward_native.h,sha256=LaDRgFf54_W_VoSru_3gYkfg-BYCqIwAQItdsLWHsZY,1153 +torch/include/ATen/ops/upsample_nearest2d_backward_ops.h,sha256=PeZYbIWISYqkVtzlswwY6luwONMn6pbMSPM1BYvumX4,2694 +torch/include/ATen/ops/upsample_nearest2d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=UN-W3fIZgZkERLOYAkz60dy294Qbebb9amPJjItFdLE,1133 +torch/include/ATen/ops/upsample_nearest2d_compositeimplicitautograd_dispatch.h,sha256=0hF5chYaqZXcFMYuddnCNMmOt5d9Ys2TgjqTEhFO2Dc,1034 +torch/include/ATen/ops/upsample_nearest2d_cpu_dispatch.h,sha256=V8dOhP9aRQDUvMxvSY-hy9n75nFbOi47CqSJcFD0qTo,1895 +torch/include/ATen/ops/upsample_nearest2d_cuda_dispatch.h,sha256=i5KAUhDWBU2l1RQru4g7jdaReu5iVyy4wjyjdhg5dIg,1897 +torch/include/ATen/ops/upsample_nearest2d_meta.h,sha256=woxiZ0KLpmWpTrTar4V449J1-6hnO1Gs0VvEHFVKZP4,690 +torch/include/ATen/ops/upsample_nearest2d_meta_dispatch.h,sha256=vhWAkwKvwaDRAbYbzxElgdNI5st7oa0wZfp2Ooro48I,1897 +torch/include/ATen/ops/upsample_nearest2d_native.h,sha256=wAb50pqqcxBeay47njcPT6RkRHPNb53K3Z-5uSpCvQA,1376 +torch/include/ATen/ops/upsample_nearest2d_ops.h,sha256=j6Ua23SSrSAPBIoF3bo3kxXOBBdOhRRC0c9XS5eDUao,3225 +torch/include/ATen/ops/upsample_nearest3d.h,sha256=FeiAYca31WTYmBxj2HiiujlwZ3P-UosLCU73hMljiPg,8015 +torch/include/ATen/ops/upsample_nearest3d_backward.h,sha256=LrUPHZUOw3IZ7YmbsaNN-5DEgo0cMryYS_MPvXBDgMo,7931 
+torch/include/ATen/ops/upsample_nearest3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=hMvi1awyh9z83k4OCf7-p-3j-Djs5WCXyUv4oMiUm0Q,1323 +torch/include/ATen/ops/upsample_nearest3d_backward_cpu_dispatch.h,sha256=-ldbWLjNrybJ1k4Jc8QDcqBHuPpYlmNwY-6TAWIjsx8,2463 +torch/include/ATen/ops/upsample_nearest3d_backward_cuda_dispatch.h,sha256=k_UHW8upbibFPU76qT-R1QkOUHum1okqh2kU1L0BroI,2465 +torch/include/ATen/ops/upsample_nearest3d_backward_meta.h,sha256=9BY4lcyEYbDBCandraUhYrZZgPLdNJcxEQtYAXvBmTU,774 +torch/include/ATen/ops/upsample_nearest3d_backward_meta_dispatch.h,sha256=WnRqi9cqQQjICxebVjRkvE6ynVZN8UKqxkLY9WUm3fE,2465 +torch/include/ATen/ops/upsample_nearest3d_backward_native.h,sha256=v8D9_b6e_nUVI83vFa8SKnUjdkXoDpcuDsywWfB030I,1221 +torch/include/ATen/ops/upsample_nearest3d_backward_ops.h,sha256=HK0uls3ErUndy4xgZJANARuZLbRxPMtKykce7q_rTmU,2924 +torch/include/ATen/ops/upsample_nearest3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Trg3MzxmeilGnwurNoe6uQ5cG_i7Xs45onWAAYeSTFM,1231 +torch/include/ATen/ops/upsample_nearest3d_compositeimplicitautograd_dispatch.h,sha256=uI3lvYRWSyTrVZY5Q6Kh-81MwyljbfumqjsAcsbnMkg,1034 +torch/include/ATen/ops/upsample_nearest3d_cpu_dispatch.h,sha256=9wh0zXvMzsLIZsLi1PedzxxnpG8KA7TqwVAiO2WOkgM,2159 +torch/include/ATen/ops/upsample_nearest3d_cuda_dispatch.h,sha256=Y7Zvb0KQqlmuIOzUAh-0Zxt6NMocHXmi_zDvIruqQmU,2161 +torch/include/ATen/ops/upsample_nearest3d_meta.h,sha256=vw5xB4B4aQzwNIavszJmGPY--DIPnJ9JH1Tnfp3oARM,724 +torch/include/ATen/ops/upsample_nearest3d_meta_dispatch.h,sha256=L-_qcQcb6BVSpZ52kkzRHRYHN7rlowK1aXkLmtAOGvU,2161 +torch/include/ATen/ops/upsample_nearest3d_native.h,sha256=b9lwvl5k-iGh3Y0v9vj8POAXilNkCmxCWT7FRTGk-fg,1493 +torch/include/ATen/ops/upsample_nearest3d_ops.h,sha256=kT7ZoGZT0sZgo9UpcxK-HgjX1icUyMMSGFQiu4VGzNM,3455 +torch/include/ATen/ops/upsample_trilinear3d.h,sha256=-o9fplsPuRNIr0nYY4kbDAHiF3-z3J37Edzlqo8igts,8817 
+torch/include/ATen/ops/upsample_trilinear3d_backward.h,sha256=7QCiXwSHHSP81ItgRgRkvHedhOlTafiobbNlrh0dV64,8533 +torch/include/ATen/ops/upsample_trilinear3d_backward_compositeexplicitautogradnonfunctional_dispatch.h,sha256=GylhoeNOR_ATIoTQHIbmC3fQBX8v3x6mPxQ6Fpp1H4o,1367 +torch/include/ATen/ops/upsample_trilinear3d_backward_cpu_dispatch.h,sha256=sCVMYrmjLGQtckZYcaMXHhSPMLVouFkHI-ozMPnj46c,2595 +torch/include/ATen/ops/upsample_trilinear3d_backward_cuda_dispatch.h,sha256=wjd4BwR-kG4xWczEgborR5PbgTeEKHcj5OrW9ncS8YA,2597 +torch/include/ATen/ops/upsample_trilinear3d_backward_meta.h,sha256=iK6CF26EX0HoRXNQOihjEwQIbyAwz1_KTgqe8hdBt1Q,796 +torch/include/ATen/ops/upsample_trilinear3d_backward_meta_dispatch.h,sha256=ovD4GYR8T69fZx34hqHFQjxjV_euJUR--qOFE0zgTSo,2597 +torch/include/ATen/ops/upsample_trilinear3d_backward_native.h,sha256=VsxdpskwUIIZvQQPkWfcLZA5RfOcS6GUP3OmqY0O5bk,1271 +torch/include/ATen/ops/upsample_trilinear3d_backward_ops.h,sha256=w76wUFwwY1_9KyQHpV6cT0LEqH_adPpNHNjZrJ0r6oI,3068 +torch/include/ATen/ops/upsample_trilinear3d_compositeexplicitautogradnonfunctional_dispatch.h,sha256=AXhNNoqmwIozjvaOwSVvgayfPGE4wUco_71qNI-7tmQ,1275 +torch/include/ATen/ops/upsample_trilinear3d_compositeimplicitautograd_dispatch.h,sha256=1l-eOiL_SHd8EPKL5gvp8gBQzX2noQ_ShZAt882OiHE,1078 +torch/include/ATen/ops/upsample_trilinear3d_cpu_dispatch.h,sha256=RyeObxZmEu5b-SCZYdYr9rVcf66M9oJI_AHXk7ZDATA,2291 +torch/include/ATen/ops/upsample_trilinear3d_cuda_dispatch.h,sha256=9BllU3KVNb93Er7nod_74lwBWy167RlQkCGa_3191rg,2293 +torch/include/ATen/ops/upsample_trilinear3d_meta.h,sha256=LTud7t1xpj3ZSgRBvZrksLlp_s8g_7jsDmarAdtDjEQ,746 +torch/include/ATen/ops/upsample_trilinear3d_meta_dispatch.h,sha256=MvLV90YqyEdSt3U2o8M3WrftG-j9DzszYlIwC025fQc,2293 +torch/include/ATen/ops/upsample_trilinear3d_native.h,sha256=x0LOFAq8b43lN7htlnwoP4Ejbr300Cy3_Zom3-7d9jU,1309 +torch/include/ATen/ops/upsample_trilinear3d_ops.h,sha256=yvw64Gzj6lawRrNcD34AiN5YU85hS7tNNjFpgDsU2bY,3671 
+torch/include/ATen/ops/value_selecting_reduction_backward.h,sha256=4tWgU3NOrYz1DtLODOKZHntZXlSqaCWrEjxWwauJrWM,2068 +torch/include/ATen/ops/value_selecting_reduction_backward_compositeimplicitautograd_dispatch.h,sha256=h_iR2Hdmg98W58Uj3b3HlEhI95kZ4s8kNMS4pQoYh8I,1041 +torch/include/ATen/ops/value_selecting_reduction_backward_native.h,sha256=wLd8iRqgRDwCfVzC0Gdarzg2rvpBhLndFJiLX_FA9mM,593 +torch/include/ATen/ops/value_selecting_reduction_backward_ops.h,sha256=SNGrsqVgNvUQoz2_mwDThgWWArCv5l0sZ-0y6byXoXE,1326 +torch/include/ATen/ops/values.h,sha256=ZIBFslM5d4SuYG2RE8Rx7rYDbKcGZX9LOwcSR6Wow1E,480 +torch/include/ATen/ops/values_compositeexplicitautograd_dispatch.h,sha256=wZ0ZxzoPlFmOiTZ36yHmIFaloD1qC_tyx9UrccsHMVs,764 +torch/include/ATen/ops/values_copy.h,sha256=4TFUKcRZ95u3M-Zf9JvKkhzh5zU7keR1ewAK4AKutdA,1054 +torch/include/ATen/ops/values_copy_compositeexplicitautograd_dispatch.h,sha256=cvPG7K4CH6Ug0zbQbmP947Iybrob5Qvqzqhk1z7cYB0,877 +torch/include/ATen/ops/values_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=OzdOH5gU0sK_qg3oBFTI5y-eNVvxY0oXjmcTDopxckQ,795 +torch/include/ATen/ops/values_copy_native.h,sha256=h_NJDkXMh3DqXqfBedNJnNMElj6xxgonZiY6waT5XMM,564 +torch/include/ATen/ops/values_copy_ops.h,sha256=8OSb5wef5f0U1tYxdbElqI1Wh5zEHIh11qUhZs-TLLA,1620 +torch/include/ATen/ops/values_native.h,sha256=8IZRVzzqhvLWsZtrOgNR65MEjKAjUOorfM4lrxSuXr0,671 +torch/include/ATen/ops/values_ops.h,sha256=U5zFwtKHTf3o3nWlGjTD4oEWTEINPz8iN4OSdeBEdjU,973 +torch/include/ATen/ops/vander.h,sha256=_joxEYvSj6prdlhLZY_y5ma0RbEmTQi17YaNnMSfLMA,722 +torch/include/ATen/ops/vander_compositeimplicitautograd_dispatch.h,sha256=Z-aWH_ByvCtoTh5hk_voZuky3pRIfOxS9VkXuc91des,827 +torch/include/ATen/ops/vander_native.h,sha256=y1dkYRxcAe74EZ7R99w7nA64AnV8HvO-euDgpUR4rWI,539 +torch/include/ATen/ops/vander_ops.h,sha256=Jty4J54glS4_g0Wjr69CrX2z4RutrrU-ETouNpWqPK0,1116 +torch/include/ATen/ops/var.h,sha256=Bb4JCNt-Jx_m8sFdKrGYohPuDvZL52Inzx0ddF4ZPNw,4848 
+torch/include/ATen/ops/var_compositeimplicitautograd_dispatch.h,sha256=PyGjy3MZqsC30g5szjPU6VVsRJ1FqJNBb6vw0RAEC_Q,2021 +torch/include/ATen/ops/var_cpu_dispatch.h,sha256=pe1Qx5675Ek8otsiaFj4nVn63p-UV2JZ6GRX09cZaVo,1213 +torch/include/ATen/ops/var_cuda_dispatch.h,sha256=rr-EnsRDkhAGBw5DiwoqnXG-wpq600aV2sV9Zgv4g6Q,1215 +torch/include/ATen/ops/var_mean.h,sha256=LRrA3TWzXY5HPTXTPgEJY15_Xtxf94Xk43wCyCmUijg,3194 +torch/include/ATen/ops/var_mean_compositeexplicitautograd_dispatch.h,sha256=M4MUfT9vFM1WIRXVhBlvT8IQO1fDd7wUI3NswLfredc,1183 +torch/include/ATen/ops/var_mean_compositeimplicitautograd_dispatch.h,sha256=X61iT1Er85cDdXUeiE5FXrp9ZXtCooC-XVTHvxjnX7s,1273 +torch/include/ATen/ops/var_mean_cpu_dispatch.h,sha256=17C14_x3iQ3XVsd2fk8HS92Db7wQr475SqbalZnuh7E,874 +torch/include/ATen/ops/var_mean_cuda_dispatch.h,sha256=RGu96484viNU8J1snzAplLUkyTG6tQV2uQ94crVrwOo,876 +torch/include/ATen/ops/var_mean_native.h,sha256=nhNCgd-Sn2xwglWPDLgN3O9lsYfoaoYK9gONOBUapq8,1437 +torch/include/ATen/ops/var_mean_ops.h,sha256=cVn7soRfLNn9P6V3Jk2HV8AdUt6K-Sia3-H8_BYK0Oo,5871 +torch/include/ATen/ops/var_native.h,sha256=RMO35w-zj2lQLNS0bXcnnr2g2hLMwXgs6c_aFSzcREI,1640 +torch/include/ATen/ops/var_ops.h,sha256=ux9E8mLzhipfD3HZg9Qd-3mq65n7viFGjtV2iVMvpas,7778 +torch/include/ATen/ops/vdot.h,sha256=gH1wfPdQ_nIH3Vxjb3ZEGbdJVcmY-g0Qiu-QnR6-RfA,1125 +torch/include/ATen/ops/vdot_compositeexplicitautograd_dispatch.h,sha256=ZKCcA9apEkSe06mF5ORCbyv0P6j5S4ENgVwzy7Mmwq8,915 +torch/include/ATen/ops/vdot_cpu_dispatch.h,sha256=tpTH2sLO0-KyP3Ngte3Yy9I0hEdi_7Se6lGJ6rBI3f0,744 +torch/include/ATen/ops/vdot_cuda_dispatch.h,sha256=fuGVsu5H9Fk6zo37k2ikCA95uNMcvMIS2Rtvo25xdoQ,746 +torch/include/ATen/ops/vdot_native.h,sha256=HniU1ovs5DlDKN921SDbPoFoBZjL8VgYlK2KIIm3luk,685 +torch/include/ATen/ops/vdot_ops.h,sha256=FWARoxR-gt9f8y3SJ6ZbBQvhT_0Aygx8zREe01v9yIk,1750 +torch/include/ATen/ops/view.h,sha256=yuUTMpLvDGVQojAZy8SnQTQpYBmSOCIsPyYo-VNHFr0,959 
+torch/include/ATen/ops/view_as.h,sha256=9eWPWTSIPoJH75i4wyvQpvPtulxijThDLSuUbHgeRiQ,481 +torch/include/ATen/ops/view_as_complex.h,sha256=DNXQIhRjzCep4Ze6L47BTmJSRlEKHkWVhBtngi4FVP0,656 +torch/include/ATen/ops/view_as_complex_copy.h,sha256=H8ulyhfYy0ikxFmmyBjMFkRj8Mwpab3wm-TY4aTG5tM,1144 +torch/include/ATen/ops/view_as_complex_copy_compositeexplicitautograd_dispatch.h,sha256=wFiEAFvEymVv5H_kfmHPg_6N0z_3xANyBVMVNs2e0CQ,895 +torch/include/ATen/ops/view_as_complex_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=_m9Kh4YzfcJAioObWETYd7o-s91GBL-CDkm8sOLmKfE,804 +torch/include/ATen/ops/view_as_complex_copy_native.h,sha256=f30IYkt7QgedBAELPgT_9IhRlLz54ZseEuxnZp6xrQU,582 +torch/include/ATen/ops/view_as_complex_copy_ops.h,sha256=XH0GcpTr9laktL60-LnRBSBxI5ib3c6zFnlTlM2afck,1674 +torch/include/ATen/ops/view_as_complex_cpu_dispatch.h,sha256=NtJGQ60SXJxFx61GVRv4faG_4N-WyTrPfgR_TOP5fzE,729 +torch/include/ATen/ops/view_as_complex_cuda_dispatch.h,sha256=ev_1isqi35w5x5Ukf6x4YpMWJ0-YFslSiUenneor1Lw,731 +torch/include/ATen/ops/view_as_complex_meta_dispatch.h,sha256=6Y1J87LXDtZeqPkLWcGl_zcFhQ1sr9Tl0k1j_UUjwpE,731 +torch/include/ATen/ops/view_as_complex_native.h,sha256=IQPC9zZt4rcKDnPiuNbcFCtdh9N7SknVk56ZW7lNmew,485 +torch/include/ATen/ops/view_as_complex_ops.h,sha256=r6KqNYnGlFsuTaa5AJEGy_XwctitKbrEjTAtSesVSmE,1000 +torch/include/ATen/ops/view_as_compositeimplicitautograd_dispatch.h,sha256=LrQARFU__8RnqvMfrRF-JJSc_WK039Ki2U261eDyFR0,791 +torch/include/ATen/ops/view_as_native.h,sha256=GMmX4B7qQ5989f2KCi1Aq5CB5c5yd3xBLkVoTaF1ubM,503 +torch/include/ATen/ops/view_as_ops.h,sha256=IGFYQdbQJwlplybTosbWpgz5wxPhfhic-XmA_Ra-R3I,1062 +torch/include/ATen/ops/view_as_real.h,sha256=Fg_y6ZiYeP9iMIwH21YkHZP9uazEgQwwv7ubvKTwvHg,644 +torch/include/ATen/ops/view_as_real_copy.h,sha256=8IdFDw-q9dhNjoz9eslSJ_Fu3XJGdlQTlDBdErmFQcQ,1114 +torch/include/ATen/ops/view_as_real_copy_compositeexplicitautograd_dispatch.h,sha256=LUR2CsUwZd32YCTbRwoM-6X2M0hLlqSvF7Vbcquib9Q,889 
+torch/include/ATen/ops/view_as_real_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=RmDY93B4glkGQ61neiw9Q9Nki7gmthSlTxzVTDdb0HQ,801 +torch/include/ATen/ops/view_as_real_copy_native.h,sha256=3Q0VjPYNwVWCgTSnt_X4u92ZHeNkhtGHgB2Az6ez7So,576 +torch/include/ATen/ops/view_as_real_copy_ops.h,sha256=JrracnourxRUCwgy36bENAwb27NJTKCQ7zJbPcxisNA,1656 +torch/include/ATen/ops/view_as_real_cpu_dispatch.h,sha256=Dql2XNSBOkJ4zVPAVB5WduYxEWIZU4edsVuXRNFRqx8,726 +torch/include/ATen/ops/view_as_real_cuda_dispatch.h,sha256=VPtRv93nAOjecTei4HbGhXQ2UY9jG_018FH0Cwj92-I,728 +torch/include/ATen/ops/view_as_real_meta_dispatch.h,sha256=JoEI_BGzdoNeHnKzGTjl2AsrjAlmDI_kP3YFNGINKgc,728 +torch/include/ATen/ops/view_as_real_native.h,sha256=TX81CaHs9uejG__7z2_D4zPNK2uV6gNEFttmvV5rIzI,482 +torch/include/ATen/ops/view_as_real_ops.h,sha256=BHFyNqooHckpQJivtAdvqpn5JbjpakxoGBaYjGDQo6k,991 +torch/include/ATen/ops/view_compositeexplicitautograd_dispatch.h,sha256=Cpp23JscpJ7BQ2Vc8z46TsDx21aK0HVtrwbzM_FQ-xc,784 +torch/include/ATen/ops/view_copy.h,sha256=Syirxr7KvESu6c7MqpSmeTL7jqGEIQSeDcTmdBCJYo0,4320 +torch/include/ATen/ops/view_copy_compositeexplicitautograd_dispatch.h,sha256=6CRxBiPOyx_-nd4Ou6BZy3XKqtigTqAb9yBYBOTzg6k,1353 +torch/include/ATen/ops/view_copy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=Ym30vRhIo5vs_6E8hkcAEgstMdKTE1N_Zsyzl44hY-w,984 +torch/include/ATen/ops/view_copy_native.h,sha256=DpOWfbxeSY_9MDBfGoOU57f8IUFk3Hi02KGDeB22Uo4,820 +torch/include/ATen/ops/view_copy_ops.h,sha256=Yn-W028rOd8gjl_91xRo5ShqFzlWW4albgbf6sRAbS4,3149 +torch/include/ATen/ops/view_cpu_dispatch.h,sha256=zBKOO38ZDoGx8Oy7Lolpl5pQUJwI56GbjUv1Cef0ppE,825 +torch/include/ATen/ops/view_cuda_dispatch.h,sha256=9ryNyKP12NGA6cRxJ5yIq0n337Xhhr6R-PsvgZh-_Qw,827 +torch/include/ATen/ops/view_meta_dispatch.h,sha256=57oTuaCVEuzW_Nl4WQsIqFsm2TCKCmZVDgkhyh9JcYc,827 +torch/include/ATen/ops/view_native.h,sha256=T0ljnhMmy8qx6O2M8_-d1eK1IXwkhIXdrQ3lhd5QZJc,738 
+torch/include/ATen/ops/view_ops.h,sha256=TxHgxUp81s03TGJqH_0dLQ522ZnX3gCj0j5CK8FXoBE,1683 +torch/include/ATen/ops/vsplit.h,sha256=UW9hXRVgtoAaUB_GpnN7ejJRFJqwxuQ18_v2J7x8z84,916 +torch/include/ATen/ops/vsplit_compositeimplicitautograd_dispatch.h,sha256=pvbVUDBJs2GmMP05mUqLH55iA3PmkkO5Sf14iDAOiw4,891 +torch/include/ATen/ops/vsplit_native.h,sha256=1nCtTpOw8Jk8fdV_HXHICR3ri_7XxFNbLo4CW5wkqzI,603 +torch/include/ATen/ops/vsplit_ops.h,sha256=2ovJ3WrnRm4tPROybiNxboFKk2rV_bNixjbCqQDkxvI,1785 +torch/include/ATen/ops/vstack.h,sha256=LpaFcpKYsKyP-_btpsqMyAmvINcBFxaSToaZ6wFyb_8,1025 +torch/include/ATen/ops/vstack_compositeimplicitautograd_dispatch.h,sha256=v0mm6a8MrJYi8e0ULYKziiEdQUdv2P7kvmgo6cFoNaI,918 +torch/include/ATen/ops/vstack_native.h,sha256=Ka91xnh6Hgbt-RzmQTseVuJNDSLYCq3kmRhvXerPyvs,552 +torch/include/ATen/ops/vstack_ops.h,sha256=xnK4IFZ61C4cOkWSknncTuWd9z0XQE95_Y8_8QCVPBE,1588 +torch/include/ATen/ops/where.h,sha256=k0-hgOJLUZ2TjKBwFlQwtVCtfiN5PW8NkQ1E2Z36qJQ,2280 +torch/include/ATen/ops/where_compositeimplicitautograd_dispatch.h,sha256=9c2jv_00MuqzYigsZrwDot2RsBmy_YKMVBIBCF_8OJU,1110 +torch/include/ATen/ops/where_cpu_dispatch.h,sha256=UyT7I7dXC067B8Qihp-mnUviKnIOIGmkN8HxLhkqtOQ,1042 +torch/include/ATen/ops/where_cuda_dispatch.h,sha256=3VoIl5JyvqVZAbuhq2D5dUGQ6287K2I5DHblvSByHqc,1044 +torch/include/ATen/ops/where_native.h,sha256=QJJvPr6LE-gwfcc4WGmn47RPeXnbg-d9QFWFNJzt2yE,1191 +torch/include/ATen/ops/where_ops.h,sha256=rz0uU3wCfROL-Qs24dm4JtI4ue__5usEFr0NO2bj5_M,4802 +torch/include/ATen/ops/xlogy.h,sha256=VAsRFk8PSGMtfRq13FWyXWSCd2KsESrv5dlNyNIpSHk,3041 +torch/include/ATen/ops/xlogy_compositeexplicitautograd_dispatch.h,sha256=aCndbKpFqyvidjXsftZOaQtqbVasfDgvH4TfNRNbqEA,1358 +torch/include/ATen/ops/xlogy_compositeexplicitautogradnonfunctional_dispatch.h,sha256=9469MOB2sbfC6ZX6-98d0VKBKAHUBeIijBe2x3X2YrA,891 +torch/include/ATen/ops/xlogy_cpu_dispatch.h,sha256=a_4jjJPvRKe8W58ffFF6X9jcc4Izlg3RiYDs6w9Kf_E,1028 
+torch/include/ATen/ops/xlogy_cuda_dispatch.h,sha256=it3-VIwe08mGOYunpBmQqrR7u_WQDSdYsTKCXAz6NGY,1030 +torch/include/ATen/ops/xlogy_meta.h,sha256=VXYA_cEdKXJ3KEtzTzX2br-0_0p-HAzn0YrYtV1PjBY,607 +torch/include/ATen/ops/xlogy_meta_dispatch.h,sha256=1k5c4CPLwmdU2dMFGWZYNQf7JqHMG8L-Sp0OLZ6aTRE,1030 +torch/include/ATen/ops/xlogy_native.h,sha256=CIedCIvuUb1oZ2r1qdwp0I2OsAC1yhCqRg17ha3rg0s,1066 +torch/include/ATen/ops/xlogy_ops.h,sha256=fEtPUvfxnhSh9hWF6O58qtcX_eOjvMAuBDxOyHUrtvw,5868 +torch/include/ATen/ops/xor.h,sha256=Rc6Eq6zyQIBX2UsLe4CR3xZlBsKtRnsUYV4Iwby3OYQ,874 +torch/include/ATen/ops/xor_compositeimplicitautograd_dispatch.h,sha256=llZtzjLJvEQQuCrgVU0mvjT_ZGe8lEWekI2dEwYorgY,1028 +torch/include/ATen/ops/xor_native.h,sha256=UiYlWgWIGfEiCFt0PpAVg-00iHSvyi_51xqvgT00c84,740 +torch/include/ATen/ops/xor_ops.h,sha256=S3yBeGnvLlVRvcHTeM5xLAEQduRjaPaK4-lH65JYg1s,3000 +torch/include/ATen/ops/zero.h,sha256=GNheLAN_3nA8fjJYml0P805dc-4LoQbZpAA-5eSfEYo,1120 +torch/include/ATen/ops/zero_compositeexplicitautograd_dispatch.h,sha256=akQ1gLLIEeRjNWM6221-5HpQsE-FtekFlEkXLuNsMus,915 +torch/include/ATen/ops/zero_cpu_dispatch.h,sha256=ZrrM_UT_lg_XCh850CD4-HzzJ4joXroYO0MabtRXyuc,715 +torch/include/ATen/ops/zero_cuda_dispatch.h,sha256=s35FQV1MR_XSkax4X3GYTc6b7gKF4zRX_RXJCmloDcw,717 +torch/include/ATen/ops/zero_meta_dispatch.h,sha256=-QLfASKw9qhs_Yo9z5PlsI_3UwxT8qf6VS1JEWJelA0,717 +torch/include/ATen/ops/zero_native.h,sha256=N9kmBvDsXc1o2BO2RhtD42kNIXJRyMQpWiJh1XJkvSM,881 +torch/include/ATen/ops/zero_ops.h,sha256=-N_u-_aA5E2xKMDB8AqNJsk5jxzhVzjTef6jYClYy-g,2104 +torch/include/ATen/ops/zeros.h,sha256=IwnVdMRTs3ilLDX955Ri9Tq95p_DMZYnp5-xUgG9LH0,6899 +torch/include/ATen/ops/zeros_compositeexplicitautograd_dispatch.h,sha256=Kh6SRo2t9u5V6JaZozXgCjqVK4L66702z1-CgNvIILQ,2186 +torch/include/ATen/ops/zeros_like.h,sha256=eXXHmppBloxEdujEg2C61zhrAN37k3muBVnKSQZq7FE,2198 +torch/include/ATen/ops/zeros_like_compositeexplicitautograd_dispatch.h,sha256=o4-g6WoSh_8juhIpBqkDUINtroPdSMr0ByLElHU-XuE,1392 
+torch/include/ATen/ops/zeros_like_compositeimplicitautogradnestedtensor_dispatch.h,sha256=CXh71BT8VgCxLoY2zwGi30vXT6vqlLnFCjnM6HiDLlQ,1138 +torch/include/ATen/ops/zeros_like_native.h,sha256=jgd086mEajSHwwN1zv9qsFeuoNlzwtQZJeZecLi11b4,832 +torch/include/ATen/ops/zeros_like_ops.h,sha256=wG_-yzvxdQZyg5mgon11pn1dKuqQ6P8zS-6oBdvtGlw,2444 +torch/include/ATen/ops/zeros_native.h,sha256=2gyEws-GRGXKl7DucDwBvXB2H-ORKXfRrYd3YIHAsEM,1162 +torch/include/ATen/ops/zeros_ops.h,sha256=LoSQIwQVjVWv_-s2MR2dZCCwxMB2NhpT7ezuCtxs1Dg,4015 +torch/include/ATen/quantized/QTensorImpl.h,sha256=O-XcKul65B9Ql8eeA4gXlJvz4IStdMkqTNzfefTReRU,4009 +torch/include/ATen/quantized/Quantizer.h,sha256=X6j4TQoXgD0l-o5rrLNOz7ilIjXleXf7sY_HazqgToA,9232 +torch/include/ATen/record_function.h,sha256=cDLYADampg-AIo-Zed-_YvO5llErqgD3_BjUMZenbxU,23049 +torch/include/ATen/xpu/CachingHostAllocator.h,sha256=dBqQYeBjML8PO-rVHE3JS4Q82Xcg2tuoGLpSVg-5L7g,540 +torch/include/ATen/xpu/PinnedMemoryAllocator.h,sha256=h8RnNKNiXVTOvOXKWEDyw-XTBR90nEAQw8Pg7g-jC_U,237 +torch/include/ATen/xpu/XPUContext.h,sha256=Eor_d5xBekYy1nwWkNa4MSo2aX6KY3QbODtvBONnAi4,458 +torch/include/ATen/xpu/XPUDevice.h,sha256=nosB3k5z5s6_y6MsUf7MkYmonuixvYGjWCACp-_sBp0,267 +torch/include/ATen/xpu/XPUEvent.h,sha256=VkcU569AYZEHHAgNMCpjn9gOnRJR4begIo2eAdAqxXw,5047 +torch/include/ATen/xpu/XPUGeneratorImpl.h,sha256=A5lm90JwyW5kzmTQnobw9PuFCvnUOmIo2WcFOm6N1wo,1232 +torch/include/ATen/xpu/detail/XPUHooks.h,sha256=ZQNT2RKLD5szA4Uh77Y7hpz77JMwMu8TtEkV0-ONs8M,997 +torch/include/THC/THCAtomics.cuh,sha256=sbRzB6GvRcYIN-1cCMaM0h-ig8M5tmqMmy-NfmojNfs,118 +torch/include/THC/THCDeviceUtils.cuh,sha256=hkOQh_VmHYP8Ir9HM5Q7oenfTLrTFygGFkGcnmcP06E,78 +torch/include/c10/core/Allocator.h,sha256=gzh4xw8V4ejIGNVpEgQC94nQtz4Y1FnRXdA4fF1u5IQ,10639 +torch/include/c10/core/AutogradState.h,sha256=COFnIXarRjxRmCxtEt3TVfdUQ1ETXydGBE233RhEme4,1591 +torch/include/c10/core/Backend.h,sha256=DxSxQsHgt_17BmGrjLsgZDl2lleHfch0ko3MyUhPGbQ,11262 
+torch/include/c10/core/CPUAllocator.h,sha256=cUwX4Tp5hC1S62FypLhVGkisfbjQ-Wm27OjzNd2j2cc,1688 +torch/include/c10/core/CachingDeviceAllocator.h,sha256=fJADyEYGdqiLQq7VammxxHMa_0-wEtIgxvjWjTnIpLc,3706 +torch/include/c10/core/CompileTimeFunctionPointer.h,sha256=LE2HvZBcsZ_tRb-u43n7b5hnB9a4MUboRv5pJKCKmp8,1700 +torch/include/c10/core/ConstantSymNodeImpl.h,sha256=wFvqvVBLtfuyLQ2_unMj5tIChG4jVz6ZBBMCbp52Zzs,2990 +torch/include/c10/core/Contiguity.h,sha256=u-fZj8LBSecUWgDKcfVFQDT82Ww5uO7wd2kUpxiEwaI,3392 +torch/include/c10/core/CopyBytes.h,sha256=7W421Idnt23V7RkFT3AhQBS_F2JW2L9zQoERVg1IdCg,1343 +torch/include/c10/core/DefaultDtype.h,sha256=jA8dnRLdn4aC4enYeb1YiOXuQCzgQtQHFfHegFsmWtM,394 +torch/include/c10/core/DefaultTensorOptions.h,sha256=0u2cbxT6TMZBHA5_LOt6TJS1vEutjAFn8axX_MxzJfE,1064 +torch/include/c10/core/Device.h,sha256=jU_dtrNhrhMIcFLdLXQdQ-NDbMIVVfAVCbk5hZHcgFs,6895 +torch/include/c10/core/DeviceArray.h,sha256=H5OYVEtpnhGx6tN6BPJIG2RYV_HZAWWaDBdYDjWY-pE,688 +torch/include/c10/core/DeviceGuard.h,sha256=dgLyGQIpwtfpnKHoNl36t_GSb-PyhHplCbFyD9Yc5pw,7745 +torch/include/c10/core/DeviceType.h,sha256=gvyZ3F38yAUtcBx5cZcLO_h5iEuEVm6x2E1Q5Cgi2HU,4476 +torch/include/c10/core/DispatchKey.h,sha256=9Cce3ewTusrRwAlTq-iF7m-WWiXDoYn9fphnSWgTHKM,32503 +torch/include/c10/core/DispatchKeySet.h,sha256=XiVu3JB6uwnCEkoS4qShfOltWqAPypww8NvwkDY-yEc,40921 +torch/include/c10/core/DynamicCast.h,sha256=P6wetNebedxi0d99udXrB-Cc2l8drZocIqfzVoj4lbU,4444 +torch/include/c10/core/Event.h,sha256=OuaYD8wklfsRGQi9nZG6EvEHGsinpCXAYq1mvBztNdg,4463 +torch/include/c10/core/GeneratorImpl.h,sha256=Trunk1NHziOwZ-yZDhPcyFHUPGTEdbdj32pFEpYZfh8,3877 +torch/include/c10/core/GradMode.h,sha256=ojF7GFosLf9vySFUZqxMDP6r_iTIsSqx9Uu4Q5F_eqE,1253 +torch/include/c10/core/InferenceMode.h,sha256=x-F5kPhDJKZJw655mQSq2NYNNkjd0hgM7xyoaScMGks,3558 +torch/include/c10/core/Layout.h,sha256=ie5VbhR1SRHHegWXT-MxF4NGPwUeR7DZzunITlzLHfA,1943 
+torch/include/c10/core/MemoryFormat.h,sha256=wsPYbHemnh_26P1-Yb-F_S4yIYT6kZL5q4mU06ocdJQ,9401 +torch/include/c10/core/OptionalRef.h,sha256=hWjNEDGGMt_kVVqsGhriRs1930q0J1b1fxbM3A8QK1g,521 +torch/include/c10/core/PyHandleCache.h,sha256=7Gb6mh98hla7kZlRVEE8jeVu_IRQsdfxx53cDzri64I,3101 +torch/include/c10/core/QEngine.h,sha256=-UoIngTGcXUXv2oe3LE5M0_n6WvrMj2dyeV1L0Okx6Y,1010 +torch/include/c10/core/QScheme.h,sha256=vcqI4auVfkZh-LwdTjdJmjUz_3T3jmdsiL2qyp_T68I,1566 +torch/include/c10/core/RefcountedDeleter.h,sha256=m9j4JCJeLF4GznXQRrf5Bpv8QDRsQmwmUgGiQYkF4m4,2223 +torch/include/c10/core/SafePyObject.h,sha256=PdFfijB5qZEsAjAw_4YX4CSet8U3FysVl4ZfH1venuU,3657 +torch/include/c10/core/Scalar.h,sha256=GMCJ_Mddn4Lp74j34yMjPQrHeGmFZA8jnEM7AccxTVc,14091 +torch/include/c10/core/ScalarType.h,sha256=HfmF9x9-KYaQ35xj9wrytgYCy2Hq8B8sfiZDViJtKVQ,23099 +torch/include/c10/core/ScalarTypeToTypeMeta.h,sha256=8yuBriniMRc2o5TcQrP2xZDE273uJXcf8vkhU7T6OsY,1357 +torch/include/c10/core/Storage.h,sha256=g9w0BHtZDCNkE_97nDUy3nK50EXlnJhV2yoecqqvE8s,7059 +torch/include/c10/core/StorageImpl.h,sha256=eCK8JW-dOn0no8k2grHB-G2kBRKvLH9qMkS7ophS0fg,9865 +torch/include/c10/core/Stream.h,sha256=ZsQOPW7woZ7o0Ea03DNnsjdID5-gTxaTOd6JAaDmSMY,6365 +torch/include/c10/core/StreamGuard.h,sha256=-qg_6FBfqDDetOFVYMR0B0zMgdnOoqJGrRkwUY1OSJU,6493 +torch/include/c10/core/SymBool.h,sha256=74eBqv3zRsVJEH72Rv8dcNO7zC7s6jW4ub_lDZtlgx0,2733 +torch/include/c10/core/SymFloat.h,sha256=a-_NL3pXDBgE3j1ZhDRc-vzDpABrUeaSxIdhcABi0vQ,3263 +torch/include/c10/core/SymInt.h,sha256=mO09U4BfURm1r2xM5RPvTZiqSlFpi0UycQXKsECnDA0,13921 +torch/include/c10/core/SymIntArrayRef.h,sha256=JU1wj8ZzsZRvR-jqiZm958FGtKnWBYChu4_TN_oo6MU,2714 +torch/include/c10/core/SymNodeImpl.h,sha256=LHJQacxcwc8szWU_G3FBHPTJBMBO4OaUetyPn3c43Yc,6611 +torch/include/c10/core/SymbolicShapeMeta.h,sha256=yLML_OXqErGzBV6Ifex6zM69FnQreFwa46tGYJPt5OM,7256 +torch/include/c10/core/TensorImpl.h,sha256=Fp6Ty2i6l7T9m8ILoWd-J7YXMY4D6l491xDNuBxBIFs,114883 
+torch/include/c10/core/TensorOptions.h,sha256=SDOeWyucAx8ZYeLRCHkhwKueIOtF8WeMUGmln9AwMxU,27114 +torch/include/c10/core/UndefinedTensorImpl.h,sha256=CDmD4-Pb7c2M6d8yehcBuN06oOPS7C4p8x8c_JKWcrE,1221 +torch/include/c10/core/WrapDimMinimal.h,sha256=BZRtJfULHp6Dw2HlblnxQQ0xVfdwwxzaMKWqtbE7GnU,1360 +torch/include/c10/core/alignment.h,sha256=388Kxrz59CbzQ8Noj8oum3glnTyrbsyEvG8JGuXeDPU,564 +torch/include/c10/core/impl/COW.h,sha256=lFSX9LfHijJA-UYKSjUuqfO7Ve5-VDEKOZ59ysCeqK4,1057 +torch/include/c10/core/impl/COWDeleter.h,sha256=xZ1odJVNmSNpMoPs5yAFPus1ajeZ1MBcvITs6Aw9w-0,2098 +torch/include/c10/core/impl/DeviceGuardImplInterface.h,sha256=m1qJOwxsw-MOJ9_YF78Gmw41Zkp2MTD4n-za7rUnMJE,13260 +torch/include/c10/core/impl/FakeGuardImpl.h,sha256=GjV0OUGkF7o9CEszwkWGul2de6TQslAi8HEwHxWCwUY,3135 +torch/include/c10/core/impl/GPUTrace.h,sha256=NaGndfpzJjUYMSUkP30h08bUuRZioQBuymQL3SjpZjk,864 +torch/include/c10/core/impl/HermeticPyObjectTLS.h,sha256=BpG-7Cj-VNe1T7_VnnLycw5bdm8rzgluqZzozZcpCgo,2446 +torch/include/c10/core/impl/InlineDeviceGuard.h,sha256=mGG0eSslsWZIZMYoA-Bi_8AiwXMzolmBZ2LeElBvDLg,15701 +torch/include/c10/core/impl/InlineEvent.h,sha256=izfWT_YjBHYRSQJQqkc4gefd_3zqcowzwrsDS-6loro,3844 +torch/include/c10/core/impl/InlineStreamGuard.h,sha256=2BG9oq5rWJ0CXBLIeekZ7f-wf3NRwsQRsXtfF81D7aQ,9696 +torch/include/c10/core/impl/LocalDispatchKeySet.h,sha256=hgBEwKE35fIt7qS6Dhml2NHrxRSbkgO8DOOqX1kBAPQ,6255 +torch/include/c10/core/impl/PyInterpreter.h,sha256=B5hwgyO8Mu4Wly-b4Ua0lZzVi8-BAHUQZ8VnJb9fksM,11300 +torch/include/c10/core/impl/PyObjectSlot.h,sha256=boZSAkSGtLZQ51vP5NKxdng1iMkD-CO4M-DdAzd8LKg,8187 +torch/include/c10/core/impl/PythonDispatcherTLS.h,sha256=cEJvBpS47_ItVaRwOFG5Nz7jD4V2eB6wwZEM6d9sfbI,549 +torch/include/c10/core/impl/SizesAndStrides.h,sha256=R57cyvNQQqFbuWK-vWrXseBIa5M1sck5saOgGf60sLk,8378 +torch/include/c10/core/impl/TorchDispatchModeTLS.h,sha256=rzBcGog9-7cRtTtq-KhqyAHlHsWC_dTsaqq878IoU70,2281 
+torch/include/c10/core/impl/VirtualGuardImpl.h,sha256=jFAaKKkPgN07Ojjpbe5nQl5khEaAf-KyULP-2d5D3Vs,3148 +torch/include/c10/core/impl/alloc_cpu.h,sha256=r726N5hIEezpRC-cdGuSJJ7FD1wtLvhKvVS8JoQns1A,178 +torch/include/c10/core/thread_pool.h,sha256=HU9cBi8jQVypSFnbrFaT4fhGeuCx-rE57mqsxDjjDao,2997 +torch/include/c10/cuda/CUDAAlgorithm.h,sha256=_osQlsXB3DJO07B4TJLXnMyjkHip-DJNSxvZCmF8EjE,1041 +torch/include/c10/cuda/CUDAAllocatorConfig.h,sha256=GVS9RcwlHxNCJGELsMc-3ISNv3PFE5DqA9J42_Qeh6Q,3821 +torch/include/c10/cuda/CUDACachingAllocator.h,sha256=5lI_OlxR9zaT0pDceqkBmKBjivCk0W16MHcRKLWXxq4,15970 +torch/include/c10/cuda/CUDADeviceAssertion.h,sha256=QmHUlOlnY6gfQchorxedjp8XFzO27tgksxwtow9Y4-8,4071 +torch/include/c10/cuda/CUDADeviceAssertionHost.h,sha256=5OF5BNqxcu5wk49Trs3fSsE_BoUwInXfF0xisAq5qL0,6617 +torch/include/c10/cuda/CUDAException.h,sha256=8XF-UL75Mw_JrCNvDlRdgaiARcWyktn5nRFL_ueFH9E,4552 +torch/include/c10/cuda/CUDAFunctions.h,sha256=DU3ByFyb9U_4HUMXQx5v6gfTkJDxJGELEaKtzO6fg7o,3911 +torch/include/c10/cuda/CUDAGraphsC10Utils.h,sha256=DKh3OqW4oUBYUwLdJoPj-UfDGfKPY1JjeQo7L-NFta0,2677 +torch/include/c10/cuda/CUDAGuard.h,sha256=u6k8FP6SW034olo1uBcjvjrdrGOIt4K_-4XFNDA7dAk,11222 +torch/include/c10/cuda/CUDAMacros.h,sha256=XGYaw_5z8tkveMtcSCXBKNMEtJ34j16LKiMVDv9Qr0k,1479 +torch/include/c10/cuda/CUDAMathCompat.h,sha256=zXP8jldgwXmP3I7hnp5lrEEZGYa7troS74crEwun0t8,3546 +torch/include/c10/cuda/CUDAMiscFunctions.h,sha256=VYjGElQ6pE-FHwGjhLaf0KOQKpapciFPXmW5mt_xTWo,306 +torch/include/c10/cuda/CUDAStream.h,sha256=LJYQ0hpvjJurYtl-9lyQH110cQFhTbdEzqKUiJCRhME,9628 +torch/include/c10/cuda/driver_api.h,sha256=fNN4_yD2bnwEgQi47xs6sYugc9IEz771aH3cDqYrKvY,2426 +torch/include/c10/cuda/impl/CUDAGuardImpl.h,sha256=Bjyl1MN_nZb7Dz20IYz_Dw8Rlt1yWQnwXpldmd2uORg,8608 +torch/include/c10/cuda/impl/CUDATest.h,sha256=pmj2IDoGqxUBvCohXPrEkoKZn4ZEwgUDXAcN27a5bNg,114 +torch/include/c10/cuda/impl/cuda_cmake_macros.h,sha256=5SZTOQCUZPH9z5lM3IUfEXcEt6BdeTLUt-TeEEX0PPU,194 
+torch/include/c10/macros/Export.h,sha256=hitlLtM1vpAv9sxrl9XoXFDrWEBoWP-ymiHT0rzGmKw,5802 +torch/include/c10/macros/Macros.h,sha256=54O9z_M6C9Bs2afPVxaRfRwBZ0olYZnt9H9rwzORquw,20877 +torch/include/c10/macros/cmake_macros.h,sha256=VHySeoBhgBgNh01VyiCoWmoW8nTLITV7m5G7MyYW4dU,437 +torch/include/c10/util/AbortHandler.h,sha256=tbM4KWBdYwbCq1ArehgWpkZwweDKNvG0aoKJb-dDVbU,2090 +torch/include/c10/util/AlignOf.h,sha256=P8ZzdW8CGo583mnAw7NsgcnRAHREfBUEzVgzvofGB7Y,4906 +torch/include/c10/util/ApproximateClock.h,sha256=Tsl6PkjeqJ9K4Ice8WL5A1pYmEw1Dm9RawHGDC0278M,3483 +torch/include/c10/util/Array.h,sha256=PUhHWAOldn51e4l5qRuFTqZJxQLpxS56lesi6OSJw1E,450 +torch/include/c10/util/ArrayRef.h,sha256=DLbru3ncFxqox7CwlzF5ZTzQLihiK4YFwXOxbISgh5M,10876 +torch/include/c10/util/BFloat16-inl.h,sha256=Pxn4YZ1PFm0oVyg9py4vhr2nKmH15OOnyNpS-ifb8mc,10994 +torch/include/c10/util/BFloat16-math.h,sha256=8FeAbNzFxso_OB4hmNA3hdRwrt6XmZU21F3sLjQYHcM,8121 +torch/include/c10/util/BFloat16.h,sha256=Sb9BFigBekmOgWxCWVCIM_p5IQ4pRdAQhnT_kSypdec,3402 +torch/include/c10/util/Backtrace.h,sha256=lZXha4pz5h8PHcz1EA6BBKOGwV5OGhPQmQgF7sPbJg8,797 +torch/include/c10/util/Bitset.h,sha256=4tgpGr0oTix8UiTPhem2njwOrzhJRECCb0bO_DLcDTg,3339 +torch/include/c10/util/C++17.h,sha256=OcQJQdGsY8qJWFhTM3C7nwX-yilMYe-IVOLUpeiS5vA,4027 +torch/include/c10/util/CallOnce.h,sha256=bi_enFQ_NBH6PFeBbQAHVTLcwHZZfQGIA5biLlwxBBA,1941 +torch/include/c10/util/ConstexprCrc.h,sha256=3X4JO7ClQRAEmuwuWFjk2WyEJq2XbG4uMOgsEIwU5Xk,6638 +torch/include/c10/util/DeadlockDetection.h,sha256=lqJDsGUL7U-lDSXJNTX45EskIG-dwOl0ZQEZXVzbhNc,1925 +torch/include/c10/util/Deprecated.h,sha256=tyNcwEnjhdHs3LbgKssdAQqInoDJXNhgcSg1z2sj3nc,3579 +torch/include/c10/util/DimVector.h,sha256=kQxjIMzErk6BLDVVpLVCrzimZaZr-WG8pBjDePV6Njs,444 +torch/include/c10/util/DynamicCounter.h,sha256=oawmDGGf_eC7GwTPrEUEMMr8HVmB2QOSY1Uz9EY2XL0,1304 +torch/include/c10/util/Exception.h,sha256=KPdD7dEG8fERBC08SvP-XyjPSaBCRDUuxF4CyWNU1RU,27305 
+torch/include/c10/util/ExclusivelyOwned.h,sha256=qwRVoJSLwNgKSoGTTRUfaF7PA7l8TGwJSCM9qg3I2a0,4453 +torch/include/c10/util/ExclusivelyOwnedTensorTraits.h,sha256=ik-PJqouBR29T6XDuf0QGhthRXQ69xpg81fuU3-8h4Y,2194 +torch/include/c10/util/FbcodeMaps.h,sha256=8vAkfHp4na6an2L9cXNB45k1POA9sLEkv5QZDcC3eHY,728 +torch/include/c10/util/Flags.h,sha256=3IIt2P8VEqadqcYmHpGNHdGz3mTe9UeciwBkdvZQFXA,10054 +torch/include/c10/util/Float8_e4m3fn-inl.h,sha256=qNQjkWi7W7hTQtZ1RdGrccNW8hX3rWe0ymI2FD6Q1G4,8566 +torch/include/c10/util/Float8_e4m3fn.h,sha256=q92WBVDCBWEVd__fLEmlysB4jvv3q6U2osXt6Ef7lz0,8123 +torch/include/c10/util/Float8_e4m3fnuz-inl.h,sha256=cttH-JCo5d0lwcZw36F2JORYNf1mjnKUzma5t1Vgp84,8996 +torch/include/c10/util/Float8_e4m3fnuz.h,sha256=vbsDet8MfGF02FctTOv_EQmbhOG4uYQr-XFSAkmex1s,3814 +torch/include/c10/util/Float8_e5m2-inl.h,sha256=aCXYsCL6rLJy4qFlz_1kcmbKbBsNJMCL-lQmomOh7sY,8652 +torch/include/c10/util/Float8_e5m2.h,sha256=jzObT3uozffiPe0dJQHWjn1TT5FcHvZX_kusGczjUCI,4324 +torch/include/c10/util/Float8_e5m2fnuz-inl.h,sha256=lCpwXEIQ_-_yQHCOfu0yBGu_T4w-GHP5Dg61Qso1bfs,9225 +torch/include/c10/util/Float8_e5m2fnuz.h,sha256=P_yKQ4nOnWvr0c9mQqL3D36sDVaJrBsdd5FkMi7onPI,3843 +torch/include/c10/util/Float8_fnuz_cvt.h,sha256=9mj750VZ1H3zKWp1-5Y6ZkiRv5gI2YdzmVMsMwn7l74,1732 +torch/include/c10/util/FunctionRef.h,sha256=L2sIQ56zatHFPgwVDlVM0cA-KzBI4M6-Tv3M8G5WHSM,2296 +torch/include/c10/util/Gauge.h,sha256=JidqImsjtLLz1nprjNfTKqTx1fMM6DN_LwxnB93WOkw,1105 +torch/include/c10/util/Half-inl.h,sha256=2J-jM1hlLZ-Ixq6XrjiCbsSasVZGjrt5g7EmDs-6Ey8,10152 +torch/include/c10/util/Half.h,sha256=mkIoFFxW25Ti_VbZSumhkL5ydTKEFuTEqrO6pgy6xzI,20273 +torch/include/c10/util/IdWrapper.h,sha256=uYBOWQaBbtb9DDcerY46K4BfnM1bT3LLnIM6rae4iI8,2336 +torch/include/c10/util/Lazy.h,sha256=Q97mZlbfrcl7V8QLgBXmFAEUPCbomFq7OWX3_ju2TN8,2818 +torch/include/c10/util/LeftRight.h,sha256=qz8I2fjmp88vBb6qxq8zzOds57jutVAAle1MtY3G5qw,7072 +torch/include/c10/util/Load.h,sha256=onxMgIHtTe1Q8bbmT8tx7miGGdpBjTG9o8B-WF7s38w,904 
+torch/include/c10/util/Logging.h,sha256=bItCV_v_uiRWl46RlMQMu77AbLsUs3YM_ie2BWYcbqM,14035 +torch/include/c10/util/MathConstants.h,sha256=gyZaVPY6yUN2V2c1pdAkjtXy6UpD51eDmDixsacR_uU,3690 +torch/include/c10/util/MaybeOwned.h,sha256=JG-HdRwRjypc-liDVqV3r0FHmCq11gy9pKx2droIzYM,7155 +torch/include/c10/util/Metaprogramming.h,sha256=RWrjybE-bjBO5gUsAUDbR1vGHYco9r8Sf2i-TG7BtUk,7031 +torch/include/c10/util/NetworkFlow.h,sha256=5j7cCqBWNfsZahNdyLHI6vlnO0H6eTWnVi7WkC7ebfs,1137 +torch/include/c10/util/Optional.h,sha256=RYAvXifXLGZepAN2UQjqgksziGyLZVXc_0_ScIHE99M,1685 +torch/include/c10/util/OptionalArrayRef.h,sha256=q3bwR6k7CgzIwdNa2zAh--5IDUaHIfuny1E0gK66ZRo,7104 +torch/include/c10/util/ParallelGuard.h,sha256=AClhBs6-_lFnvdUTtuDKKmNp1Rdf_CbLOo-BZCPJoUI,373 +torch/include/c10/util/Registry.h,sha256=0Ub5zzGxOT1OQwXoWvmgoz553g-o-L5VsV-W0mcvjvg,13158 +torch/include/c10/util/ScopeExit.h,sha256=7IV_xwvAerKLsjzHhU_TAFsjVlknfN22tOarz1mFjLc,1259 +torch/include/c10/util/SmallBuffer.h,sha256=8FhfpVXad5SXYTyeIvLrQMyf7xwkBK5lsOammWGbHRQ,1762 +torch/include/c10/util/SmallVector.h,sha256=JG4jg_ToqV_jptoaJzCyy1cmF889LVM4RMcnIYv1xeA,49012 +torch/include/c10/util/StringUtil.h,sha256=pKkK9JpTAkndsm0S1NJV_N4aI861Vtf6gkYgE1nHYns,5178 +torch/include/c10/util/Synchronized.h,sha256=RYgOiIrZVwnXmMHeoZrHpv6-t383r39dotD-VZnlK1A,1896 +torch/include/c10/util/ThreadLocal.h,sha256=wW74FnH_mjIDvOKvzr6HTvxsPLRBI8HwWMuPcDFGsHk,3883 +torch/include/c10/util/ThreadLocalDebugInfo.h,sha256=ILDUXGApD0wOXy7FuQMSVnX9N-bnIeBVHR-53tvjFKI,2548 +torch/include/c10/util/Type.h,sha256=-GVwlf-O6CsPhLC_t9_t84apjpiCvwY7LlXpfp51OkY,646 +torch/include/c10/util/TypeCast.h,sha256=iJL9sL_egy1toxDRX58ZjdtcBOlnpG8e_fvPfsYThpk,6182 +torch/include/c10/util/TypeIndex.h,sha256=WUOrtqNbqB5PPYHBP8hLiUDSw4Kg1Al_p1j77sV7oYE,6038 +torch/include/c10/util/TypeList.h,sha256=YA3psuJdiyHYbjPAsGYrJVRqVvtWMH7kgC25_jqufu8,16829 +torch/include/c10/util/TypeSafeSignMath.h,sha256=ItboL27IimgDESSU-BFR907wTGbgkO5UdOENC6jh94M,4363 
+torch/include/c10/util/TypeTraits.h,sha256=-3g4qpe76E0OKxVK731TUYKxElzCCj99SoAMU-UebHg,5338 +torch/include/c10/util/Unicode.h,sha256=hCDwsMEcAI5h6nD8F3ZxJbO4M4lq1pCRpwj6h3H0DvE,295 +torch/include/c10/util/UniqueVoidPtr.h,sha256=HbNzhyCaqaksAka7MfDVbu4E6jipEMt_DgVNu2hoaN8,4184 +torch/include/c10/util/Unroll.h,sha256=1R1zTrF4Blvqu0FRtpofBLroPSSVujaMdh0E8GIBDbw,843 +torch/include/c10/util/WaitCounter.h,sha256=_fzV8rbmmaLQ8Pe72GDFRZtv_sZe9jyK22VestVOzko,2440 +torch/include/c10/util/accumulate.h,sha256=hobsJjXaKqmwAZOCXogE7eZ-NFiScJS52hvnVjwElFs,4032 +torch/include/c10/util/bit_cast.h,sha256=X6VwjMumLNAkNzRsOJd55pxLofcLpj8meuotGhMjecI,821 +torch/include/c10/util/bits.h,sha256=5vkKgffFbfdboQtD9E4fDsMkxeGlMymsJuRwS_ChH-k,1449 +torch/include/c10/util/complex.h,sha256=suAf3yKLZi_eo5Rg_acGac03lcMStr-_2L9HtYdKDRY,17983 +torch/include/c10/util/complex_math.h,sha256=K-lYAOgfqRUPZQQBExYkHuDpQtVplmaY85WlaO18wjk,12533 +torch/include/c10/util/complex_utils.h,sha256=647X9XNovef-gDB7_p-Q7TskQ-boGgW5MedmVJtp58s,1077 +torch/include/c10/util/copysign.h,sha256=EGcQC191aoUTlLXo8tKfAF9NFnMpQCE0hyWxw47FQZs,832 +torch/include/c10/util/env.h,sha256=LBlMbRbzB2oADvjzwxW-ZileJQT9T7aJlTWJI6nOtKQ,954 +torch/include/c10/util/flat_hash_map.h,sha256=oSaLrGZejJg2sE4vOS73g9wxznCzBzaTEOeUKPH2ctA,61931 +torch/include/c10/util/floating_point_utils.h,sha256=qEX-EkcPf74per7v8KHX4c1-NAWsAqjckImw5pNC33M,809 +torch/include/c10/util/generic_math.h,sha256=OrVoQ3KVBmyRAXztOZwncxz6hcomJDYKwndsJAYGRfs,2156 +torch/include/c10/util/hash.h,sha256=6yk_3pfqiXLyBfNhh9Hw14z2CN8RjmAPtaD4rHqh0KM,11107 +torch/include/c10/util/int128.h,sha256=6B-OapJ188dixe1U3B4PQPzxyR9iueKI1WrCau6Rjyw,12452 +torch/include/c10/util/intrusive_ptr.h,sha256=MH2BXHTP-x1rEzDF16ShZyp79MQzny-7IbAYSQE3Zyw,38527 +torch/include/c10/util/irange.h,sha256=PBKiXBnJTFP7Mpx1xZTa_jOX9027t6xF1tCc8MeRzLA,3352 +torch/include/c10/util/llvmMathExtras.h,sha256=x-xJE6ttLhUS3INBjLjXXjVwUgZWEx5qtUBIoSyXWxg,29493 
+torch/include/c10/util/logging_is_google_glog.h,sha256=LU7_XPpQPV1CNYYOfKAnm9vRSTpUNFmRT81AqXJHtVQ,3794 +torch/include/c10/util/logging_is_not_google_glog.h,sha256=ES9qcimZf-7a5zq5P2jCJrmlWeZJyzZ7u34D5yMIawI,8681 +torch/include/c10/util/numa.h,sha256=Zv_RgPaUVPp3KN4FmvwaqIr-hZ_Es1bd_djbAIYGzsU,713 +torch/include/c10/util/order_preserving_flat_hash_map.h,sha256=H0kqajXqg-9Ez_FhP2U-C7ZKgP8mzAvSECi9AirffIg,65390 +torch/include/c10/util/overloaded.h,sha256=HroKnPbEpPcmOJ2ud_43qLAENSCdvN9q3F_2REQLhvM,727 +torch/include/c10/util/python_stub.h,sha256=Nigc7ZGrniF0qdLn-Ra4KIicNWTBrgARkJYpxs6Ssek,56 +torch/include/c10/util/qint32.h,sha256=iyBUhux_GEJ8J92GhOPnukFSHXEp_vCM_ImIZTR1_Gg,319 +torch/include/c10/util/qint8.h,sha256=bc5SknZDfO9_2Ez-QlgP6CVXkFhkFIc8pV6vqx4y4vo,472 +torch/include/c10/util/quint2x4.h,sha256=yGJ_ehKTUy8Iq54q6LGOQtBf2-Tdwm-6k9EzgLZnwr4,366 +torch/include/c10/util/quint4x2.h,sha256=XARtXHqCVQKflXpoKx4ybFS1E3Pdq9FafFAb9J8SUH4,366 +torch/include/c10/util/quint8.h,sha256=nEiBXDtefPX4IlYbmn_mxC7Ec9QaPZ-d0nIO1BbsGh0,320 +torch/include/c10/util/safe_numerics.h,sha256=QDH6C2kwmkewC8zw7pEBndXR2QvgsVMSBzCpvE9Pzow,2258 +torch/include/c10/util/signal_handler.h,sha256=cVZtjB4I9_Oy2ZinG-3BVa-8cMlDuflFqpPKWW0Y2AU,3309 +torch/include/c10/util/sparse_bitset.h,sha256=TdMRyhYIJtzDNRJQvMgTjY69YyytHZTRz9qJpNHxX4E,26645 +torch/include/c10/util/ssize.h,sha256=fRC0RaSU4YWJOWGpglcKzJbJoogdxtvQoSUKqW3Sc4I,1369 +torch/include/c10/util/static_tracepoint.h,sha256=u_jYYCJlvn-3eHJvxlGmnfwt99aG1mmVv4G65dS97bE,1076 +torch/include/c10/util/static_tracepoint_elfx86.h,sha256=_Nm-tHskm5pjhufoKXpjIEOnr2BAyhlndaI6DoZYt0o,7208 +torch/include/c10/util/strides.h,sha256=636o3IYdyG6Ms5qYbliIFWW2WrUEmSvdIYRHgWNrzzQ,630 +torch/include/c10/util/string_utils.h,sha256=9tNMsGkmJEL4Uh8tO4_01EImdBzACHsNIWPVGn4yN2o,378 +torch/include/c10/util/string_view.h,sha256=zCJx2FT-VZFQjuTTrNvoDs5BsuTV_9fhmLiyv6ArLDk,17062 +torch/include/c10/util/strong_type.h,sha256=3DZAVci9UflQRlaPgGlUQvg-G5Bin4WfC-Cu2Rnn9hc,35794 
+torch/include/c10/util/tempfile.h,sha256=eCtZ_I5XJ2I-WUUWs0UoTFdgAhr9HKsVcmFEJsI340A,2760 +torch/include/c10/util/thread_name.h,sha256=ekaynOg56CtdQLnuzKzGOPlMiqOwI3BVsm61zjNYiTA,186 +torch/include/c10/util/typeid.h,sha256=a9WQ7tqJv2xvGRBXPWMH6N5M1U8xPOJCiH15rcCgYW0,23327 +torch/include/c10/util/win32-headers.h,sha256=Hwx6Heb2BvTTMDfWYL3yRoXsqMd9xdu1_LRXz2nMzws,858 +torch/include/c10/xpu/XPUCachingAllocator.h,sha256=_mt4kmmw3R0STSiGloRR9XCb5ER_d3oG12rCh0_OmBg,660 +torch/include/c10/xpu/XPUDeviceProp.h,sha256=g9nAlBTOHjTIJst9TlcbQvGGFZeI51LKH-LIdGQC_fg,12000 +torch/include/c10/xpu/XPUException.h,sha256=ZSAWqWf8on0ZSjP-MHInJhN8BAt595-T8Qch7QO5DVs,415 +torch/include/c10/xpu/XPUFunctions.h,sha256=WNspPSqWta_BYs6aVToWaPE8z5Zv37iV3DEsDuiT3ro,934 +torch/include/c10/xpu/XPUMacros.h,sha256=j3rgISZgHXKvaT-BCOVUdIs_icvCBY0nWc8LfMUMkEo,870 +torch/include/c10/xpu/XPUStream.h,sha256=MJp36_F2RyBm35-imDE9plh9l7fHLvunEqpkTUiBqP8,5833 +torch/include/c10/xpu/impl/XPUGuardImpl.h,sha256=lg2zcx62PB2M7-ezOmylggDHTpfwZ9W_3HgsgISdXjU,5412 +torch/include/caffe2/serialize/crc_alt.h,sha256=1S1Y-QOdR_AF6_ynNXP7skgrzBL4JA8knBpcUNniGFQ,75497 +torch/include/caffe2/serialize/file_adapter.h,sha256=cWooIxQt2IZhgtNh8Z1EMH-q6-ZRYt_Wm46bTZU2wl4,866 +torch/include/caffe2/serialize/in_memory_adapter.h,sha256=YvqxOKgl9o2TU79PqV29jh8jGxb91F1C_pdpuZd2S8k,644 +torch/include/caffe2/serialize/inline_container.h,sha256=Y001ThvgR77npui_9I7iRMdrdK1AJePbZiDDzHuR5_M,9835 +torch/include/caffe2/serialize/istream_adapter.h,sha256=jhPL2xpULlEAj8d6YhmRaSRRjRK6ZFkodgYX3vlPZSs,669 +torch/include/caffe2/serialize/read_adapter_interface.h,sha256=TrtqY4bnGW_iI8ZpIunrB0fbQhRq11g2Xic-AlE2ZPo,556 +torch/include/caffe2/serialize/versions.h,sha256=v2fi7vl7l_cGOVlPEjyW4GRFReQ5Kn4VTyJACt7_Euw,6648 +torch/include/clog.h,sha256=AaXz8upLdfJ-qFoEjRHcpp3j-ps8jNkBgStV3V19X9A,4900 +torch/include/cpuinfo.h,sha256=-Mj5ng0N6IpA79f5_p6flZAUapKkOq9V1hX7_9z__-E,53783 +torch/include/dnnl.h,sha256=I-QBhgaQhsKI9ws5dIWiC1mo_tzjMZTqcOEH2R_dquQ,826 
+torch/include/dnnl_config.h,sha256=4gxE_VLSQRh1ZjNU0b813RVsIqLRf1g97y8E7yYt6SE,854 +torch/include/dnnl_debug.h,sha256=54dhupUgTYXVDXGNKkIHZa_CIs5rNdGaB4jzaKOmsV8,850 +torch/include/dnnl_ocl.h,sha256=zEhZqSx-AMUh-leUn1DUIKre4POhBRoclUX204x0Ynw,842 +torch/include/dnnl_sycl.h,sha256=hP8wUN2leTYsOSH0zxGhYCowrWL6y1qzEMJx82cMOLE,846 +torch/include/dnnl_sycl_types.h,sha256=wx0L7FacMUW1yLGkQdl633awv1WIiMyjPit8VR0MxRw,870 +torch/include/dnnl_threadpool.h,sha256=pXtNzHokkJu8cOj1YHKPswQ2lHyfnlWmahZ6xSFUONk,870 +torch/include/dnnl_types.h,sha256=mj-E92o3tq2jYmf4m5ApwxF5ZUpOHSW2Tcx1Ye2-CXE,850 +torch/include/dnnl_version.h,sha256=TMnzInMTsAACwnwp4ICpkZmiGXNhA4PHNkrHME_E8_8,858 +torch/include/experiments-config.h,sha256=wokuhtIs0aUOFxBc3Hw0PRvqIs7o6KuNIAd07_HeuI0,471 +torch/include/fp16.h,sha256=E8HMsWXIGhISM6v9jeIa5K3ncDxV_ccTbShVn-bEmb8,141 +torch/include/fxdiv.h,sha256=fSkLf2DxcdPiQIF9s90UAZYYt_70xe8TWmU1DA3MAc4,13149 +torch/include/kineto/AbstractConfig.h,sha256=JGuVUmQz4cZ7kuItEVfQRQ4TSnbsS90WY4CnwzjWv74,3754 +torch/include/kineto/ActivityProfilerInterface.h,sha256=CMuGSYLTPFmU7tBMUHh9hACn4mAhIG3qBBmzzOL4Qps,3546 +torch/include/kineto/ActivityTraceInterface.h,sha256=Coq_Kwi86OF1LTZQRmfIuMrLIBNaHim726eYNAilhkU,584 +torch/include/kineto/ActivityType.h,sha256=1KIlnvX4wk42MvrOwL6ytcmlCMWhgMHKmftQVRXSOqo,2336 +torch/include/kineto/ClientInterface.h,sha256=srKIInxD8tnw9dVVj7FLdWXLvxa4ywy4-pyOI44XYu8,492 +torch/include/kineto/Config.h,sha256=NF4dJC6-8Kyu0oa_khITLmHtVbvZHBkcqukh3jigOdE,14824 +torch/include/kineto/GenericTraceActivity.h,sha256=VMg1IcQPMnV3m2zBQWMBJk-cjuq69zu9IgNFdEiN7vA,3757 +torch/include/kineto/IActivityProfiler.h,sha256=m0yTm4cODS-iHea1vmUA4h8epIaUgWynb4PTq3zzj5c,4966 +torch/include/kineto/ILoggerObserver.h,sha256=Mol7zVGUDxcsIhpXtzt6NWi-uBALA8RYkXzsIukGH2E,1641 +torch/include/kineto/ITraceActivity.h,sha256=irKZv99K2tkWmXBigU4m_jpYFWFgrWe9eKn6zuKJoKE,2044 +torch/include/kineto/LoggingAPI.h,sha256=pvS_9Xif-roEL6p49FFK_2FkLAOx1zJOCzP2ctMzHOg,350 
+torch/include/kineto/ThreadUtil.h,sha256=lYKipwt_bScFF1GrF7SaVAQdoAi-S5qZGmqv1NnhwMU,702 +torch/include/kineto/TraceSpan.h,sha256=s7ACz6pbPUw3qrO2FWCb0vpZSw5FRKf7euDjrtUSiAk,996 +torch/include/kineto/libkineto.h,sha256=haFj6Fkb-w5SLt6BUuU8M6U3ShlloeUdrjJBLQBZcOY,3850 +torch/include/kineto/output_base.h,sha256=Qkac6IQU5_aK2qxjHpGgLY45UBBqy2sTVvrHVR3Be-M,2063 +torch/include/kineto/time_since_epoch.h,sha256=6iW7aWNh_vYlxI3YbrePjcGEUEh0kY6e4pTJ6Rc24Mc,529 +torch/include/libshm.h,sha256=6V9-_p0xRhavijNsGdB4oxPHUgjOUZjd2U00-3dd0fA,1196 +torch/include/nnpack.h,sha256=1msg-Qi_QPKPHQHBEDHVWvHj0ilZNjwVIUSEhhs7_es,33083 +torch/include/psimd.h,sha256=4z8bkysyKCCrjI4MhcqPUIsdV46VUcRfjK8vO3p5q14,45504 +torch/include/pthreadpool.h,sha256=r65lzOu4EN3iozcIdd9C-J7y5DUCo0MCCNe26YD0t0w,99328 +torch/include/pybind11/attr.h,sha256=QPjH7BfhL8QFwHHkrDak8gNOLMlb1itAO5fobjdoLp8,24334 +torch/include/pybind11/buffer_info.h,sha256=_FcQisqdpphfWXKeCGNv3Gq5ivy1z-qF3d1Noeteaok,7778 +torch/include/pybind11/cast.h,sha256=8gJ4Y4nc83dyq12CuU7ircAvAV1HoEZEVr0UyfeLQNA,71696 +torch/include/pybind11/chrono.h,sha256=A23naeloqn-1NKVAABOsJtHU9Vz8lfvrAICuLk-7qBM,8458 +torch/include/pybind11/common.h,sha256=ATg9Bt1pwF8qnNuI086fprM4CUTdrZdk_g2HXE1Sf6A,120 +torch/include/pybind11/complex.h,sha256=AaDZ-rEmK4tFaue-K9P5y3TxxnaQF6JwZ_6LAzkdLQI,2096 +torch/include/pybind11/detail/class.h,sha256=KR-30QA8Eime1Cwe6JtYPV6Dd5J1j0dXCuVkxGySBLo,28686 +torch/include/pybind11/detail/common.h,sha256=OnGRKae9W73KW71ZCsKsCMHMGJrBD-gKcIUFo43YdH4,54708 +torch/include/pybind11/detail/descr.h,sha256=D63pIHsF3luO_g51CjbJU8Wl9VOihciEXQhXvfRg-Rk,6035 +torch/include/pybind11/detail/init.h,sha256=Sb1UkPecC5l9xj5naYLdUM7qIRLVpe614H9Frvyg8xg,17983 +torch/include/pybind11/detail/internals.h,sha256=ajHKg-dYH8L9VjyNs5d5BS5SlmnbOM8_5hCDJA9TgbM,31992 +torch/include/pybind11/detail/type_caster_base.h,sha256=8ONT9BowOAesZcbtr199LLaXPpUdUrvME7uFiTEYs9g,47550 
+torch/include/pybind11/detail/typeid.h,sha256=jw5pr9m72vkDsloT8vxl9wj17VJGcEdXDyziBlt89Js,1625 +torch/include/pybind11/detail/value_and_holder.h,sha256=hwNYlqxjUhlUqihwMjr6s3LhhKlZiTLaWREtQrgOAkQ,2814 +torch/include/pybind11/eigen.h,sha256=-HmSA1kgwCQ-GHUt7PHtTEc-vxqw9xARpF8PHWJip28,316 +torch/include/pybind11/eigen/common.h,sha256=dIeqmK7IzW5K4k2larPnA1A863rDp38U9YbNIwiIyYk,378 +torch/include/pybind11/eigen/matrix.h,sha256=VjCfx8M2AcD3m8THUbIEYidJyIClaNw9jMbd_Fzfo1s,32142 +torch/include/pybind11/eigen/tensor.h,sha256=csE3_N9yy-9k0SWQPJuAxmv8Jp_-lFrrPdVOyMV8-gc,18384 +torch/include/pybind11/embed.h,sha256=F3JQiOWnLGSuZ0NuEyBWFhHyVdczD8D_67kriU4QfsY,13362 +torch/include/pybind11/eval.h,sha256=7re-O2Eor1yD0Q_KgFkHIjKD17ejzII687Yszl9_KfE,4731 +torch/include/pybind11/functional.h,sha256=iOyYuNmbI-K3zgc1IMDwe4iHEOO3F8vwZbVSvbgxFQ4,5267 +torch/include/pybind11/gil.h,sha256=hsJj6z1iXqlo5c7fPCgEvK_-eeDoKZm7PKPwPNCdVVo,7702 +torch/include/pybind11/gil_safe_call_once.h,sha256=KKcy9Wgc_MJY-U5WpCZeNyzW7oVmC-d6yXkgephZ7zs,3993 +torch/include/pybind11/iostream.h,sha256=K5rPXoCYN325r1PptcJCIhPhgtRtTJQjMr7bvUIOwxk,8862 +torch/include/pybind11/numpy.h,sha256=xREhfycUTCOPF8CF-UWRdoLX0B23V6YWRiBqeRRElZg,84442 +torch/include/pybind11/operators.h,sha256=224RoAXcv1la4NNY9rQ3aD_AeC8S9ZKx3HVK1O8B4MU,9103 +torch/include/pybind11/options.h,sha256=qXvmnj--9fZSp56NYefnB3W5V17ppHlY1Srgo3DNBpw,2734 +torch/include/pybind11/pybind11.h,sha256=WgWz0DKN1WnQmeqM1c5JboUGt0blitYCxkP1oyFdq40,131928 +torch/include/pybind11/pytypes.h,sha256=BF8x4S5fsAzWf-d9pu83UsqjwRRo0ragHPy9sDOpUvk,99894 +torch/include/pybind11/stl.h,sha256=aMi1OCCw2Zb-IRLSlAtQEJJHtWsRJiLT9dKDMHST1Ic,15532 +torch/include/pybind11/stl_bind.h,sha256=B5t8E0A4Zdgm2sF0J8Q_UI2U5uqEBQ9TsJCelsJ4q0E,28495 +torch/include/pybind11/type_caster_pyobject_ptr.h,sha256=H7pKBYTvUlibiJQEcKmeAkygSQwoCkuIyukNSDmVq-U,1929 +torch/include/pybind11/typing.h,sha256=1PgwIQIvs-30-M4lfZ6OeFZ-oTPIrpZEI0Wyo-VBm4A,7188 
+torch/include/qnnpack_func.h,sha256=kxuQHibZQi5M43Cvi-CVaYx3sB5V0w8IHvy8sTo8GtE,4146 +torch/include/sleef.h,sha256=zOBFkTEj-EcJhOegJKOnUz-6Fafl2hVCIZbWv7V_248,267778 +torch/include/tensorpipe/channel/basic/factory.h,sha256=3o2OYoXOWYEwtjoQ5X-EswIApRuljOdRWv-ojMhhx5Q,463 +torch/include/tensorpipe/channel/cma/factory.h,sha256=JxKJlqaLXICRxUAUQ4WhZvnlDJ16q50RaRNrd_LI1kk,459 +torch/include/tensorpipe/channel/context.h,sha256=7ALwdlTvgQ03mQaf62xXQ8DUyPOqwg45Qd0F4K6d6II,3701 +torch/include/tensorpipe/channel/error.h,sha256=gELOO5tHQHB4A-WmJuEs6ti1R4EsclrJwIKd9M7tcI4,778 +torch/include/tensorpipe/channel/mpt/factory.h,sha256=G0uZbp_uflgv-AFC7ne6m0txzApGqMZWvt_ICfq3WKo,646 +torch/include/tensorpipe/channel/xth/factory.h,sha256=EmAmNeIHbPe5bMKHnlHR6xdxzTEoCjczXWS8AhiKJWc,459 +torch/include/tensorpipe/common/buffer.h,sha256=p4M8zVk4PM2536SlgY44_RhxEKREEZT6I8oVDJxhyXA,3472 +torch/include/tensorpipe/common/cpu_buffer.h,sha256=pxnNAhil60W4izP77i80GpYRAM1C80jeBO34Jai1RfA,441 +torch/include/tensorpipe/common/cuda_buffer.h,sha256=CTwIuLbgn0Jfp7rKEPZGG_Fo01q9oU67Mpw48SICUls,468 +torch/include/tensorpipe/common/device.h,sha256=KZSlF-D7o3gWVCc8bzQpt_nDBLvW0maDdOFrx60Whis,1649 +torch/include/tensorpipe/common/error.h,sha256=JkbuNVfCczMIFayGA0woiLCOEHTaSLmqC3dHBCvl4_Y,3136 +torch/include/tensorpipe/common/optional.h,sha256=R8Io-h6lETspEfT9z8gWWkKuS2y1X1bTCVFDJCp-lTQ,26638 +torch/include/tensorpipe/config.h,sha256=W7Q7azBmk7xdHVyvraFKBKwxkk6dsQew26lwxuDJu84,351 +torch/include/tensorpipe/config_cuda.h,sha256=_SLA9yKKTnnSbVDgUCfc5BmO3kFgcjGxUxVWu-WH3VM,319 +torch/include/tensorpipe/core/context.h,sha256=jM_MOIRoc1ZUU_NJi6du5PtFaxAzOEQiqzAb_ludD_g,2468 +torch/include/tensorpipe/core/error.h,sha256=7NdEl7Irf82Yk0WL2XXTFnpIapv04vuiEMn7mCCq060,961 +torch/include/tensorpipe/core/listener.h,sha256=19RG5UUa6-8AZAf5Nr-yA6_4j7bfBsdOv0CUwJF83cM,2928 +torch/include/tensorpipe/core/message.h,sha256=ycfshEi1a8QOrNJlOKZwKWKVzNo7F42-pZeiZtglxuY,2885 
+torch/include/tensorpipe/core/pipe.h,sha256=Zb0z4tKyoAPd99se_YZs1RQe3Cd7iydEsbanE8LbiD4,2972 +torch/include/tensorpipe/tensorpipe.h,sha256=UlOcNcNxPcn35eq0OBZ0YzjRGPaZevqfOas094Mya-Q,1449 +torch/include/tensorpipe/tensorpipe_cuda.h,sha256=jfoYOu7NbE5bgHtQoW_LktEqtjP-D8QNHl9PgXfuKsQ,704 +torch/include/tensorpipe/transport/context.h,sha256=L6kYLwqLG3V-o90buhzJFPh7eI_dzQEqPj-m0A96alw,2645 +torch/include/tensorpipe/transport/error.h,sha256=cmUQYONBrI-zmT28gCKojsqp6OxSUh-OJq2bNWqjbOA,919 +torch/include/tensorpipe/transport/ibv/error.h,sha256=K_0RM-V2HRRp_f-CQR8VWxXao-jyT_BhAycnAlAX7uw,920 +torch/include/tensorpipe/transport/ibv/factory.h,sha256=x1RLLvO58D4JQY_BTj-Jo7eKQwKA1FVueAeVzzGg9Gg,465 +torch/include/tensorpipe/transport/ibv/utility.h,sha256=OwgbqTXmVFVfbq4HYZTPShLz0lhYWLsJ7OyG1c1uEK0,569 +torch/include/tensorpipe/transport/shm/factory.h,sha256=7SEWso-QS8ZpHPaffz4QCP-o8rusTJAC1Vdw3bMpJt0,465 +torch/include/tensorpipe/transport/uv/error.h,sha256=YHLjjMatSyU_V1ulMyFi3WWJQT_xJZCzH4YHt20tYLg,716 +torch/include/tensorpipe/transport/uv/factory.h,sha256=1hSYHcEhiLvuRgvEanrpDPzO1wtzG_2ZgeLH72mtwAE,463 +torch/include/tensorpipe/transport/uv/utility.h,sha256=FRljgmy75kb4DNXkINUOSN3prXPrNWttP4FvsyR1t3c,1049 +torch/include/torch/csrc/CudaIPCTypes.h,sha256=1NWXzIO6hR-YbsXDEAaux5cXPhA6Dc4POOrVBstZJe0,3397 +torch/include/torch/csrc/DataLoader.h,sha256=lznqMptWAhecYOjR4xNw7M9uz_D_3i-erB2afkahEns,222 +torch/include/torch/csrc/Device.h,sha256=-2jKs9r1Kd_NzeRiVXarBVyRAEHOd8JEZry6XhU0GWA,483 +torch/include/torch/csrc/Dtype.h,sha256=HoH8TFmslGxkrcBi3ASytPZ09Dz_KkCP2w4oZT2Xx-o,834 +torch/include/torch/csrc/DynamicTypes.h,sha256=5QAXN-7uShXfuhKBbqdNDkkt9-bz8PmQl1o-EsHYPEY,999 +torch/include/torch/csrc/Event.h,sha256=FhNV-RDeKc8bf5kBF1fcqgkvyqG27fbTQEb2sqh4j6Y,534 +torch/include/torch/csrc/Exceptions.h,sha256=GXDKhFB5bfcMsqkJG1CYSq_cm7N5154G2CYVL2vjNu4,15328 +torch/include/torch/csrc/Export.h,sha256=ZiMG2QBIF6DcKqwWj3KSiXLWEn55prmg9a5IjvOzjuM,157 
+torch/include/torch/csrc/Generator.h,sha256=rjXBbuB2of6n4tiwU2p2KdHca3t_QFa5Gxa1Fn-QyZU,1040 +torch/include/torch/csrc/Layout.h,sha256=f4b22tdUkDPMBPf10r18CSrLrEX1zV0LgNBt9DUG1CU,537 +torch/include/torch/csrc/MemoryFormat.h,sha256=PS_TTIZiELRrg08au2TF5Pn5OjJ_78kESahVT754j2g,632 +torch/include/torch/csrc/Module.h,sha256=wN2tjWdCnIQxJrMDwGu9qvP8r3Q-33eB2HkCvydBUUo,101 +torch/include/torch/csrc/PyInterpreter.h,sha256=ilADlYayiLe3ShJT172FL3t9Y9C8ZXtR6LX-L6JQOTw,383 +torch/include/torch/csrc/QScheme.h,sha256=7Tp540AWYE0HrNgNQmHdhyGwAouc2ZS3oewmKHsyXbM,558 +torch/include/torch/csrc/Size.h,sha256=i312eBDDhfQsJqIEY7HLO4mm6-F3gw0xptYBn-jcGYY,428 +torch/include/torch/csrc/Storage.h,sha256=34PcKpN5OAYDej7xvRhpEgPYYHXmG-tFBPkmDBNkT04,1520 +torch/include/torch/csrc/StorageMethods.h,sha256=rwOmQTprbmsE6JSQwctAI4et7C4sCu4T9aIfqLlcFY4,132 +torch/include/torch/csrc/StorageSharing.h,sha256=1CArzbCWa2fFqAh9GWj4fQovYycv5iY4D3UaamuAKJk,139 +torch/include/torch/csrc/Stream.h,sha256=7kZ7O6VsI0JxaZ0a27v3mGPSVEDXllQ4zPy4DSw0ZXs,546 +torch/include/torch/csrc/THConcat.h,sha256=JD1F5DtB2Ep00ZJVpilglulqKt6sTBwu7hS_Cmw2Cek,691 +torch/include/torch/csrc/THP.h,sha256=SPSS5bsFlvR4N7dgR2kV4fhELIwwggJnA3a82Wtgh84,894 +torch/include/torch/csrc/TypeInfo.h,sha256=ELy98ihypY76ViLiKmnrn0I3Lm6v0Xkko6hHzVdLl-M,498 +torch/include/torch/csrc/Types.h,sha256=AmVTjwFoCqH3ekahbAr32ulikXerrtB7ycNIDeX32lU,163 +torch/include/torch/csrc/api/include/torch/all.h,sha256=9nh4mWm6VZTD76X8xtCVjsx67eP5d3he8dAGKS_DmBc,590 +torch/include/torch/csrc/api/include/torch/arg.h,sha256=iGZepqd2MO7wSLfVuxeFo4UIfEHsI344159ekT528Mg,1427 +torch/include/torch/csrc/api/include/torch/autograd.h,sha256=MsdrQ_Un675B93MTyFZ1cLc09aLGLpXhwvJTZhR_j0Y,172 +torch/include/torch/csrc/api/include/torch/cuda.h,sha256=J238-uWS1thxsCxBj_f6wxC_fQPFc8A6MAYftdEEa5I,738 +torch/include/torch/csrc/api/include/torch/data.h,sha256=Kmhs111GxZwH-79BzETOAFrXVdXa-biOfDVy9H0PSvc,301 
+torch/include/torch/csrc/api/include/torch/data/dataloader.h,sha256=j0fVYhw0DQ13sisK4ijga_fwWz9mpfMTa1uSv-4ys78,1962 +torch/include/torch/csrc/api/include/torch/data/dataloader/base.h,sha256=yj7cHB4gtPpBcgQVANB5XRN3YvXyLjsji_qWR0qsMmw,9131 +torch/include/torch/csrc/api/include/torch/data/dataloader/stateful.h,sha256=yo4ybCUjHdgHj7v-Fj0IBORQzNnoeSo1ejz_ilVr7hg,2360 +torch/include/torch/csrc/api/include/torch/data/dataloader/stateless.h,sha256=FQPrVZzPd6ya96lQPSGi8YtGd_cKTwHIVrq48kkoZd8,2778 +torch/include/torch/csrc/api/include/torch/data/dataloader_options.h,sha256=jzeQXDFOfwklNvHKfugNVpN2xCfD05abKZwGWQd4aTk,2222 +torch/include/torch/csrc/api/include/torch/data/datasets.h,sha256=Q3_zlkvTD7BOp09o0vpPhI9r_1ABb_6n5CunXPS8ZCU,289 +torch/include/torch/csrc/api/include/torch/data/datasets/base.h,sha256=NknUHzQiY2PL0yZmvl6sZi1PYxgN033Hlc-5rpLieh4,3276 +torch/include/torch/csrc/api/include/torch/data/datasets/chunk.h,sha256=g-W7Ula8_k0C77C4y05jZebGfzVJftbkn7_i76x-RhA,19171 +torch/include/torch/csrc/api/include/torch/data/datasets/map.h,sha256=y3tyehvwHsusjOxTRyHSpxMTAhIvvwHaZn6TUtHeBG8,4150 +torch/include/torch/csrc/api/include/torch/data/datasets/mnist.h,sha256=wNkr8DtB5t4V1uWLylxXgw_a-ndGoYIgdY9p6utyBSc,1279 +torch/include/torch/csrc/api/include/torch/data/datasets/shared.h,sha256=imEITLSAmID8dHbEyOCWwpkWqKGEovI0z65AzT0X3xU,2645 +torch/include/torch/csrc/api/include/torch/data/datasets/stateful.h,sha256=SFFASJLB2Y9sNRQb-IOO7Af8osVGyRdNH_glyvTmr5E,2309 +torch/include/torch/csrc/api/include/torch/data/datasets/tensor.h,sha256=GqdTKhqfuaKCMaJnipoy0hgnEXdBXwg6Qe0IsFHFSxw,959 +torch/include/torch/csrc/api/include/torch/data/detail/data_shuttle.h,sha256=KyM85aaszVirddi4GH5HJV-TRAlFG1rPWEE8SD-ZE78,2630 +torch/include/torch/csrc/api/include/torch/data/detail/queue.h,sha256=dc4vx8hMIN8m74cah1gfws9OJxgoBnaUeSHsdqSij8M,2496 +torch/include/torch/csrc/api/include/torch/data/detail/sequencers.h,sha256=NO0HrGCpIB4vLV26UawUrqSyV14bI0ahmFzMuzYNMnw,4510 
+torch/include/torch/csrc/api/include/torch/data/example.h,sha256=FoF2ZsbWqnyGe62RGxQpEe9zkPEFwPbnV_O9eGErl_o,1314 +torch/include/torch/csrc/api/include/torch/data/iterator.h,sha256=xqqx82FFOom0wBQlA3h2d7gNNTCkNir733520sDguuU,5294 +torch/include/torch/csrc/api/include/torch/data/samplers.h,sha256=ILkgiKXJN6RyR0Fjmmv8E8LuRBIoBH_P6lUmenbf8Ho,318 +torch/include/torch/csrc/api/include/torch/data/samplers/base.h,sha256=PcT_AfulVVvsmKs1YYySpp4NF7oBWA23Rzr5TV44_P0,1240 +torch/include/torch/csrc/api/include/torch/data/samplers/custom_batch_request.h,sha256=wwuFriVD9gspMiNaUerjj0JbzYiSq9gGHcvGxR5HJMA,556 +torch/include/torch/csrc/api/include/torch/data/samplers/distributed.h,sha256=cYKAmpgVjwKVHltXlZI6oazE5oZM4NfWA4WLjuWIYlM,4150 +torch/include/torch/csrc/api/include/torch/data/samplers/random.h,sha256=7hp2PiKEirCVUsfEXad9dLlIuIl-0tJibxqKNCCovg8,1537 +torch/include/torch/csrc/api/include/torch/data/samplers/sequential.h,sha256=ltKV-5DZyMATrcxbJ8DhoX8bYh35lRL6luPxyoApQts,1269 +torch/include/torch/csrc/api/include/torch/data/samplers/serialize.h,sha256=Xziths-YjwF_GZ48kRC2woI-Mq3T5FnPaqb_rN1Dd7A,707 +torch/include/torch/csrc/api/include/torch/data/samplers/stream.h,sha256=3O5vsix5jOjzsJniY8oO2Y0UgdEXD7FVRTLFdlflUr4,2048 +torch/include/torch/csrc/api/include/torch/data/transforms.h,sha256=TvF9SJpmwR6rCQJvCvO4T5egw3AsiQsCCoOXs_0UTrA,222 +torch/include/torch/csrc/api/include/torch/data/transforms/base.h,sha256=ASbt1QfCjbbr1Y0UlhbxJMe2fB-aRUiQknSgnhSc52E,1629 +torch/include/torch/csrc/api/include/torch/data/transforms/collate.h,sha256=WmDBU1eC-t1-s0j8FQioTkdq1mwXoO_bqpB1lQo3rQI,1113 +torch/include/torch/csrc/api/include/torch/data/transforms/lambda.h,sha256=Kv3GGowexuP96q9NMdRD8VOPvAWq1V3vkc_c_SiUdjM,1709 +torch/include/torch/csrc/api/include/torch/data/transforms/stack.h,sha256=-PJSj11bWeg0mja_pATfo56qqrMcrHxvKO_qEZXaRQY,1424 +torch/include/torch/csrc/api/include/torch/data/transforms/tensor.h,sha256=kuYi1QbUpZcFZBA6_ty3rndAxH8ijPHUDaK64LqDr9k,2473 
+torch/include/torch/csrc/api/include/torch/data/worker_exception.h,sha256=5wwUj8XNAgIBfepKIFOVzjtZ1_jf4TNXXXe-MUOIVxA,1088 +torch/include/torch/csrc/api/include/torch/detail/TensorDataContainer.h,sha256=0WGFrKHAV_i2pCI9cQ3xgWVBQATOajYUL0JSz0eDn78,13504 +torch/include/torch/csrc/api/include/torch/detail/static.h,sha256=pUIWtGGNH8v1WLnEULDUp3nUXrEn5mlgr2VEUCvP2nQ,2200 +torch/include/torch/csrc/api/include/torch/enum.h,sha256=dlD3txFI94pD1IoKKTXSSkAMCAcuRFX1BOqE6QoYgZE,7475 +torch/include/torch/csrc/api/include/torch/expanding_array.h,sha256=HlDC3lCN4ReGTxiyCKAcWMUcw7w5dbOhvu5CsI8-Ek8,6662 +torch/include/torch/csrc/api/include/torch/fft.h,sha256=M9Wz5r_i3Xo446HFrZl4wHOWNk4CiTcp6QiSGAjcsQo,12018 +torch/include/torch/csrc/api/include/torch/imethod.h,sha256=jKsQgJqScAnlZLk5Gpx_b0mE1crUn95AULY3w77hjmw,1740 +torch/include/torch/csrc/api/include/torch/jit.h,sha256=DHCcXjlpyeSFTF58OmdCXUJL3hrXRujX-hIgA8YXukM,913 +torch/include/torch/csrc/api/include/torch/linalg.h,sha256=qRNTLcSTy5rypWI_xs5BdUAQD6U_yDkCd1Qx0tkrWhg,28580 +torch/include/torch/csrc/api/include/torch/mps.h,sha256=FRDi-oKf2Xayga00DSgWPomyyWuC_nSKSbMSgkUAAWM,1219 +torch/include/torch/csrc/api/include/torch/nested.h,sha256=kTLbAGhJqxVVNIIJJ9_PeJitApi4FTF0qSsSkl4mvOU,2798 +torch/include/torch/csrc/api/include/torch/nn.h,sha256=Iah9Blyam2lcEKX85589p1KxupDmsbwkigjMErXYSUI,251 +torch/include/torch/csrc/api/include/torch/nn/cloneable.h,sha256=b-Y6FjbptrT_3uLI7EcPteC3Ct21e4_Rzq3fHIK5vUQ,3896 +torch/include/torch/csrc/api/include/torch/nn/functional.h,sha256=NysmuOu8Bb9fjjXr_qJk2eflRZtyB-B_txwODYKiCyU,642 +torch/include/torch/csrc/api/include/torch/nn/functional/activation.h,sha256=vxSGH0AQHBm90GK3CsD-W5FOyJg9kIEQGECxERnbkBs,29850 +torch/include/torch/csrc/api/include/torch/nn/functional/batchnorm.h,sha256=31u6gLhj8flgcFBPvoffCyXVo4GonleHhHBBERUwymg,2088 +torch/include/torch/csrc/api/include/torch/nn/functional/conv.h,sha256=EQxsD6CHlj--0scQH1jMzLx1eK_oZJtOcCH3gf4JUFo,8147 
+torch/include/torch/csrc/api/include/torch/nn/functional/distance.h,sha256=ZKeMzztlHI9deP_sGkkZ6Sx25xSV_BliUEDD_j1iRfA,2549 +torch/include/torch/csrc/api/include/torch/nn/functional/dropout.h,sha256=TW8xAR53mhni7fpCQDKIQ7pHBW793a6p8-N6lhaob94,6586 +torch/include/torch/csrc/api/include/torch/nn/functional/embedding.h,sha256=7CkmayuUbds-Vz2DnmezRvYH69lue6GGpnU4k_0qW5A,6459 +torch/include/torch/csrc/api/include/torch/nn/functional/fold.h,sha256=aDNkzK1XoHGUpfbcjXdSugxGJPR8_-9HFBD_I630mgY,2786 +torch/include/torch/csrc/api/include/torch/nn/functional/instancenorm.h,sha256=vF5wCiUfY4a0tBS-NWCIy84Sh-8gfK_ypSLoFLegvYY,1605 +torch/include/torch/csrc/api/include/torch/nn/functional/linear.h,sha256=WXUeDK7FwvXxtZroPOP9KdP6Rvn2T3K8ds2OCEkPJfc,811 +torch/include/torch/csrc/api/include/torch/nn/functional/loss.h,sha256=0X-7uWuMN5Cy6n1Gb9EfA7SEwZ9iyXb0rGb-k0m-MPY,31954 +torch/include/torch/csrc/api/include/torch/nn/functional/normalization.h,sha256=JSpK9_l6Al3snBvDKEBSqqaYDDNezkz_7VB9o_VdBxc,6021 +torch/include/torch/csrc/api/include/torch/nn/functional/padding.h,sha256=5kT32hmj1ubDqLp_w6DIgsRb852LHj1Yw9V0Q548THY,1724 +torch/include/torch/csrc/api/include/torch/nn/functional/pixelshuffle.h,sha256=-i59zXgh20MQbpE8GvKGb8PkyXl8FAWFTb-4thfnwTc,1343 +torch/include/torch/csrc/api/include/torch/nn/functional/pooling.h,sha256=f00mutqpdczDVgxncQukTKqWYpi65kHDExNbyjSw09U,35455 +torch/include/torch/csrc/api/include/torch/nn/functional/upsampling.h,sha256=1JqHHkQDhTG3pKeU6XDyKFvkCsU-5neYrsAjlPszTx8,10761 +torch/include/torch/csrc/api/include/torch/nn/functional/vision.h,sha256=nstJWF90xPCvXNF2rNu-_fcY-4ASGwZr6p96JKOnmR8,3675 +torch/include/torch/csrc/api/include/torch/nn/init.h,sha256=peZX66JUTiRh4XRtOdscOKM9gmzt0N8RdqnvQFVleAo,4967 +torch/include/torch/csrc/api/include/torch/nn/module.h,sha256=W7u5tZLTQyv_VVSwu5GadJuYBfpHuJrJdL_0Vqz9ZvE,26878 +torch/include/torch/csrc/api/include/torch/nn/modules.h,sha256=N-ch-fayJ5OIZEUP2hKxJK26CjIEGUnC2d8BnQWfK2M,1289 
+torch/include/torch/csrc/api/include/torch/nn/modules/_functions.h,sha256=BP9nGTfrMWkvkNeWHXCl9beAWRvq-EQtnNo-DrCaCKY,706 +torch/include/torch/csrc/api/include/torch/nn/modules/activation.h,sha256=NMSmBpvtp_qPQziq5h7r2wv-YXrIA-AmromKeXZBNmE,30338 +torch/include/torch/csrc/api/include/torch/nn/modules/adaptive.h,sha256=g7WzDvs07m2oIaQE60YwFaBHgieqpCBm63YBIn2PBOY,3511 +torch/include/torch/csrc/api/include/torch/nn/modules/batchnorm.h,sha256=y_lZaGCREqw8sYeWNXPTGVdzvMHOvHMeoOOa93strd4,8429 +torch/include/torch/csrc/api/include/torch/nn/modules/common.h,sha256=VIpLpI_JSyD8B372lKDzksHNFqgy5Ktn0F9iv5v-YTQ,4318 +torch/include/torch/csrc/api/include/torch/nn/modules/container/any.h,sha256=-0djOjOArzluwlFDJsruTCko9mWKDtzYJXyvR4BLHYc,13737 +torch/include/torch/csrc/api/include/torch/nn/modules/container/any_module_holder.h,sha256=bdAIFrJoaFuojx_fZ3womElXZunmzjFtQWnNoL4YzQA,4816 +torch/include/torch/csrc/api/include/torch/nn/modules/container/any_value.h,sha256=szTdZbkCAWlv_2fHJbC53Y7_-umOoeHnsX-Xwxxa1Ww,4096 +torch/include/torch/csrc/api/include/torch/nn/modules/container/functional.h,sha256=qofxHYNImf3Xz165cWFeHfiiUaXbShoULoTe3F9Ydnc,3435 +torch/include/torch/csrc/api/include/torch/nn/modules/container/moduledict.h,sha256=KhBGOIBpOzTFDYMbUAJr7oCKHtNQi37g6MJ6rIwbhgA,8447 +torch/include/torch/csrc/api/include/torch/nn/modules/container/modulelist.h,sha256=DecSFwJkcEqzDnYv61mNuXYZ495Rz0nayGPKFw_PuxQ,8993 +torch/include/torch/csrc/api/include/torch/nn/modules/container/named_any.h,sha256=P2sWNAVFOwMDgAxGuLNeWTfv27J3pwgQfd6fDZ1xTUU,2748 +torch/include/torch/csrc/api/include/torch/nn/modules/container/parameterdict.h,sha256=hZfkx8aAFcLHdr-pTgDgRZB0AhFKEcw1fhxpoyqGRus,4500 +torch/include/torch/csrc/api/include/torch/nn/modules/container/parameterlist.h,sha256=qMtiD_jFW5IkUUCpuwzLcK2VqRqDIOIhr6zESABTwug,5612 +torch/include/torch/csrc/api/include/torch/nn/modules/container/sequential.h,sha256=SzNwh4q_7ACUoyFFKkBnrdpU3TioIXumqrFvCYJ_-jA,13750 
+torch/include/torch/csrc/api/include/torch/nn/modules/conv.h,sha256=cwKq_hgs8btQvEkcWjmSARc5MoGiTifuq_ANL94_fsM,16338 +torch/include/torch/csrc/api/include/torch/nn/modules/distance.h,sha256=_pLgboy-ozY5cCyo_45WM02RoQLmNYMMZbm1BfvzgKU,3081 +torch/include/torch/csrc/api/include/torch/nn/modules/dropout.h,sha256=OUlN1AlhQZYKuiWH922-2d_ulSTy2-j1vp2jBGUhQwU,6517 +torch/include/torch/csrc/api/include/torch/nn/modules/embedding.h,sha256=-vVov7QfA7f3wc-ZEBQaGA6g5rcn9fcyiJxQrp7w2FE,6220 +torch/include/torch/csrc/api/include/torch/nn/modules/fold.h,sha256=BIg7APQ_HaeIxPnIvt2NwXa_7K03B-ImFjUEb-z5FCU,2847 +torch/include/torch/csrc/api/include/torch/nn/modules/instancenorm.h,sha256=lC3NztWPUljk3HqLTJ25BFyyNmiyH2bYfSou2p4_FBw,5434 +torch/include/torch/csrc/api/include/torch/nn/modules/linear.h,sha256=Dj053TykL1F_LHYOr7deM6oX4h29y96RnTI1VmK9wLo,7445 +torch/include/torch/csrc/api/include/torch/nn/modules/loss.h,sha256=gkzXNOMfG7NZSdwYV07DhbvNct4cSeddRBs-BrhKZB8,31007 +torch/include/torch/csrc/api/include/torch/nn/modules/normalization.h,sha256=g8a90_b9uhX0sdY0gzdiSxUYqjWTyg2FJ3apWPDrCms,6926 +torch/include/torch/csrc/api/include/torch/nn/modules/padding.h,sha256=RetIQeHGtV5VHxsiFzE3kvACiWVWccne5OgDgvv-nnU,14371 +torch/include/torch/csrc/api/include/torch/nn/modules/pixelshuffle.h,sha256=ffOnYEpSUTn4_cs5o9i-4o9sBwmJlkNl2LHJWgdhK-E,3134 +torch/include/torch/csrc/api/include/torch/nn/modules/pooling.h,sha256=qSQea70gvy9OY0BuIMF8nn2hwOvopC7S01-bzjAiYv8,29665 +torch/include/torch/csrc/api/include/torch/nn/modules/rnn.h,sha256=ZGV1Iak0TOsKcJuwI881fUavCq99XzCX9k5tTVL2OxY,13471 +torch/include/torch/csrc/api/include/torch/nn/modules/transformer.h,sha256=csOLk0HEH9ecHhyRvagjkFSx4ImefSI-beO_mn1Tnh8,5347 +torch/include/torch/csrc/api/include/torch/nn/modules/transformercoder.h,sha256=EPoNusED4pNobo8JPiOcu_3vgNc1oSmlS0wsGhWgw14,5206 +torch/include/torch/csrc/api/include/torch/nn/modules/transformerlayer.h,sha256=ygMr2iO_75qd_-_jQ-58kjD5N6oxv9QMOMpeWVwgEvs,6432 
+torch/include/torch/csrc/api/include/torch/nn/modules/upsampling.h,sha256=APIo9hRrQldZurXJqPguJ3ZG3YWfFBGM0qjwpcIjpJ8,1651 +torch/include/torch/csrc/api/include/torch/nn/modules/utils.h,sha256=Sx9S0iLCpXRgFlXc3L4y41z4eyks5MlJm4YrLEtIb-k,1506 +torch/include/torch/csrc/api/include/torch/nn/options.h,sha256=gilbiVyS93p7hxxyII0HQeKrDG8lVfOafzVF--xe6Jo,645 +torch/include/torch/csrc/api/include/torch/nn/options/activation.h,sha256=MEhQ0XumEdwpd7aAre0xYsP6xIUN4eMzkTDdrTZod7o,19044 +torch/include/torch/csrc/api/include/torch/nn/options/adaptive.h,sha256=5EPq-J3DmNkQbwGBVLp7PeBCdVtSvBkfbPgGwdn4hCk,1082 +torch/include/torch/csrc/api/include/torch/nn/options/batchnorm.h,sha256=bA-q0Q25BAHnVEZZsLvFeZhgP7v-EFcoKHttCkYGjGY,2799 +torch/include/torch/csrc/api/include/torch/nn/options/conv.h,sha256=fDKeCfujd4KWTHVxhK3uEHJwcc1z0pa4Kphq6FQW_PQ,13471 +torch/include/torch/csrc/api/include/torch/nn/options/distance.h,sha256=1xO3QSvj2bRjC_tDSxsRHeZrWUo-aoyjoWGfjpE4FB8,2014 +torch/include/torch/csrc/api/include/torch/nn/options/dropout.h,sha256=310P1wMBa54NUwsX9-ZnaZz5cQ6MXt--zLwMopR-kZw,3070 +torch/include/torch/csrc/api/include/torch/nn/options/embedding.h,sha256=zp9zondxTXBMOhQvXbGMnvLpfbm07fTkt5ToXdAw1lY,11667 +torch/include/torch/csrc/api/include/torch/nn/options/fold.h,sha256=O0L5OK4knN17vWHoOlJO0oYcTPtg45KwHf5_Ozm7QOo,2985 +torch/include/torch/csrc/api/include/torch/nn/options/instancenorm.h,sha256=OYi8hkjptDwVSmUe4rVRKbWAinL3-yGWKfnrgVQhchE,2321 +torch/include/torch/csrc/api/include/torch/nn/options/linear.h,sha256=4vbCx_PsUqaEBZHT23QkkMlwX8dYJlgPDUewYEgAOP0,2804 +torch/include/torch/csrc/api/include/torch/nn/options/loss.h,sha256=VgQfR6AWRz29Krvn7UGpfr6idTUpQK0YZ18XI8tr-0o,26727 +torch/include/torch/csrc/api/include/torch/nn/options/normalization.h,sha256=Ln-i8xDwZwpOjrcE3WSNHSXKxMNxwc719QnDZbHk868,5522 +torch/include/torch/csrc/api/include/torch/nn/options/padding.h,sha256=niy8hQD-O7chIgVuxed2Vn5Y4msg6qsCsUnoU2XHNwM,6860 
+torch/include/torch/csrc/api/include/torch/nn/options/pixelshuffle.h,sha256=8IxbtIymppT6zZ5HML8U-u4popSCbPoGWwL97kWovj0,1657 +torch/include/torch/csrc/api/include/torch/nn/options/pooling.h,sha256=QruKDQiRRcFi2DruBEJ53sAXEV-NVlMP4sZSgmoWajM,17743 +torch/include/torch/csrc/api/include/torch/nn/options/rnn.h,sha256=RBLJcPr1RXoK-lFWVyAgxcmQa0StVX3wifjYOr3XksI,8220 +torch/include/torch/csrc/api/include/torch/nn/options/transformer.h,sha256=ZovANxKNXQMg3-GjB1FYcYnGiWq5zAunZVRNxubCnvE,1839 +torch/include/torch/csrc/api/include/torch/nn/options/transformercoder.h,sha256=GyPoGtcjMp1zC0cdecPqdHUyqI9qMRImM570EXUoVjk,2344 +torch/include/torch/csrc/api/include/torch/nn/options/transformerlayer.h,sha256=j-KijW00OJcG8mDqkY7RzA8UTE337YryBmvU0SkhGU4,2084 +torch/include/torch/csrc/api/include/torch/nn/options/upsampling.h,sha256=VRRsuAWEu3YEmw3lJCpYIIQGkVmp6BqP1XIVeWnlmmc,4150 +torch/include/torch/csrc/api/include/torch/nn/options/vision.h,sha256=gDz2vPPgqQw67D-IBXB2bl9yG4mKr06QW3wl43SQMI0,1099 +torch/include/torch/csrc/api/include/torch/nn/parallel/data_parallel.h,sha256=9L3S4kEoNvgW6ogDqucCmja6t07knrcFkwezE-M92Dk,11156 +torch/include/torch/csrc/api/include/torch/nn/pimpl-inl.h,sha256=F-2WQ11FuaWHsDcz31hVCBBD1WKgkaxCXrabd8sclMc,3224 +torch/include/torch/csrc/api/include/torch/nn/pimpl.h,sha256=hpxX46z_z3hVmfsx0fZ-nbJT8BVHmaRahmrtXEleHrE,6637 +torch/include/torch/csrc/api/include/torch/nn/utils.h,sha256=WCbMNSev0r4C0hIKiE0SK89PEv_fXBGZA3jregGzNNE,131 +torch/include/torch/csrc/api/include/torch/nn/utils/clip_grad.h,sha256=Fyo272MbPlf1nOsKjRgK9fv_Iw97ZM64og-xNnttJkQ,4874 +torch/include/torch/csrc/api/include/torch/nn/utils/convert_parameters.h,sha256=okJLdSy5uXB6Lqu_X6r8oDGeDgnLmB_BQuN-v29oJNY,2442 +torch/include/torch/csrc/api/include/torch/nn/utils/rnn.h,sha256=2RCrHlY7Qh6roiB-6xyTIuba4UCJBxMaL2LpuywgRt0,12841 +torch/include/torch/csrc/api/include/torch/optim.h,sha256=N73RsN8bKIwQiMQ5Syj4MIBBHDHTkb-2aOxb4UuoQbA,394 
+torch/include/torch/csrc/api/include/torch/optim/adagrad.h,sha256=MTFVhxp4UVapF-9_WycQrkLOJnm28z-2NkW1bRIaJNQ,3553 +torch/include/torch/csrc/api/include/torch/optim/adam.h,sha256=g25k323eiHpoomr8I4U1FB4ftmSAfwqAhSMEhvK0Fg4,2948 +torch/include/torch/csrc/api/include/torch/optim/adamw.h,sha256=2ggm--yAP7_u6q1LQuvOgVYvaFsHX_luZVlYRQMwJVg,2968 +torch/include/torch/csrc/api/include/torch/optim/lbfgs.h,sha256=W8WrhXAdkVTVEe4LayIZanMWPYmiE9p7zbsqoQ38_i8,3475 +torch/include/torch/csrc/api/include/torch/optim/optimizer.h,sha256=c-Jg2ATVLJixwj7skkD3B2X-qH9kbA37S_vognSzrV0,7828 +torch/include/torch/csrc/api/include/torch/optim/rmsprop.h,sha256=_-0cYe5WqZEx5CFQbQ7kDqI8s7ErzvVf7FFJ8OSYyJ0,2944 +torch/include/torch/csrc/api/include/torch/optim/schedulers/lr_scheduler.h,sha256=IEBVve89OX_4eeCsiUE7VeImY798ivG5y1S6qH5T89U,1073 +torch/include/torch/csrc/api/include/torch/optim/schedulers/reduce_on_plateau_scheduler.h,sha256=gNi-Egw6VDbcCaVhMMwjzcuUrTW05wFgYg5O1i5c72k,1350 +torch/include/torch/csrc/api/include/torch/optim/schedulers/step_lr.h,sha256=dL77rvbqybcqFZr6FodFnW6taud9JFPFnX5kW_2b_ZY,424 +torch/include/torch/csrc/api/include/torch/optim/serialize.h,sha256=3wSDF8IC_9oq_DnTSsLzo6KHwY6Mb1i9_ISaLOxEB1A,12550 +torch/include/torch/csrc/api/include/torch/optim/sgd.h,sha256=5W-3wIeP3Zic4ZJRA0bwYiwMPJGzX88CHi_X4qr5uVU,2690 +torch/include/torch/csrc/api/include/torch/ordered_dict.h,sha256=LyKdAi0oHRb09ePr1N98eid4PwITHxxOwUcogYBX-6c,16236 +torch/include/torch/csrc/api/include/torch/python.h,sha256=a1qb9sEiYP1CTRMOLSmWfIQ14fkO0ITwNuB1dlDVwjE,9903 +torch/include/torch/csrc/api/include/torch/serialize.h,sha256=IDE9Nj1gc7itAvC3fPOtydjauYKC7nLkWUi6vecOlLQ,5244 +torch/include/torch/csrc/api/include/torch/serialize/archive.h,sha256=IM8xj82javqyrkZJGayTWjrst7_FKKllzucRwSMrlos,101 +torch/include/torch/csrc/api/include/torch/serialize/input-archive.h,sha256=gpV1NyPoUFB1HkSFS4V1taOdEV6TQ7UyzstyqbZ8Kqs,3981 
+torch/include/torch/csrc/api/include/torch/serialize/output-archive.h,sha256=SwvplynJPXRoJFGKYnocrS4K_jelwVG0cCmiB8wd9Ow,2315 +torch/include/torch/csrc/api/include/torch/serialize/tensor.h,sha256=gEYAbqPp6R4AH1G7o4OuS-zVPY8osmtguUFfSSUt7jI,432 +torch/include/torch/csrc/api/include/torch/sparse.h,sha256=XqH3dLpwkPJzg0p_5XcJ01tnrQC98M9Fhe48Jg6KHc4,97 +torch/include/torch/csrc/api/include/torch/special.h,sha256=Cms2GXCdMlPqc9bSGWuUI7sDbSgOn5Tku2jHNXQ03BI,38310 +torch/include/torch/csrc/api/include/torch/torch.h,sha256=VRdG_p1q4Y2gB0LtTmL_C4EpbrPQuvFVa-21l1wc-VU,154 +torch/include/torch/csrc/api/include/torch/types.h,sha256=YA8b7CeormyQ75LCelmGuuzDfki58RMEaRyT9nInnYo,2496 +torch/include/torch/csrc/api/include/torch/utils.h,sha256=T1WuMmWHTIEsjAwu6u04stbCbDGCpQyqDVmdvRrmIoo,3512 +torch/include/torch/csrc/api/include/torch/version.h,sha256=cK2j2UXkVa1MPDUYiSZnd5SSihkIyII-Ph6yWJRko9o,315 +torch/include/torch/csrc/api/include/torch/xpu.h,sha256=Dlrk3eATuICT4j9HpzznnO6T7nPzIoEXlbuUtwstXOU,602 +torch/include/torch/csrc/autograd/FunctionsManual.h,sha256=HeNXyPV_7T-PusxSd6-7mfjB6N-yrzGfynxPvV0f1K0,32550 +torch/include/torch/csrc/autograd/InferenceMode.h,sha256=hyp9uc-uXPuSoiyVupyje0n6r0_RlcZKQS4xX8eVAWE,156 +torch/include/torch/csrc/autograd/VariableTypeUtils.h,sha256=gUqdH4Dy8HJwn37g-_2faukdeV0MCekFjyL_4wH-gBQ,14551 +torch/include/torch/csrc/autograd/anomaly_mode.h,sha256=VhivPLzU1VudJLTVJvKOCfyR3e9Y0dLBwaefJaGDWpU,1724 +torch/include/torch/csrc/autograd/autograd.h,sha256=JxafoWW-lhFTK3hrK9xcsq0kwL0lmX0-wdPTlg_cW94,5309 +torch/include/torch/csrc/autograd/autograd_not_implemented_fallback.h,sha256=4uoj4sHweJt--NEkq8qWqEWj2tb6md_kC31TOU1N8os,1142 +torch/include/torch/csrc/autograd/cpp_hook.h,sha256=PRy5GQju8pdUEfXKQYo2qFRnyXa5G9l39RAc04cNXYI,865 +torch/include/torch/csrc/autograd/custom_function.h,sha256=LVGIHjryxdccd_EohZd0O2iROImMrg6QCn_ua4LcoFg,18020 +torch/include/torch/csrc/autograd/edge.h,sha256=pqMnb2ppiPeiKn61IR_SY2jMpRQpU-OB9Vr7cqZrR6I,1615 
+torch/include/torch/csrc/autograd/engine.h,sha256=RYvctWCRIj1qGFhI8GOuqmRtMDjesZEOb1XQ5Diuggo,10709 +torch/include/torch/csrc/autograd/forward_grad.h,sha256=4b01CUfL2WNzrxME5fiBmkGdCBDFoYwa7Bg--rwIWDY,8938 +torch/include/torch/csrc/autograd/function.h,sha256=aOZe5dj4vHs9hN8qHaqZBQZCi6sTFMzv5mZ3XDb3sJA,29650 +torch/include/torch/csrc/autograd/function_hook.h,sha256=lRuKm3CZ19Gl8ueR_MMSn8m4VK_Kx6ZfrMXR8ZVitnk,2072 +torch/include/torch/csrc/autograd/functions/accumulate_grad.h,sha256=tdSdoR-soHOdkM6OcMi4Z29Ceeib5J0dVrPIrVEomgY,13634 +torch/include/torch/csrc/autograd/functions/basic_ops.h,sha256=oN3VX2OPbJrM3jZ5Ls5RM5DtpEAQ8SXhd3GwR21qndo,3140 +torch/include/torch/csrc/autograd/functions/comm.h,sha256=sVYyhl0ZLDFX6oDteXgnfsqbmrWs3v8JomZJ5sSN3EY,1197 +torch/include/torch/csrc/autograd/functions/pybind.h,sha256=e6eM-r7Xrvjp2lGt1nC0SOj56u_m2u6ysd0nBlXMP0s,342 +torch/include/torch/csrc/autograd/functions/tensor.h,sha256=0611AbCbX4aQnGq7pGCFhgnGJlBEFXobxwTMcIaVI0E,7230 +torch/include/torch/csrc/autograd/functions/utils.h,sha256=UbDNfQp9IL95w5ZpqkINJ-ootvixSysZHg3TWMyHqws,3159 +torch/include/torch/csrc/autograd/generated/Functions.h,sha256=8W1DM4DREsaLlGNtSv57FWjFiIIe2JWsQ1JoeXgG7XQ,505210 +torch/include/torch/csrc/autograd/generated/VariableType.h,sha256=zXJlpMZTgLXR9naIuPUZW9bBJ5xqI8wmM-Un00OheSA,1730 +torch/include/torch/csrc/autograd/generated/ViewFuncs.h,sha256=jI4jXwVMtdXHSZaYeK9YJ6g802RWJawHAe2r8_Ionsk,37106 +torch/include/torch/csrc/autograd/generated/python_functions.h,sha256=m2zbP4C796FvQVsVVmsXUCiz8Prj2O-pT55rbHA2NwA,891 +torch/include/torch/csrc/autograd/generated/python_return_types.h,sha256=duqsERucZcEvzjWUHjzvRJejN9GvrIcxhI29Sc7vrgQ,4062 +torch/include/torch/csrc/autograd/generated/variable_factories.h,sha256=eohEjcQEeI8km2AE_wWbDqni4xnJ5dGDTNsAp5_9lGo,56209 +torch/include/torch/csrc/autograd/grad_mode.h,sha256=uzjms9KcKXMdR0-20069M5-4lNYJoO6Dp5IrTlp0040,210 
+torch/include/torch/csrc/autograd/graph_task.h,sha256=l4WB36eN9IpMBHupk2IGKjQYhKvvVN_H34rpfJRTpnI,9250 +torch/include/torch/csrc/autograd/input_buffer.h,sha256=tAsRpLHyRv5sOr6rn5NHGyMewfahCUu5KShs8Sbo7yo,1407 +torch/include/torch/csrc/autograd/input_metadata.h,sha256=c0fYFoqxMeKpEVrT1-eprIezNZw5T01JBKW7g-11tv8,2982 +torch/include/torch/csrc/autograd/jit_decomp_interface.h,sha256=FyQGL5BsOWcWFZruQYh_ysQDEy5LkO-GcRT63H3_DMM,1798 +torch/include/torch/csrc/autograd/profiler.h,sha256=6ArK8ksshLYFDODo91Q75_cqzFjn0mNzQ3-Tdefu4y4,112 +torch/include/torch/csrc/autograd/profiler_kineto.h,sha256=CG1CsnphIKQKqW-BEHVE-OevO4yC0SY-K4RjddJr1Kc,8013 +torch/include/torch/csrc/autograd/profiler_legacy.h,sha256=A2fG1Sj2zZ7_dBB7y3FFiEtJXl-1GYsmEhNbZwAmmLU,10678 +torch/include/torch/csrc/autograd/profiler_python.h,sha256=8Z3Z8f9uC_BLJzWSJAGsNyhmig7RCpSZm0aYsYzkOZg,84 +torch/include/torch/csrc/autograd/python_anomaly_mode.h,sha256=K3FdDEihxco-f7sD3or96JzehVb0B28W5VMWo6RpkN4,1139 +torch/include/torch/csrc/autograd/python_autograd.h,sha256=L9C-y46IX9u0Q48q4-cEFSDRK1weUIEzAQLcwtUlq-g,375 +torch/include/torch/csrc/autograd/python_cpp_function.h,sha256=WdKCOlfzd5RJkxyN7fVMo0hQ9QpyAlknGkP9rsfVDjs,4827 +torch/include/torch/csrc/autograd/python_engine.h,sha256=r4NGZcjGHlyMJMs6cYpxXRSdHLV8ehfhlqluuyYS9U8,1256 +torch/include/torch/csrc/autograd/python_enum_tag.h,sha256=xZjV2TWlkXw5USUrIq0aYpoVoG2mbDy4GXREOnPPkZI,120 +torch/include/torch/csrc/autograd/python_fft_functions.h,sha256=OZv3f3f7oonK7zYbShCv4EH6p3VhSPaAH7Sl-ahz62I,87 +torch/include/torch/csrc/autograd/python_function.h,sha256=9jweT-wPU2N1Ca5i63DOqdl15NOzjGaGgU6otUkQftw,5509 +torch/include/torch/csrc/autograd/python_hook.h,sha256=gyxs2ZFVFgML8tDkwlUbxU0ZBD-MV5t2xxWLjw8kPHw,2015 +torch/include/torch/csrc/autograd/python_legacy_variable.h,sha256=_8tWed3mDIVmtYpI_WPVUpfjK_eH7Qv4wNrSIIlahVQ,257 +torch/include/torch/csrc/autograd/python_linalg_functions.h,sha256=592vpVDlik2EwxZnuKwqJ617_xq2p0Rf3Tr4XspSh2A,90 
+torch/include/torch/csrc/autograd/python_nested_functions.h,sha256=4R_lyK5cWoEtRSKS1HqX6xHPfO0IUmnA99yiU-lyxRk,164 +torch/include/torch/csrc/autograd/python_nn_functions.h,sha256=_MMnMSOYTvNIHHzX6OEExk2ABVmgSVYvD_gt9C7gBqQ,86 +torch/include/torch/csrc/autograd/python_saved_variable_hooks.h,sha256=QeL_3hpnQ4AcIK63QipFRCndchE8ZKGqjQr8Q9nRwBk,933 +torch/include/torch/csrc/autograd/python_sparse_functions.h,sha256=Yw7doo_gafLcBM809TnnzZoitf_NBtCYex0nSTNweAc,90 +torch/include/torch/csrc/autograd/python_special_functions.h,sha256=4OdWU29kp8Alvtug_gJ4sLBvBYPq7n-v5Uwi4NRL_gk,91 +torch/include/torch/csrc/autograd/python_torch_functions.h,sha256=WMyOlDkJIchECl67t4uDckaSgKg0d1oK_F36pQ0Xd_A,650 +torch/include/torch/csrc/autograd/python_variable.h,sha256=5nkTR6yfXMKAa1j5DRWp94ua5AHRp7QnIybHgOWNkGc,3506 +torch/include/torch/csrc/autograd/python_variable_indexing.h,sha256=MbNxTqIUSgc8hqrafHaWeAjJZ5yWDEO1p3olOF8wijg,2752 +torch/include/torch/csrc/autograd/record_function_ops.h,sha256=A01-BHejveN5ZZj6lO64LiiV7nQhj9XvCXzMMmHomkQ,935 +torch/include/torch/csrc/autograd/saved_variable.h,sha256=fiuhg4q9oF5M33E1AYyUGy4iSaZlK6w6AnqO4vgWpcI,4643 +torch/include/torch/csrc/autograd/saved_variable_hooks.h,sha256=d4Y8k7TzIhfaXWi_89WXbe3ZtT283ac_OiLtzFcpHuY,296 +torch/include/torch/csrc/autograd/symbolic.h,sha256=XMrwDnlSibXxnqZiy5pNDVwTBDsU8aIDamEg66kOEYk,300 +torch/include/torch/csrc/autograd/utils/error_messages.h,sha256=MDzsCv4cOe1XjppPx3i7fpR7s8VPs0DEpURV5EY50f8,495 +torch/include/torch/csrc/autograd/utils/grad_layout_contract.h,sha256=a5OE0iqU4BwZ7slkuusZm7K6U2KohHkMuq9r8uMNG9Q,2822 +torch/include/torch/csrc/autograd/utils/lambda_post_hook.h,sha256=l0Skqp1lgfdddQytz3F5Z518OBlQCYBXfaOwf6vXRKg,1273 +torch/include/torch/csrc/autograd/utils/python_arg_parsing.h,sha256=qqap1e7rG3Rv2HmCB2J6Ip1H7xngAlhI72Bgmi1xBYU,1423 +torch/include/torch/csrc/autograd/utils/warnings.h,sha256=e09IIgYcdcmYq2V2l3f3qCKajFgAzhztvsZENTxfQPg,583 
+torch/include/torch/csrc/autograd/utils/wrap_outputs.h,sha256=a5cSaFpNxeNvCoTb1xY2EIj2Exb4Xu6Pbipu63lxtNU,3735 +torch/include/torch/csrc/autograd/variable.h,sha256=zUB10TBW-V8u2tI68XsNwyZismMGtDALjXjtFmW4R9Q,40109 +torch/include/torch/csrc/autograd/variable_info.h,sha256=R-Zw-sUmTMOIpWb5Vnf_n2jlteDs5o0NZpfxLx7yKYg,481 +torch/include/torch/csrc/copy_utils.h,sha256=HPNwtV01Xllja6b16GsOFNYlpV0DrHkqsGCiwSYWnVg,1420 +torch/include/torch/csrc/cuda/CUDAPluggableAllocator.h,sha256=WPjVT7_n0qNKtKbtErf0_bXR49ztb0uySKhVgxvlHBY,6610 +torch/include/torch/csrc/cuda/Event.h,sha256=FK6jWILeAE5Y3b53Eqw-pDfF0tqu-LmIiiJ3Qck7ahc,406 +torch/include/torch/csrc/cuda/GdsFile.h,sha256=13Pv7i9UtjuUQ3BBepI-B1QPsMiv425T44_5P2cERzA,158 +torch/include/torch/csrc/cuda/Module.h,sha256=GgDQDHrKiVqJgRS_Oxe49-vTIQorgcL1x4VgNbiEGvs,436 +torch/include/torch/csrc/cuda/Stream.h,sha256=Qp6DFebuoMgWE05n4AIfc7pasukINXSwKLrkqRK7TJo,504 +torch/include/torch/csrc/cuda/THCP.h,sha256=M9D-wbEcdRLxPxx-ZwChqKXx9TI1hL5GFLipfYbRHXY,213 +torch/include/torch/csrc/cuda/comm.h,sha256=4JXnOwAl3REHI3JkeDpfdfs1GzwzqPi7HG7OprONjbA,1514 +torch/include/torch/csrc/cuda/device_set.h,sha256=8s6LLs4PG1uykInR1FW08Ey32EQP84vgg4rDfPcLmF4,185 +torch/include/torch/csrc/cuda/memory_snapshot.h,sha256=0xySw7H9rTru4U_DECiQm2uNFk9z4hq5bUNBq7VIiQ8,769 +torch/include/torch/csrc/cuda/nccl.h,sha256=ASNPlTouWzgwdIWnIhi5p3mbFilyPwOomcO_A6t7DAY,5888 +torch/include/torch/csrc/cuda/python_comm.h,sha256=ysrI2Zyf3DEjAUPkc6Occ3SuSd__eZnPJptRA-hiU4A,123 +torch/include/torch/csrc/cuda/python_nccl.h,sha256=yNwoyNpT1EmyYAmk9k7UWpO3aNfErizqV0QVMiFtDts,682 +torch/include/torch/csrc/distributed/autograd/context/container.h,sha256=zgh1q2ouJQbhgfE03o6OFMf45Nf7JMF6oxK9xVqpd40,6435 +torch/include/torch/csrc/distributed/autograd/context/context.h,sha256=IBra9z6MfWpxdiCMJ5ydGwu8-9i0C5RpoqYc2MZ0Ooc,6623 +torch/include/torch/csrc/distributed/autograd/functions/recvrpc_backward.h,sha256=RXUW8DHNtGmsJncR5kalFR1OlSCMXAD42xlOfHwIdFU,1705 
+torch/include/torch/csrc/distributed/autograd/functions/sendrpc_backward.h,sha256=aM04SmFMzmvHhoPbr3NsSLuNMxfnXUiRxKeoFxvPaKs,1373 +torch/include/torch/csrc/distributed/autograd/rpc_messages/autograd_metadata.h,sha256=TncgZoQ3veChIuqqWIrWk3C6fQLkE4vXUX1Xbmu04Po,750 +torch/include/torch/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_req.h,sha256=MbhyDQOZTiZgJdh-0NCSQGWxJd3HP5XI78PP37DD5T4,886 +torch/include/torch/csrc/distributed/autograd/rpc_messages/cleanup_autograd_context_resp.h,sha256=IhH67Y_YeYJG89z83_eabDLtLQFc6Qyw2i_Wf3UBrD0,711 +torch/include/torch/csrc/distributed/autograd/rpc_messages/propagate_gradients_req.h,sha256=fmUaNRSlp8UCpPOrfFVOYUWHnW-tIV1jPEq4gcOrCzA,1298 +torch/include/torch/csrc/distributed/autograd/rpc_messages/propagate_gradients_resp.h,sha256=gFrx1pp9o3mjfJMi4i0aN4nIofKvDrssUgI6ulwGFkM,804 +torch/include/torch/csrc/distributed/autograd/rpc_messages/rpc_with_autograd.h,sha256=gZptHkMQkV_GKb245ZvTXGj83XygGNlc_7RPxIUi6Xs,3556 +torch/include/torch/csrc/distributed/autograd/rpc_messages/rpc_with_profiling_req.h,sha256=ud7bL1DAGkXJjrYbT3at3PrUFSndKQML0MAYZu1SQ0g,2347 +torch/include/torch/csrc/distributed/autograd/rpc_messages/rpc_with_profiling_resp.h,sha256=ZAuSpskXTGPad2W7maXa6yOAqWwM8y01iC5V7M08w0Q,2307 +torch/include/torch/csrc/distributed/autograd/rpc_messages/rref_backward_req.h,sha256=97UW3DLiLJK1AiM3gMGDCNhXTZ7RHPUOeM6MJ0B9Gr4,1027 +torch/include/torch/csrc/distributed/autograd/rpc_messages/rref_backward_resp.h,sha256=ji-yEL0BUVRR6yGLhURGmje0GdRP5m047-urW_0_HnE,558 +torch/include/torch/csrc/distributed/c10d/Backend.hpp,sha256=2_O07CHrJLODFdBEP3O9hj6hd2zn3y-vGMm5J-YcFTY,13275 +torch/include/torch/csrc/distributed/c10d/Backoff.hpp,sha256=fWJcanSOcWUawhDaHYfx9-P0WxZyb5Xwt06JQrHm8DE,1051 +torch/include/torch/csrc/distributed/c10d/CUDASymmetricMemory-inl.h,sha256=c0Rd3w4NKoOttridxoI_qDjuDaIe6oXVe6yt4m2sx9o,9230 
+torch/include/torch/csrc/distributed/c10d/CUDASymmetricMemory.hpp,sha256=pbSwYo6RpPziKcl2m0bPCBRyHnSRZP_pkZp52YmNzg4,3116 +torch/include/torch/csrc/distributed/c10d/DMAConnectivity.hpp,sha256=e1XniWkUQKNXh_L04nbqxaJeFMWVPuuLQKjEJJ6PUmU,1289 +torch/include/torch/csrc/distributed/c10d/FakeProcessGroup.hpp,sha256=pyYQS2AQugAQ8M6NRDWWHE-Zx8qeUEDbGyisRBRMFq0,6309 +torch/include/torch/csrc/distributed/c10d/FileStore.hpp,sha256=T57ueYyvSnyRalBp2sZ51oVvMbE5QoTIiOllsNfzuUo,1492 +torch/include/torch/csrc/distributed/c10d/Functional.hpp,sha256=EJsKg5jbdd4WwbA6Rc3xrFw5B5hqGXYq-JQyUjMkQW0,212 +torch/include/torch/csrc/distributed/c10d/GlooDeviceFactory.hpp,sha256=sh1F0-6ykB0f58LitkS5CC35UgX4ts1cp2G_my6_V8o,773 +torch/include/torch/csrc/distributed/c10d/GroupRegistry.hpp,sha256=AY2m8qmBtcSvfHQLWn79oGfSPmDczounrM9eR6nX53A,562 +torch/include/torch/csrc/distributed/c10d/HashStore.hpp,sha256=Uo7fHZfd1C-LRGMCuyBfk9HyxFizWNWhatj7IllJyU0,1553 +torch/include/torch/csrc/distributed/c10d/NCCLUtils.hpp,sha256=_EOlry63GglfyGderSmgA8hAmZWSmPGIru8EIUTJGhs,27088 +torch/include/torch/csrc/distributed/c10d/NanCheck.hpp,sha256=zlLnH1phdNMzbPDd-R6_Q_ryRhOWtTYYz-I-u6tvzKQ,328 +torch/include/torch/csrc/distributed/c10d/ParamCommsUtils.hpp,sha256=wL6NQQOgtN3c_PPzi0mckFl2Ab_SywypN6JUKJolEn4,8753 +torch/include/torch/csrc/distributed/c10d/PrefixStore.hpp,sha256=eCZ4aJMUKgR69sADUByLy1l9o0Dov3wyxSLpfMVgW5M,1949 +torch/include/torch/csrc/distributed/c10d/ProcessGroup.hpp,sha256=kV1Wtx8A5kjBZBQjKbDu4P2aZjAAn2Bd-939QhUQs0E,28538 +torch/include/torch/csrc/distributed/c10d/ProcessGroupGloo.hpp,sha256=g9Cy5J2rANfJ51wcj7KkdwekJy8tJ5d8dX2DLKiaMUo,15677 +torch/include/torch/csrc/distributed/c10d/ProcessGroupMPI.hpp,sha256=K9SRHo5k52sl1T8emF312IoxGwqczsnip66_DwtoM9M,8649 +torch/include/torch/csrc/distributed/c10d/ProcessGroupNCCL.hpp,sha256=qG9zxiWYG2Kl3GKyoVHZ4N0VbhSbIwaTOrJ6X7C563s,46830 +torch/include/torch/csrc/distributed/c10d/ProcessGroupUCC.hpp,sha256=QTanDcv0ZFHp7LNwsVTFwm2viX79V6YodonMJvcOE8I,11076 
+torch/include/torch/csrc/distributed/c10d/ProcessGroupWrapper.hpp,sha256=WoWJp46RZc8OEI1S_jcOiYC02QAq4VYZhUkTuwHQUNQ,5077 +torch/include/torch/csrc/distributed/c10d/PyProcessGroup.hpp,sha256=VsHzATOvjqC6tKzljLa5L7WVC1ovlUHSb6Nhyr_72kI,7888 +torch/include/torch/csrc/distributed/c10d/RankLocal.hpp,sha256=tG9YhmIjvYo21H4aCITvrL3XsHS2b4uEU4MKUG20hDo,2279 +torch/include/torch/csrc/distributed/c10d/Store.hpp,sha256=LJ4tVtL92wKfaSpfwTKDGwbZi7t3xi3yU6wFu5UN424,3570 +torch/include/torch/csrc/distributed/c10d/SymmetricMemory.hpp,sha256=wbgfLEGjiabKoXux43poulykOBZgVQ5hMi-iZGjSk_E,6802 +torch/include/torch/csrc/distributed/c10d/TCPStore.hpp,sha256=Sn9irAGU41Tr53VJ3NJdcKm7dmibfg0QaN_-O6cFTLI,3832 +torch/include/torch/csrc/distributed/c10d/TCPStoreBackend.hpp,sha256=g8xXzuOEW9iqV4aD6zzDCWQDXDbDYRcdg0e3lWz6r2M,1527 +torch/include/torch/csrc/distributed/c10d/TraceUtils.h,sha256=DToEpiBE7bCLKfX0uYV9M-ixJ4rAaLJswUKUht773no,10142 +torch/include/torch/csrc/distributed/c10d/Types.hpp,sha256=7tiAd6U_xqN-avPG0CdzDhBCCDpuKN-OVUUkpfh7XGQ,4867 +torch/include/torch/csrc/distributed/c10d/UCCTracing.hpp,sha256=t3jW1WV_VfbBnKvL-vJscb1D5oZDHiqIqUoO0AjRbw0,2301 +torch/include/torch/csrc/distributed/c10d/UCCUtils.hpp,sha256=lj2dmFih-AdPgLut2cIYKQwhASWm8u8pU0hIWAF8ktU,6360 +torch/include/torch/csrc/distributed/c10d/UnixSockUtils.hpp,sha256=_kpDQ1OrlMit7seBqBK6jRPDiyDs_S0a2lS4hBlVrPw,550 +torch/include/torch/csrc/distributed/c10d/Utils.hpp,sha256=8RaNxSG_8mnLH0WsbqvYPb4JyG9izrLbSHs-vwB53nI,22996 +torch/include/torch/csrc/distributed/c10d/WinSockUtils.hpp,sha256=Z1e98jBOcbOUdJo2e9Vzt9wcfkJFV3aW48UERrgSiKQ,541 +torch/include/torch/csrc/distributed/c10d/Work.hpp,sha256=_HNWcBEgHoUJT68mJP-QciMDmtX4nSmfftPebzvVl6Y,5095 +torch/include/torch/csrc/distributed/c10d/c10d.h,sha256=erw6jxQNqwf2IKaRhi9c6_x8sscxKKEFFunON2rMUwc,216 +torch/include/torch/csrc/distributed/c10d/comm.hpp,sha256=jTsD3fEuIZXJuXvlXXh9_1ayQJeKf3P7uIh9c6vBqts,4417 
+torch/include/torch/csrc/distributed/c10d/debug.h,sha256=ihvgjPPnKL5CIZCt-b-sG9ZkGQOgR127o9j4JnzCZY0,604 +torch/include/torch/csrc/distributed/c10d/default_comm_hooks.hpp,sha256=yZm5rDul1jeAe4NzXlA3YWBgOLqQBMOQXKhX-7CXAjw,1745 +torch/include/torch/csrc/distributed/c10d/error.h,sha256=JLzBftaNhMRWU7kZSmqSgniRPbj2NYgg0FshAyguz5E,1361 +torch/include/torch/csrc/distributed/c10d/exception.h,sha256=YpfrCv_c9vxk2tI14aa5iHujuxWkCwUYg56v4oYHBj4,968 +torch/include/torch/csrc/distributed/c10d/intra_node_comm.hpp,sha256=9YD34ToTUl6Ft8P16SlolYF0-qb3x9j9cTYw08_LiVA,4349 +torch/include/torch/csrc/distributed/c10d/logger.hpp,sha256=uAMZoEx53uoD1-pDFmzR3rj0TFT2PkxQii9c0O604IQ,5175 +torch/include/torch/csrc/distributed/c10d/logging.h,sha256=WSP7f4iQC-gr0BLEUEEpCMq74ihWIr8ERvk3fo7_LX8,2059 +torch/include/torch/csrc/distributed/c10d/python_comm_hook.h,sha256=3C91AuADu-mZPOuxjNWxQywWOdp0X94yf4zbPjR3M0w,1072 +torch/include/torch/csrc/distributed/c10d/reducer.hpp,sha256=nmQVowSxOavzxVLFwS2yQFQffW1n32EriXiIRNzMVcU,25671 +torch/include/torch/csrc/distributed/c10d/reducer_timer.hpp,sha256=3QMeZJVsX8-LlAyTFaodzGfVXvw4caZXp9XSAsrNBVQ,2384 +torch/include/torch/csrc/distributed/c10d/sequence_num.hpp,sha256=lqOeKob1Pgw8lBcyXCKnHBSPwG52bHrNh09I9ntRmqY,1687 +torch/include/torch/csrc/distributed/c10d/socket.h,sha256=QKlmuMuTIBLoMulqyM9Q547UqIoqvZCaqjbHmpPJWxw,2487 +torch/include/torch/csrc/distributed/rpc/agent_utils.h,sha256=GnT_Z5VBQjr8MX54ju9oIuXlpk1O5jpK2y0aXQmbPUo,1679 +torch/include/torch/csrc/distributed/rpc/message.h,sha256=oaI15j4VSm5zxVTJOa_6FU3KyDO5FZ69PfGfBRjOoUY,7527 +torch/include/torch/csrc/distributed/rpc/py_rref.h,sha256=vRGei0f9FAjcmk1u-tBFG0cbCJgp_CSzhkELhQrIpIY,2974 +torch/include/torch/csrc/distributed/rpc/python_call.h,sha256=DUSuli81Ud5Y3ljN8u2brlP8uoDG2PU53FUajPCiuJ8,740 +torch/include/torch/csrc/distributed/rpc/python_functions.h,sha256=l7rvYxyH_FwklGlEXvb4eD4u56HAieoaB8RHHbOFWDc,2257 
+torch/include/torch/csrc/distributed/rpc/python_remote_call.h,sha256=4owllmF2CL5M6UDbJhSU6y7Xjxf8x7NL3Msz8ag8lEo,1134 +torch/include/torch/csrc/distributed/rpc/python_resp.h,sha256=EruALdG3oMMI9g9D33twsUjWUKbxFmnXUpRs2_MuvXY,621 +torch/include/torch/csrc/distributed/rpc/python_rpc_handler.h,sha256=qI898C_Q5cHWNLnCD0C6hzCI1RjFfJumYGB0oRHxZRo,4954 +torch/include/torch/csrc/distributed/rpc/request_callback.h,sha256=7fB4tEeBfZycq0AeiqjtWILjEPYMgJaWEggGfLFFOHU,1226 +torch/include/torch/csrc/distributed/rpc/request_callback_impl.h,sha256=pQHwpJE6B5HqNKABznjZNSYX28pAeuqXE0x2q6rVJVU,2082 +torch/include/torch/csrc/distributed/rpc/request_callback_no_python.h,sha256=eHwBIITOXD88DkY89MVo9HRZu7qMWsdIHRyinKPP3CI,3904 +torch/include/torch/csrc/distributed/rpc/rpc.h,sha256=gw6OlbaNdLZo4MjtGUi-olfI73hC4MXOoXWWH0USZLQ,164 +torch/include/torch/csrc/distributed/rpc/rpc_agent.h,sha256=oCrvYY2hur8J43MYDARIMb7KVQbZHN8BKAL8iuA_bX4,13405 +torch/include/torch/csrc/distributed/rpc/rpc_command_base.h,sha256=XSAUXFKG_5WVxmSzfM4CzwAU0GpSA7vW3suz7Ytm7PA,678 +torch/include/torch/csrc/distributed/rpc/rref_context.h,sha256=xipzrInd-SMPA5_KFs5mEo7m4vJgahCJaRXBbMvyO7o,15768 +torch/include/torch/csrc/distributed/rpc/rref_impl.h,sha256=-lQt4vFerSa0UOXpHRaulG2ZlroCSOSmmv0v27OPTW8,16098 +torch/include/torch/csrc/distributed/rpc/rref_proto.h,sha256=nujIyovpcGE7titZrtqtpHAwUvr_BOo55kOYsRTBTK4,5240 +torch/include/torch/csrc/distributed/rpc/script_call.h,sha256=WLYSwcuhva02aHg1_qFLH_nz4Up-KZEEnzP7CBly_U8,2516 +torch/include/torch/csrc/distributed/rpc/script_remote_call.h,sha256=WV-k6jQJ_tjJzxgs_ZI6fyg16BLxctgIrubBrnuX73E,1652 +torch/include/torch/csrc/distributed/rpc/script_resp.h,sha256=hMiCaxdFPYYGYFOaRRNOd-tTvNrD1T36iv-7AkwJIP0,678 +torch/include/torch/csrc/distributed/rpc/tensorpipe_agent.h,sha256=ynvHLCnffE6kxa11ULBi8nYJ57_U-Zz5BaHlqVb4UD4,17430 +torch/include/torch/csrc/distributed/rpc/tensorpipe_utils.h,sha256=l4LeUxs-mqLa5cvLNrC3rXkmRWSU7EkhpC3Hqkm9SLs,4718 
+torch/include/torch/csrc/distributed/rpc/torchscript_functions.h,sha256=zQxTYJt0NNJdX4HeNmMKmFpa5BYHMdg8LzkcCrQvkl4,1657 +torch/include/torch/csrc/distributed/rpc/types.h,sha256=rK1k6kYEVyfLqoOvrlw8RreR3lZNYWE7HKtaLIW-5Ms,1670 +torch/include/torch/csrc/distributed/rpc/unpickled_python_call.h,sha256=MKru-ZIYPTM3RUixZCRI0vHF3q6c1kCanosDJjm9UsY,1388 +torch/include/torch/csrc/distributed/rpc/unpickled_python_remote_call.h,sha256=4bPjObXNMf73qJnrW_n1JrW0YX3z_1feutLdAVbo0cg,1207 +torch/include/torch/csrc/distributed/rpc/utils.h,sha256=bpmEULAaBqiQXZ_om3jMieazn63WWndj-vLbzl_j9M4,3840 +torch/include/torch/csrc/dynamo/cache_entry.h,sha256=WpMqVaH4_dqf09Nb2b-v3nby70Tf7Y1O53D6zExg1Ig,1808 +torch/include/torch/csrc/dynamo/compiled_autograd.h,sha256=J0LXTAmwoeqmBEAWNZ07P5IjIe5CEkpF-ycBEWjPDgo,25193 +torch/include/torch/csrc/dynamo/cpp_shim.h,sha256=fnpkREuBeUxQBVmRTju2V-MjbxZr7kOtFOimeSV2sJU,478 +torch/include/torch/csrc/dynamo/cpython_defs.h,sha256=YSTaABJQR-8eCUvfUi4gUpsX2kRTZHs0gN7rK_ACNis,1050 +torch/include/torch/csrc/dynamo/cpython_includes.h,sha256=_JvYiestJl25tmHhCGt0RvvbisHMHkbRm-JDHKM4dXQ,1014 +torch/include/torch/csrc/dynamo/debug_macros.h,sha256=xLTX7IzEZTf--rhyiGDa1Oook_OsML4jXabBjzhwB3E,1607 +torch/include/torch/csrc/dynamo/eval_frame.h,sha256=E_GkS9vOzpIaTH15YpFAfw7Pf-q6JG19wf4R7um9-RY,97 +torch/include/torch/csrc/dynamo/extra_state.h,sha256=v4YGpTdkGiAxkgBrwHgjmBHS3hk0rg24irv7Xfd6OBw,4680 +torch/include/torch/csrc/dynamo/framelocals_mapping.h,sha256=-SYjo9iLMvQpS4xH6nC5gPzTf1GSqsiTpsuO_64bfHA,292 +torch/include/torch/csrc/dynamo/guards.h,sha256=twvNLfRbEYeEnZg_0afjUi3QqXrN79FWWSPhcQ552rw,2909 +torch/include/torch/csrc/dynamo/init.h,sha256=Z7mSy66v0be8l9dl_jSqHqxaK8Qqqxj0lnv_sbHSA1Y,187 +torch/include/torch/csrc/dynamo/python_compiled_autograd.h,sha256=dFWRhXaGLZEKOxIRw_8aJK_qTrhZkEf8iUOAmg2a7PU,215 +torch/include/torch/csrc/dynamo/utils.h,sha256=u3d9P9HoSgcqIikwFQcT--wyHY3Fa0KykZBAT48u1Pk,518 
+torch/include/torch/csrc/inductor/aoti_runner/model_container_runner.h,sha256=AuBUuVoUD7NKPVg7QQwIXSe4hKHaVN9nnPm44PaRA9k,4066 +torch/include/torch/csrc/inductor/aoti_runner/model_container_runner_cpu.h,sha256=XgmR_Qd3JWNLa8vYG0sELcPbnfHbDNWpONlslyVSQfg,489 +torch/include/torch/csrc/inductor/aoti_runner/model_container_runner_cuda.h,sha256=T-t2cPRW9s8aeJRCk3tnq6GzrWGPzIpt3DRDpli3Krg,927 +torch/include/torch/csrc/inductor/aoti_runner/pybind.h,sha256=9qh4-fASrKhHGajRPRFIPbyarL0evSWaK-LeL2HaZf8,148 +torch/include/torch/csrc/inductor/aoti_runtime/arrayref_tensor.h,sha256=4Bt9KWZ_40V6Qol09nahNHt4n5AMEQWKqVUErUGlWwM,10616 +torch/include/torch/csrc/inductor/aoti_runtime/device_utils.h,sha256=fv5rFXUkRoU8uAEUAwIAbbc1w7WLiTdQ1-QFyIa00QA,1528 +torch/include/torch/csrc/inductor/aoti_runtime/interface.h,sha256=5CfC69ajsaTCtlp-IJNmdrkk6C9HSIiLMujcgv-hWhw,7549 +torch/include/torch/csrc/inductor/aoti_runtime/model.h,sha256=d9a6ycEet7E7Pn46-_26fiZnLad0cANh5sULqHAsVso,19024 +torch/include/torch/csrc/inductor/aoti_runtime/model_container.h,sha256=u1BgIbx_RhjDdxGBYVbrMywXSaNG3mLclA4L-zbG2gs,18563 +torch/include/torch/csrc/inductor/aoti_runtime/scalar_to_tensor.h,sha256=m_kAY0PxxfKue0KqrUCFsBmC5cNrFUqxk_JWAwDJ5KA,1434 +torch/include/torch/csrc/inductor/aoti_runtime/thread_local.h,sha256=HqZLPg6j39QLgB9za6bulfwtbx4FYlQW2_24kArRaPg,4226 +torch/include/torch/csrc/inductor/aoti_runtime/utils.h,sha256=IUNmQQBhTkC0Ymo-qvrVd2-LPop70xWw_ZIjEOY4Edk,4965 +torch/include/torch/csrc/inductor/aoti_runtime/utils_cuda.h,sha256=yZotqyNACdBkM44JRRPw5-Yo8sCv36BtTftVg_mccIY,1733 +torch/include/torch/csrc/inductor/aoti_torch/c/shim.h,sha256=vWNG5lrBMGVMT0mECsYYwnfLbgYtlaPRiaenu-PISwM,24808 +torch/include/torch/csrc/inductor/aoti_torch/generated/c_shim_cpu.h,sha256=230hdXa45-M95QcfFOpoQ7uNKT1godSSC3EwSeMCgXY,25958 +torch/include/torch/csrc/inductor/aoti_torch/generated/c_shim_cuda.h,sha256=2ucd8zciynQqquE4Gxoeya63yPVFJk2zqT-uURUYo9A,31276 
+torch/include/torch/csrc/inductor/aoti_torch/mkldnn_tensor.h,sha256=OkME3dFfr67cGRDB7702KRUIA1T_edv6_pd-50nb2Ps,371 +torch/include/torch/csrc/inductor/aoti_torch/oss_proxy_executor.h,sha256=4J2jgjXJNedHlf8ff8fB0MLE9BZrSDgPb-27K0IHoYw,2860 +torch/include/torch/csrc/inductor/aoti_torch/proxy_executor.h,sha256=Gy2JJroE1MUkL7L8QCMtNc9yRZKXM_Pefr27fP6Grr0,483 +torch/include/torch/csrc/inductor/aoti_torch/tensor_converter.h,sha256=OCZfVYzYTZjZDsGnnXFO8aOsP8CKI5lcXhQmC0J1U-A,985 +torch/include/torch/csrc/inductor/aoti_torch/utils.h,sha256=pAC61Szr0DOp_Nl7LldPZD6bmvXtkCOByDTaE8IEpmY,5906 +torch/include/torch/csrc/inductor/inductor_ops.h,sha256=HvweZA2MvxHR5BHI7YbsAvHN0ZEOaCvKJYTjEpdwdOU,1095 +torch/include/torch/csrc/itt_wrapper.h,sha256=eCw9u1gl7vzf92sSxqO7WrEc0rhmVBW5cydUc2r_c3k,320 +torch/include/torch/csrc/jit/api/compilation_unit.h,sha256=ZguKCbjv7RV5vsDRyaMkSvDuZtrix3Wv0VWjBlaMMKA,11697 +torch/include/torch/csrc/jit/api/function_impl.h,sha256=brARpK4Hs-vD6A_ekTL6pMIUImVKZTK9wuRUJ73M8qs,5645 +torch/include/torch/csrc/jit/api/method.h,sha256=sS0T0BQZ4ZZiO0lGh5xdmsW_Q3vIFaGUNWH0O8770BA,2370 +torch/include/torch/csrc/jit/api/module.h,sha256=I4X7yMZYVISjI0yjEhdH4LElJSPwN60eE7HTA0IvoJE,23545 +torch/include/torch/csrc/jit/api/object.h,sha256=K63_KwB67PpQxBkbSFIGf7Em0plBS4y3whiVFjhsyR4,6063 +torch/include/torch/csrc/jit/backends/backend.h,sha256=xDYuoy_lXkz4x7kY0SaBVjG5VFun4DcW3X7m2LLPVXA,4060 +torch/include/torch/csrc/jit/backends/backend_debug_handler.h,sha256=woz0f7Pjk6htyfyrnTOdsqb4xvh-CYXocUcIzqZlzGE,6356 +torch/include/torch/csrc/jit/backends/backend_debug_info.h,sha256=dLArTRoaGulvfZIMzDV6iw38zJCJLbJePxPcW71YNLA,2338 +torch/include/torch/csrc/jit/backends/backend_detail.h,sha256=3Vwqochwvvj1Hx_gOf2fQX-tIB-LyLfQV3d9bVffXFI,1104 +torch/include/torch/csrc/jit/backends/backend_exception.h,sha256=TkQle7sRjMI5lqbkadTp9Sme_CyI8iMWJyQSOPK0mDw,2085 +torch/include/torch/csrc/jit/backends/backend_init.h,sha256=oO64G0Ay0hdQtUeGSgXccDAWxSZ49bw_0wKNahGjM-A,277 
+torch/include/torch/csrc/jit/backends/backend_interface.h,sha256=hMNGdVeLdZuOSTPMlTlqoT1hnWrq7UuwsOLcGr4jp_8,1184 +torch/include/torch/csrc/jit/backends/backend_preprocess.h,sha256=VfwbX1X0kyR50iKhCqbsdrp8a43AYaEMpIC6VxD51DA,438 +torch/include/torch/csrc/jit/backends/backend_resolver.h,sha256=fcuqo_lthD5E7OM7nY57kLXUNzMTiMcAe0GPpr1L9qE,277 +torch/include/torch/csrc/jit/codegen/cuda/interface.h,sha256=wT2NC-AX4H16CeKIs8moB7revz07jSlZ8LtaoJ13xdU,1929 +torch/include/torch/csrc/jit/frontend/builtin_functions.h,sha256=N9lLDDymkcH6Da5WNOcr7RL-gu-yv4y2u56n1tLwLOw,215 +torch/include/torch/csrc/jit/frontend/canonicalize_modified_loop.h,sha256=FK3cjZ4Dp4FwFgHDW6XN7GfYgeIv2L_Z2P4V572Ew4w,287 +torch/include/torch/csrc/jit/frontend/concrete_module_type.h,sha256=5cH-x2pChi5b8zJW-47IkwNNA3kxxqdE92zXIs0JPPM,9037 +torch/include/torch/csrc/jit/frontend/convert_to_ssa.h,sha256=lbJSrtkGjBiipyC7GDSd8a4iw_TDOE4aC0nhXpXU0hs,302 +torch/include/torch/csrc/jit/frontend/edit_distance.h,sha256=JCUWjpEFwXDYl48nOdCMyZ_3IGUP2M9LoCjYvc3Azes,229 +torch/include/torch/csrc/jit/frontend/error_report.h,sha256=KYK4IrnyVl9-dEakj1Ap153X6XHq_KSAQ-ZAuZxJcVk,1436 +torch/include/torch/csrc/jit/frontend/exit_transforms.h,sha256=S0lwPjF5_06Niw5HYQVnhMwsv-c0ahnflhPGzPrMdbw,193 +torch/include/torch/csrc/jit/frontend/function_schema_parser.h,sha256=rGadv7t5fd1mmK3tIJWdCUqHJYtJh8krExvzyab2QnE,804 +torch/include/torch/csrc/jit/frontend/inline_loop_condition.h,sha256=jctM26T7Cxk6xtthR8arllVR0H-msVzR6jcQ5WKX5Nc,327 +torch/include/torch/csrc/jit/frontend/ir_emitter.h,sha256=bWP7TwoYtdRDx4AI4e8jeBGTwOxsYdSmnAja4NioybE,516 +torch/include/torch/csrc/jit/frontend/lexer.h,sha256=gK_ex90RvGE11E3DGqv_MgElIAg9u3le8eiVjNiDPxA,18932 +torch/include/torch/csrc/jit/frontend/mini_environment.h,sha256=crwmw6iPa-TzpfsL7u0ylAal7Kj8EZkOx1pxGLGqRWg,1375 +torch/include/torch/csrc/jit/frontend/name_mangler.h,sha256=zAYp8zADBY1NzqeBU5GQY3Bgx9w-vJu1d6OMT8ZFZQA,630 
+torch/include/torch/csrc/jit/frontend/parse_string_literal.h,sha256=qn2aHuN4QGPl9aT9SmJ3NIYzvGvJKNkFbT6Jaikm4Fg,2294 +torch/include/torch/csrc/jit/frontend/parser.h,sha256=7r8AmpY3GJrlv_czDl1WGF-cJbeQW-RjnwxRp5dhC8I,649 +torch/include/torch/csrc/jit/frontend/parser_constants.h,sha256=P7gjNKCi3sW3PKmaNc4yqXiXLhYRoHAvbxQowN-Z85w,137 +torch/include/torch/csrc/jit/frontend/resolver.h,sha256=g9m8n-yNKYP4YErU2WJlknjp4h-jFY9mcNeCeZpWUjg,1957 +torch/include/torch/csrc/jit/frontend/schema_matching.h,sha256=Nq5izZL-h6NS2AUXSEd30-gWwv3xMjEQs36AaYCy6-8,2108 +torch/include/torch/csrc/jit/frontend/schema_type_parser.h,sha256=hfRMpl6HdaN-pFVW0kccuHdcULUPGFWfcWrscz65dso,1201 +torch/include/torch/csrc/jit/frontend/script_type_parser.h,sha256=P9YeUXXqqkTbnBXFUOpxKtY2hfOqd4x2mwV1DPxwDr8,1580 +torch/include/torch/csrc/jit/frontend/source_range.h,sha256=SGw2I1PJyGm-HsAIo_FMNf9uK6NA4QlZTkeeVEN5aZI,12839 +torch/include/torch/csrc/jit/frontend/source_ref.h,sha256=VgA3KBnZ7lm8xNVcgTugwBQEoIfQrxL-y2Kd1ulfSVo,1288 +torch/include/torch/csrc/jit/frontend/strtod.h,sha256=Vpx33Po6jFb8YysqU5b64P-1Y6HBKvuuhdgRZz9dD9A,216 +torch/include/torch/csrc/jit/frontend/sugared_value.h,sha256=3WfRvjoOoq9C2XS8-C4JTCq8fM58n6x3vhwUQpdAhj4,27902 +torch/include/torch/csrc/jit/frontend/tracer.h,sha256=0MzAeecmnfek7K6w_AVrcW4BjdVCRE8Az1OJuABAuBw,12730 +torch/include/torch/csrc/jit/frontend/tree.h,sha256=2BoWIu756sTFTqrdcZgUSSKprkRnEBshBOEkb2ywVc8,6599 +torch/include/torch/csrc/jit/frontend/tree_views.h,sha256=RHk_vwV_bSub6mGsz1NLoeXfOtEv8D5QyRQ4r9_H8-c,37139 +torch/include/torch/csrc/jit/frontend/versioned_symbols.h,sha256=tLssSRG7fdJ4BgcZ2sUqSUBOYY-TNYbZXx1bNjCiGhY,595 +torch/include/torch/csrc/jit/ir/alias_analysis.h,sha256=_y261a7NixjCrs4ZKKZsILecjYFeTrvG03PuxiVfObA,12832 +torch/include/torch/csrc/jit/ir/attributes.h,sha256=UabCM8WHeFZ1Y4gF8qCnWFPjHj53ZoTQh_-W-RM-b4c,4904 +torch/include/torch/csrc/jit/ir/constants.h,sha256=gtzaew4jNBbirPIy1vj4nwlVI9Pn19ph2DTu_X9LqD4,2008 
+torch/include/torch/csrc/jit/ir/graph_node_list.h,sha256=xfVDMfQRcyU5JSF0wgrU2_-nwopQhAceRUJDfRquTKQ,6354 +torch/include/torch/csrc/jit/ir/graph_utils.h,sha256=HxQcilBWyFeP0H1SG1IAbSju0OcauktL3N72rtteEcM,504 +torch/include/torch/csrc/jit/ir/ir.h,sha256=prhGPSu5qiAMrviIR4yKcDxkEJ5MuhAvaE0e306LG9Q,54186 +torch/include/torch/csrc/jit/ir/ir_views.h,sha256=KedK8xniQe7Z6i5TFZbPZHJtUG-NWX16ooyhTYEx2DY,4623 +torch/include/torch/csrc/jit/ir/irparser.h,sha256=_Z9wSbbRPtJGO5esO1eskSL7gb29rYg3w90YM1rLUSc,1096 +torch/include/torch/csrc/jit/ir/named_value.h,sha256=sONGq59HHPd4lL8E_LbgWscQMFBM1_9WF837lklVAzk,2396 +torch/include/torch/csrc/jit/ir/node_hashing.h,sha256=e7BcYSSzRUdHouCPuHwoudhE40CNJxJ7WBxymUldYx0,265 +torch/include/torch/csrc/jit/ir/scope.h,sha256=05-wkq6x6su8FVbkG-s6ZEDfgENBFjOsNXPa6tbpGlM,7148 +torch/include/torch/csrc/jit/ir/subgraph_matcher.h,sha256=XTz0-hJJuX17RZE_-t0o8hCbPf7OUWlL5NObCjw8wRs,3126 +torch/include/torch/csrc/jit/ir/type_hashing.h,sha256=9nytJGHdiD4rJsWZc-rD_IU8PbKvBChPHry-SsRv1qE,434 +torch/include/torch/csrc/jit/jit_log.h,sha256=AnnkSiZH0NhggkbmGNbqo-_T6eVf295o_jhMDaT6hv4,4773 +torch/include/torch/csrc/jit/jit_opt_limit.h,sha256=XJpbnGQnkJtquSPeb7x0KduePq5nER1diRowR5s3Khg,1381 +torch/include/torch/csrc/jit/mobile/code.h,sha256=En5_S6Bro3ljAHuxfjVD74WArspp_P1pLD0NwOAPsj4,1078 +torch/include/torch/csrc/jit/mobile/debug_info.h,sha256=asNWlcmEwER60l_6g5nJD4izXdAcogTX4loA-QGEG5c,2205 +torch/include/torch/csrc/jit/mobile/file_format.h,sha256=oWiWOdIO3UCbR-UA0M6kC1gi5CWrbKJIKL8_r4ss4qo,6597 +torch/include/torch/csrc/jit/mobile/flatbuffer_loader.h,sha256=JLk2cOwGWU4WxiQwgJkgWa9fbWwmFbTJGb2Lx2EyOro,4963 +torch/include/torch/csrc/jit/mobile/frame.h,sha256=mZ01u-2-cRV5hCBHVEPZ1m59SWudn1v5RY_lLKl0YFI,811 +torch/include/torch/csrc/jit/mobile/function.h,sha256=AeJi_59lhezOHPCMwhtGY2_DgXGPCxG8FFMcRmY8jdI,2900 +torch/include/torch/csrc/jit/mobile/import.h,sha256=EmILgMSBf0Nh76yKNvdD1fkZk1Yw7DpbTCEwq8FSbv4,3840 
+torch/include/torch/csrc/jit/mobile/import_data.h,sha256=rrwfgQfByxiy-sLwD39YL43UQ_-DvMDWv1KUjV765Xc,995 +torch/include/torch/csrc/jit/mobile/import_export_common.h,sha256=Mn3lS3DQL0enRVC9MzgXDl5ZQQhyUlthe4-1Hr9yuGE,477 +torch/include/torch/csrc/jit/mobile/interpreter.h,sha256=rRURKeJqZ3rN3yk72s8lSW2yZigCN3DdRC9b-HF2HAM,638 +torch/include/torch/csrc/jit/mobile/method.h,sha256=PxGPyD6Ob6ic-D8gyrwjosvKLTpfD2aK2_4WxbXB7Rc,824 +torch/include/torch/csrc/jit/mobile/module.h,sha256=EbPRDu1mzN7PWBQo_Fnu7pBSv6r6LGdiynSW1NFAinQ,5921 +torch/include/torch/csrc/jit/mobile/observer.h,sha256=GhQYLhyUPbiIkVzBO2PcZRyo44G8yIoVwtLEzZ5iaLY,3637 +torch/include/torch/csrc/jit/mobile/parse_bytecode.h,sha256=vEgtBoHpKaQ0YfR3CznfZIy5_NZQqK34bMlva0r-hXM,740 +torch/include/torch/csrc/jit/mobile/parse_operators.h,sha256=KErayQj-WQw2Q5wBA4Bxy71eZscQesCOLNGvlpd9C8s,709 +torch/include/torch/csrc/jit/mobile/prim_ops_registery.h,sha256=PASxNKpx71Vx2SbToMG_NCujQs4dMvQIQIgdiG379kE,595 +torch/include/torch/csrc/jit/mobile/profiler_edge.h,sha256=G0cmlkQTIjYi-eHDyksRLlzt1jfDcUIA3UnuXPDnWLY,4488 +torch/include/torch/csrc/jit/mobile/promoted_prim_ops.h,sha256=H9ce41Qp8BFZIGNFQbhHAx_60pb3ji0YgrSwKbwJR48,1043 +torch/include/torch/csrc/jit/mobile/quantization.h,sha256=GkNn9-1ZofH63_IP9Dj6K6zE3broXIey1v9uP8uYT5w,1238 +torch/include/torch/csrc/jit/mobile/register_ops_common_utils.h,sha256=-IoUdDv2f0xWSNWnllkmcyQbReTLtsbbNMBczzDk7ws,1658 +torch/include/torch/csrc/jit/mobile/type_parser.h,sha256=SHCUMNas3tT6nlP66FwPGAoXo0rYsUKhaLCVaM7amqg,1443 +torch/include/torch/csrc/jit/mobile/upgrader_mobile.h,sha256=X7HNrKi5kXMm5YOc3YwN3mFnJloaPrIcAUVYTybR4Nk,905 +torch/include/torch/csrc/jit/passes/add_if_then_else.h,sha256=SqzsU8rP9F3Vqay5CWuT7YaLtmDnhP2kJDdgAjR2Ws0,163 +torch/include/torch/csrc/jit/passes/annotate_warns.h,sha256=fPa2pkQCGnlzw-e9q5rk3diP-c92wlfpcCFxmZpo5zQ,167 +torch/include/torch/csrc/jit/passes/autocast.h,sha256=LDml9aHIr81KJeXT0sQvKWF51QY3idDSvsTQo6guAX8,242 
+torch/include/torch/csrc/jit/passes/bailout_graph.h,sha256=ad8Y1rPYM1wAQdoqnBtCQKfB3y6PXRqRay3XCtO6t0E,1090 +torch/include/torch/csrc/jit/passes/batch_mm.h,sha256=48oVZGLKM07Co5ACKE3rCiPU-L5j_T0-WBrCjuiPYFk,131 +torch/include/torch/csrc/jit/passes/canonicalize.h,sha256=KdpN14lVQAASxOPl3mUsYAICqytdFaJw3D-3TivkM1k,467 +torch/include/torch/csrc/jit/passes/canonicalize_graph_fuser_ops.h,sha256=0KZ8gzL3zNsxdMcaKn69fgjmQacBjDnoO9LZUdPNozA,145 +torch/include/torch/csrc/jit/passes/check_strict_fusion.h,sha256=q-liogijqECywbsOAZ5szYGRp1XUbmGJWLIg3ehgqSU,166 +torch/include/torch/csrc/jit/passes/clear_profiling.h,sha256=GpqR36A1Ins8W2FLiDvW8rn8WzPH9t4-skIeQg8KlVU,468 +torch/include/torch/csrc/jit/passes/clear_undefinedness.h,sha256=eWi0gIwFbplRZTeyq_Ibvkwv2BYjhBFZBmP42vLv9J4,850 +torch/include/torch/csrc/jit/passes/common_subexpression_elimination.h,sha256=BGrSGphxkxrEdHVVWj1A0RhMRh5aX0Y2-o4HQyyWDrg,162 +torch/include/torch/csrc/jit/passes/concat_opt.h,sha256=2KVY2Y3TMdWBcyS_cvuENvq0yOMCjF0XFjD7Ds_QP2o,525 +torch/include/torch/csrc/jit/passes/constant_pooling.h,sha256=nF_RzkunPKRtM0PxSI8KC-_CKWvY_h_nnd9I5oS19m0,145 +torch/include/torch/csrc/jit/passes/constant_propagation.h,sha256=l_9XDRrWqiKKBO2D7CCXfmBSjdz2k5rgUdraw1doHXs,1289 +torch/include/torch/csrc/jit/passes/create_autodiff_subgraphs.h,sha256=5kVblBhnjxqtWX5IKevPTrdOWoI3vn1oIIcPifFh66c,513 +torch/include/torch/csrc/jit/passes/create_functional_graphs.h,sha256=zK8VjkR7kBYtphNK1XlAlRY-hpW8IJK6-pbBbbAdWrU,284 +torch/include/torch/csrc/jit/passes/dead_code_elimination.h,sha256=m9NaVt10OXyfTM6pOrV49trrF9TveUr_RD_MWdSk-d8,1559 +torch/include/torch/csrc/jit/passes/decompose_ops.h,sha256=_8xtoLSnuTFCKsC3ygCm0sCv9ySqufbXClFfRYGI14U,136 +torch/include/torch/csrc/jit/passes/device_type_analysis.h,sha256=2a1paFDp2VO8LAu2XFNYzFyRtuxTn2ESkVOvOnnGa1w,242 +torch/include/torch/csrc/jit/passes/dtype_analysis.h,sha256=B9T6BLHBhnfRrCG_tGWrWlf3IkiD7aCzfjx1IA-6THk,389 
+torch/include/torch/csrc/jit/passes/eliminate_no_ops.h,sha256=ntVyB4-DqePQ41UUDtANSL-26sOfU_BOL1z4nBSNjHA,492 +torch/include/torch/csrc/jit/passes/erase_number_types.h,sha256=LtBRyI6TMzIXTgA2WGQDkCirBNAl2L0K_XT_Od0eUyc,788 +torch/include/torch/csrc/jit/passes/fixup_trace_scope_blocks.h,sha256=siqmFD6fqb6r4w40g3oDqBsgNb0Dnjw609ZyaSVb8LA,1648 +torch/include/torch/csrc/jit/passes/fold_conv_bn.h,sha256=f8AqfbcNSwB4W0BdEVKumrwbWw_P1NDKyDyeIjb5ZyA,970 +torch/include/torch/csrc/jit/passes/fold_linear_bn.h,sha256=qzyUzlYV0zNOckcwDT_YaSqsYuKcBh6ZY3b7rET2VIo,666 +torch/include/torch/csrc/jit/passes/freeze_module.h,sha256=nFx8e_M2AjJElhLpfa47FeDmOIhYVZb0Pz23xH1rAJY,1219 +torch/include/torch/csrc/jit/passes/frozen_concat_linear.h,sha256=wgVhANKP7mWQXpwL5WqEklkUXXXG29XRSFy3-ksbmiU,252 +torch/include/torch/csrc/jit/passes/frozen_conv_add_relu_fusion.h,sha256=x_VBteq6eOYHonm-J4nMrfWcj8cBPTs77NuCA2oZTH8,304 +torch/include/torch/csrc/jit/passes/frozen_conv_folding.h,sha256=8Kt-Wf-I8P0tu6Nm5iwQdm6-vEQf2uPLseIonSXpbt0,847 +torch/include/torch/csrc/jit/passes/frozen_graph_optimizations.h,sha256=cufZDZ87-yO-38nNYwVAmmiW4QDlBKjscZinyQJDTjQ,441 +torch/include/torch/csrc/jit/passes/frozen_linear_folding.h,sha256=bpFNgW4uQFGhLuqqo-u0V-ecigUdcj4iS8xKplozeKw,345 +torch/include/torch/csrc/jit/passes/frozen_linear_transpose.h,sha256=83eH-ogd9UGm2cI7Y73Zwwvt9vmEgqLK3dp5FWa66Sk,261 +torch/include/torch/csrc/jit/passes/frozen_ops_to_mkldnn.h,sha256=l5fGupNo6vpH28iG6m_LwbLLVUy6ur1ubSGDGFAnQ6k,389 +torch/include/torch/csrc/jit/passes/fuse_linear.h,sha256=ifSEjZDMEgrzwlIWc1LymM3PIXKIZLchfaWTT9U5cGA,743 +torch/include/torch/csrc/jit/passes/fuse_relu.h,sha256=W1Yh8ZdLNfYHjNA7q1P_U6ZQSHzkx6DdSutcf_Et-wk,248 +torch/include/torch/csrc/jit/passes/graph_fuser.h,sha256=PlrCfCFXusmx71htAI5tcwJFBTEI-n5bnJtbWORxBmo,1226 +torch/include/torch/csrc/jit/passes/graph_rewrite_helper.h,sha256=r4QxNlqBRsvmIlli5Wp01br1cpvlT1T-8AF-hTsskBM,1735 
+torch/include/torch/csrc/jit/passes/guard_elimination.h,sha256=ifoI_eEFuQzmsLr0Fg-TOuPcT7_4bpDzfIumVVi5yjg,351 +torch/include/torch/csrc/jit/passes/hoist_conv_packed_params.h,sha256=1FbeHqN5nWKU4JwX6UvJ-tpyoDqn_ZnMJnrRjxgAFuQ,186 +torch/include/torch/csrc/jit/passes/inline_autodiff_subgraphs.h,sha256=xOn17EMKsN9kTjDMq-nerxssJiTsplQDFaZjxfvYQpM,250 +torch/include/torch/csrc/jit/passes/inline_fork_wait.h,sha256=9C3SsJok6HcxmZ8EU9E1kkvekx6GBBIJt04Lizu5ixQ,522 +torch/include/torch/csrc/jit/passes/inline_forked_closures.h,sha256=HtmSYANlcwPCSg6CChxfZed_rbLhKhwMmM623CzYN-A,202 +torch/include/torch/csrc/jit/passes/inliner.h,sha256=lM2XE7KkZLwhPz6Eq0e20S2cxLdWkkZWTZ7raId2eUQ,229 +torch/include/torch/csrc/jit/passes/inplace_check.h,sha256=GxWaSePuolrvA_MNjy-HQYINuXX8VyrJOw1OXMEXkTc,136 +torch/include/torch/csrc/jit/passes/insert_guards.h,sha256=GtpucbqQnAfUNZaNk2XdP83qmg6N_tJqamW26gi5OOA,414 +torch/include/torch/csrc/jit/passes/integer_value_refinement.h,sha256=f7sxiTI0QQfQHfJWrzDuPk7z-s25PLWvTu69ntOQIl0,209 +torch/include/torch/csrc/jit/passes/lift_closures.h,sha256=tDmxkDUMgSKE-icmGwNVngQR37fSuSmgkCG6d_m-xV4,197 +torch/include/torch/csrc/jit/passes/liveness.h,sha256=kkQ_gJdjAI5UfHyo-p7D2iLmbBzmeG6utM--OfjZoR0,625 +torch/include/torch/csrc/jit/passes/loop_unrolling.h,sha256=bWg1UxAMJJI8DYH4fKxAoIZH9AYlEFOzfuhISqj8mD8,981 +torch/include/torch/csrc/jit/passes/lower_grad_of.h,sha256=2plAYjg7fEESZf8MSb9NXAVWWynf2bX7PRZ54pL6piA,323 +torch/include/torch/csrc/jit/passes/lower_graph.h,sha256=7tiixKEGcAhiCA9WtGitm2NIRFNVfTWPvZXSX37d-Z0,725 +torch/include/torch/csrc/jit/passes/lower_tuples.h,sha256=l8lohwGRY2iri9rt2VEYa1RzCGRC1hJWAhxlQrm3Mpo,641 +torch/include/torch/csrc/jit/passes/metal_rewrite.h,sha256=gI23EmH1oEFnwkNqNzcz5figxog77K6mxYYzeoeAECM,581 +torch/include/torch/csrc/jit/passes/mkldnn_rewrite.h,sha256=P3NmShFkexY9WpJcIN2BEYPrGbiJtcEAUOrxfcESLZk,605 +torch/include/torch/csrc/jit/passes/mobile_optimizer_type.h,sha256=FkImh2z2c2osMwsZ-dCA8wQysofiETWhhZMbcDg54Ek,237 
+torch/include/torch/csrc/jit/passes/normalize_ops.h,sha256=WGodCS7W4mE-iuNWeg9auaEb8N_qw2_Q13B5LkX1bjI,511 +torch/include/torch/csrc/jit/passes/onednn_graph_fuser.h,sha256=TjR7kSM7pDJqEO3ce2ohPkXYjtKBnWdZscj8HucbLqw,1399 +torch/include/torch/csrc/jit/passes/onnx.h,sha256=qvFTNGzfgk1Qg_PjQCK1JTKJBiVja4OxSO1BFago1xM,832 +torch/include/torch/csrc/jit/passes/pass_manager.h,sha256=dTTEaB1MO0S3Dn2j6RSksfUb8Zn0rXWIjjwl-Di7k6U,4549 +torch/include/torch/csrc/jit/passes/peephole.h,sha256=TcAnx68gFYsDwg5KltP7-VrlmHWISj2VvVaRTNPy9Q0,482 +torch/include/torch/csrc/jit/passes/peephole_alias_sensitive.h,sha256=TgzvCyJ89Vqmj8uogJHuHeWHkho4pNdOmIyAXzdX-Ps,410 +torch/include/torch/csrc/jit/passes/peephole_dict_idioms.h,sha256=GVUd3kCbCfg1KzJbevwHJyRiqWV-ib6MI0J0DURz1yM,975 +torch/include/torch/csrc/jit/passes/peephole_list_idioms.h,sha256=l4W3EHa7glMKZkvJ-vyjrjvugUqd-CZX3F4Gfqfi8z0,1978 +torch/include/torch/csrc/jit/passes/peephole_non_tensor.h,sha256=NMWReecnowP8yShiTl4-_atPfpkDCNTRZBKGLdLYdBI,317 +torch/include/torch/csrc/jit/passes/prepack_folding.h,sha256=h-aHTSMo1X3R5_8g7g8joc1KKnsxFGZeIbaDjBNks4k,333 +torch/include/torch/csrc/jit/passes/quantization/dedup_module_uses.h,sha256=BO4QlaPSdH1I37kIrdUi88CDoLDm3MYaKY-7YjMFNGI,826 +torch/include/torch/csrc/jit/passes/quantization/finalize.h,sha256=QEDufBsyjHdBm0rYP-yTAIQi7sRdRLUAEc0UEWMmUjw,2321 +torch/include/torch/csrc/jit/passes/quantization/fusion_passes.h,sha256=uDbKWQfLMcdtJt4H-1h-V4JCT8mKzKx6cAtxb5FtxkU,191 +torch/include/torch/csrc/jit/passes/quantization/helper.h,sha256=fSrqDAU6DMqydDfuy_tUTqAW4g2_WTHbb7qk5kSJyuI,7484 +torch/include/torch/csrc/jit/passes/quantization/insert_observers.h,sha256=7z-o9x_nyG6_pgqnmsUnIWNP7dC8QvDUIgV0LtgPpJs,2351 +torch/include/torch/csrc/jit/passes/quantization/insert_quant_dequant.h,sha256=LHatNCh1b6fas9UPE0x9UxoAGJyoEWi1rxdSat6Olys,1450 +torch/include/torch/csrc/jit/passes/quantization/quantization_patterns.h,sha256=9jqgw4ptf0-ZoNlV-iucRJTO8TRE4An-6u8CLEM-380,53727 
+torch/include/torch/csrc/jit/passes/quantization/quantization_type.h,sha256=UDgAy32Y3JJQUhZYoYo_qCA__SUbIdUThbBdQqGfOPw,358 +torch/include/torch/csrc/jit/passes/quantization/register_packed_params.h,sha256=XbD6rCZJx8Df4z6XnY-C1M4wNdaMOLuwxJCf63Jd2nA,514 +torch/include/torch/csrc/jit/passes/refine_tuple_types.h,sha256=Jux8D7hOzk34w5wP7svDq5arMN8xOz4A_M2Zl6E4VJI,242 +torch/include/torch/csrc/jit/passes/remove_dropout.h,sha256=KJkElpJYo7RIVQWOu0G0WCSIfR7WoeE7M_wgAnxPrZE,255 +torch/include/torch/csrc/jit/passes/remove_exceptions.h,sha256=SEB_5z3U_AHKWJJEsvAbOJAhPlarOnPt0kX68fnDq-A,929 +torch/include/torch/csrc/jit/passes/remove_expands.h,sha256=lutROSrK3K-Gy4Qh6oOXS33_-gTQRU119nGxRS3WiG8,143 +torch/include/torch/csrc/jit/passes/remove_inplace_ops.h,sha256=TIJotciLbSDLMNi7zrVAuRpD-XQKXNtlZovbxEtaGvY,271 +torch/include/torch/csrc/jit/passes/remove_mutation.h,sha256=1XDTabWwOaTULo96tzWZ68_J9AmF5mzu-G9z_oD0Yag,2637 +torch/include/torch/csrc/jit/passes/remove_redundant_profiles.h,sha256=1loSYnsacPiKPMBqt1WbzrwW6qPAyf-FVSz9cfUz70U,237 +torch/include/torch/csrc/jit/passes/replacement_of_old_operators.h,sha256=iGGFQbsPevD6vlwTk2dIlvbYEzrUJOY2NRqU9btVKgc,434 +torch/include/torch/csrc/jit/passes/requires_grad_analysis.h,sha256=GRWqIyC00uXNC3gAfrhjAS-5hk-qp71glZgns900s-8,221 +torch/include/torch/csrc/jit/passes/restore_mutation.h,sha256=iRuv29JV5ock1gZzIgUgsGVie9Ir9lSuQfd27FjZVEQ,1801 +torch/include/torch/csrc/jit/passes/shape_analysis.h,sha256=C27liVgubg0g1TkLxAtRX1I3GGrgPyYKcpsrh_NoDI0,1077 +torch/include/torch/csrc/jit/passes/specialize_autogradzero.h,sha256=pkBLCR8ZA3_uYb1Ptn7uByaAW_8CXsh5LoJJpGFDUbw,631 +torch/include/torch/csrc/jit/passes/subgraph_rewrite.h,sha256=Xv10XCG8o5cLPg4zzRl0ml8SEubnzo_lxi1nchuOUFU,4087 +torch/include/torch/csrc/jit/passes/symbolic_shape_analysis.h,sha256=4EELltaN0oxAI2YOh0kvYZGHUAj9wkanqMXCHOrLvpY,2078 +torch/include/torch/csrc/jit/passes/symbolic_shape_cache.h,sha256=qY3pwISkYPH68C9I5fxPixIT4qxixyqooZkUW45SyEU,1574 
+torch/include/torch/csrc/jit/passes/symbolic_shape_runtime_fusion.h,sha256=RvR8HJfziaAAxgdSb6umHJfDdiKKNwoFd6ISGIInqk4,2339 +torch/include/torch/csrc/jit/passes/tensorexpr_fuser.h,sha256=Ner7w8Sd78Ueem2sfVghxP2Twsd0g0dKnM8bjQrYi4o,2600 +torch/include/torch/csrc/jit/passes/update_differentiable_graph_requires_grad.h,sha256=Z_LPzJWI0GchVkPjIFgiEhgpl9GPVC0mysXthliHtxY,715 +torch/include/torch/csrc/jit/passes/utils/check_alias_annotation.h,sha256=pZafsHGeDV3cPTfJJubzUe20nGjb8Jsfe_pU7qaEFCY,612 +torch/include/torch/csrc/jit/passes/utils/memory_dag.h,sha256=rjBsyJTzehZLCE-3BNjVsgvc-0t736s9kT0_frbpgPg,6416 +torch/include/torch/csrc/jit/passes/utils/op_registry.h,sha256=UoA-APAe_5BCuNBWKN-zGdUUbhF3jjEYIMwCezuasE8,1033 +torch/include/torch/csrc/jit/passes/utils/optimization_utils.h,sha256=rSeEsn9yWTqYUxLSiz-XOHfN3qmECg2fVnrhAl_Vef8,244 +torch/include/torch/csrc/jit/passes/utils/subgraph_utils.h,sha256=E_OlnMAVussYOgV2HzPZJ5Z0WBPOzS9Coqw5vDk0jlk,2411 +torch/include/torch/csrc/jit/passes/value_refinement_utils.h,sha256=puNwkucAZRWiPG9tI9t1dsEQlVuF211DITDTCNgh_yM,2607 +torch/include/torch/csrc/jit/passes/variadic_ops.h,sha256=nWdQbS91o6Xr27wuy8rPbvuTFZPPCQUqDw5W2NMiiVA,885 +torch/include/torch/csrc/jit/passes/vulkan_rewrite.h,sha256=JYOjWoqD8ZUR05Ez-e_gulm4_-r4lrgBY6It8TGenoI,673 +torch/include/torch/csrc/jit/passes/xnnpack_rewrite.h,sha256=jBXEUXGoNWqkLKqm0HcLvcyjZyvAFoTEUSnf7YBfiII,795 +torch/include/torch/csrc/jit/python/init.h,sha256=pi94AbCTzGH47EW-u9kPDdRazf2RXwVKISlt6S3p8dY,143 +torch/include/torch/csrc/jit/python/module_python.h,sha256=tlWhhVI7JWUKevZinVonNYTVrrdZ2x3X-X-jbdnF9D4,960 +torch/include/torch/csrc/jit/python/pybind.h,sha256=CeSp8cETxE6ZVooaaM8z4qzq8A0i_BOGV6Edf9IEjqA,7914 +torch/include/torch/csrc/jit/python/pybind_utils.h,sha256=BoeIoRzPlTD_dkIyBbbSZCsNVTpA6OVOqkven_nuo6E,43464 +torch/include/torch/csrc/jit/python/python_arg_flatten.h,sha256=-4yxueFeKLSCSDJO8zWm5nZfIWwNItwux0kXf_7jxZc,3527 
+torch/include/torch/csrc/jit/python/python_custom_class.h,sha256=8ZyiBXOmSpVC3-7DETl_KcxJPPELJ4U-KlvBeXhRoHw,412 +torch/include/torch/csrc/jit/python/python_dict.h,sha256=9sxsFzVy99MLXfdG9W8J4wdV5_hAA-ChCYLvi-U3phI,3385 +torch/include/torch/csrc/jit/python/python_ir.h,sha256=NQBztQd2JZXnRRNYU0TE8k8dFxq92xhWQFxZXRMrAPw,1693 +torch/include/torch/csrc/jit/python/python_ivalue.h,sha256=NXRe0BMpPBqbtyjgsW03WxvAajLKxgA4ypbBJ-9SYNU,3281 +torch/include/torch/csrc/jit/python/python_list.h,sha256=F6dZBibdYl7WuV7Z0i_lVih6q6Ju5LyapmwYUGlH69g,5495 +torch/include/torch/csrc/jit/python/python_sugared_value.h,sha256=WYXLWO_IHCIA2dQHaWhXQYwH-kBFC2y9pLn1kA8CjSU,11336 +torch/include/torch/csrc/jit/python/python_tracer.h,sha256=OBYJohDlSm9YuWZ0rbnMenLbY-2_W1ySEUkPJPD9Xkc,1218 +torch/include/torch/csrc/jit/python/python_tree_views.h,sha256=yJkTxIBDBgYEkX-7zAjVE1ACKKBNXvBU9Kiq885knT0,150 +torch/include/torch/csrc/jit/python/script_init.h,sha256=-nyejY7PHUrX4O1LEs7W48ZifDvd_oMfucwL2vAndyg,152 +torch/include/torch/csrc/jit/python/update_graph_executor_opt.h,sha256=pdlOCRdZR5VsId6If4yEOtsN1zNtLDzzFT7fncH-B-U,185 +torch/include/torch/csrc/jit/python/utf8_decoding_ignore.h,sha256=rndEzjMH5efB3RbaWMpfujvNQUe3o2_mesOzreaJHPs,179 +torch/include/torch/csrc/jit/resource_guard.h,sha256=oMbaYMyJLGgPCEjIV4y5bAYsaYZ7_4p0QIQXTkawpHw,440 +torch/include/torch/csrc/jit/runtime/argument_spec.h,sha256=GthnYQuh3c-2fpFhV5bjPyoAq06YiWcO4CBPQJ_BSNc,16390 +torch/include/torch/csrc/jit/runtime/autodiff.h,sha256=5apSR8BAAdifHGPPFeCsS4GpYstVS1EvAYbK_lsaQgc,3930 +torch/include/torch/csrc/jit/runtime/calculate_necessary_args.h,sha256=WQujo6Vy1MOEeHzKhO04AYoOAAztIHnIZJ2Om4eqg9Q,2281 +torch/include/torch/csrc/jit/runtime/custom_operator.h,sha256=eKEDGUFIrgJPlHsk_XiittC4gnRPq1XZwjpkvFOiLYo,1054 +torch/include/torch/csrc/jit/runtime/decomposition_registry.h,sha256=FVhRmGP1tlkuOhoGxINQCRF3lko7zp_9r-yqhQpnJ8A,1045 
+torch/include/torch/csrc/jit/runtime/decomposition_registry_util.h,sha256=vJ_K-8X9IaDOmrmwHNS79NxyyBlPdNcwH4NUbuvVS34,261 +torch/include/torch/csrc/jit/runtime/exception_message.h,sha256=HhV725zokG4hM07w-JpB6F2BLLoCoxwCjcYg2-Ccduw,614 +torch/include/torch/csrc/jit/runtime/graph_executor.h,sha256=AFJ3gpqmt0ioX2XSzHl-pzpLY3RZek3_lcFiqgVnPd8,4713 +torch/include/torch/csrc/jit/runtime/graph_executor_impl.h,sha256=xNv6Say3ma6wDV4mkbol5wbdBCsL5xE4LsrhOBjZ4LA,4027 +torch/include/torch/csrc/jit/runtime/graph_iterator.h,sha256=pSfARVvHC2XCw_p83DAuKxfLgvSJOgvkAB5dFXVE8LQ,4938 +torch/include/torch/csrc/jit/runtime/instruction.h,sha256=z1R5fLzff85MErdJopGVILbT-7Jii2ROWeRMoDSBoJ4,5621 +torch/include/torch/csrc/jit/runtime/interpreter.h,sha256=uGr_VUAeoSsOOX26yH7mVkAjesXuimt_sRSXE1_hDqI,5005 +torch/include/torch/csrc/jit/runtime/jit_exception.h,sha256=O0Wa-C7GGWdeuJAhSyc3nEdSi9JvGI5FsWFugl9fgN4,1169 +torch/include/torch/csrc/jit/runtime/jit_trace.h,sha256=lDX6zpQ7WCm0zTKNpj_dezBpR59DI64d0k6UaJ8eLIs,207 +torch/include/torch/csrc/jit/runtime/logging.h,sha256=To2f1sDodmQdE--QPGKuml5S-fOfs5u4dZwwpK8s3TY,2619 +torch/include/torch/csrc/jit/runtime/operator.h,sha256=8WXz07ix85MlCSQvw_oOnFAI9IOBvAwDF36WVSMMs7w,11738 +torch/include/torch/csrc/jit/runtime/operator_options.h,sha256=T3eF5cJyP0Gfv-8-8UDZQ67GYAznVFv4_S5MOr6QEow,164 +torch/include/torch/csrc/jit/runtime/print_handler.h,sha256=7BhckncLpvB1v1OViPjayXW-gMIDSyJ3HWc8ghENgBg,308 +torch/include/torch/csrc/jit/runtime/profiling_graph_executor_impl.h,sha256=cNEfjyNkfCqnwjMQMjqMGzk3pb75d16uh5s9VXoFdeo,2718 +torch/include/torch/csrc/jit/runtime/profiling_record.h,sha256=j5cXWqFSi4hxM1ncv4Z6u6kHOVf3c2f1MWlU1rqHq9Y,8543 +torch/include/torch/csrc/jit/runtime/register_ops_utils.h,sha256=glZMXULTXNx7cLbndy-U8d09aEKfmBd0cCMwQ4SfiC8,42568 +torch/include/torch/csrc/jit/runtime/script_profile.h,sha256=uryrw9gOOF1dB-0p-T2BROgTrDesmznjHrrv4cWf3IA,2617 
+torch/include/torch/csrc/jit/runtime/serialized_shape_function_registry.h,sha256=TOfPLKlvlilf71WrOfkPCaCH0aFbnBCxFHSvxSAmEbE,356 +torch/include/torch/csrc/jit/runtime/shape_function_registry.h,sha256=E15CTZzgUVWvdzS4WBzzBLp53qHpRGIqi43STGjiymE,243 +torch/include/torch/csrc/jit/runtime/simple_graph_executor_impl.h,sha256=yMJXCsP6d25Q4tSpVc1aUNHbeuWT8Eq2Jw4Jzk8LSqE,643 +torch/include/torch/csrc/jit/runtime/slice_indices_adjust.h,sha256=-7bG6aoqU0uEXd4kXn8UNX63r3EETlWK8tZqBYiHhxI,782 +torch/include/torch/csrc/jit/runtime/symbolic_script.h,sha256=MqvYc552QYwDJwu5Y3d5VQ-Ty4_PTN6l7Z79AMZ4yKo,562 +torch/include/torch/csrc/jit/runtime/symbolic_shape_registry.h,sha256=wvBFq-NqUqFJTSxmlZLPzWkyPzByX8_XWSC0RHLYTDE,2802 +torch/include/torch/csrc/jit/runtime/symbolic_shape_registry_util.h,sha256=7p1cL2YS8jZuGeWI48XXETdslPoD9tm0_0FU_p-kWVQ,351 +torch/include/torch/csrc/jit/runtime/vararg_functions.h,sha256=4YE78NgzdL8MxckVRrej9O0B_JXQkcKJhxbazpoHXBc,1147 +torch/include/torch/csrc/jit/runtime/variable_tensor_list.h,sha256=BLJu7wftFO19tgS1o_8g9Vo58vKoBqtdLiH0pYMCbdw,527 +torch/include/torch/csrc/jit/serialization/callstack_debug_info_serialization.h,sha256=yVyHpm78281r4Lddh46hDRmWXA5BpUyUK70ume17TeA,2604 +torch/include/torch/csrc/jit/serialization/export.h,sha256=IA9c3v9dy6cYlZ7xOfohBhYi4Xo44J2dz8hAUKklkoc,11540 +torch/include/torch/csrc/jit/serialization/export_bytecode.h,sha256=guqXICFsEir-QQ5VQS0KW2ff1gGh1o71NY_azN8D7oQ,1368 +torch/include/torch/csrc/jit/serialization/flatbuffer_serializer.h,sha256=ymIQvsoO4Ce7W8t1tnIla5VIKJ4ijwzLDj5Y8bXTvo8,3043 +torch/include/torch/csrc/jit/serialization/flatbuffer_serializer_jit.h,sha256=ba6YMg9NRZIVrlgxuY11mdJDX2cMYms29cjLJGJhqGE,172 +torch/include/torch/csrc/jit/serialization/import.h,sha256=CdhrwoaPvQkM8yQ0_KfDRWPPxRR8a60kNf6WfqqqRbw,5009 +torch/include/torch/csrc/jit/serialization/import_export_constants.h,sha256=zkFq2CpJrrGZiezd_BKq5vXJRVWvD_ZBoKC_yIX0ngk,645 
+torch/include/torch/csrc/jit/serialization/import_export_functions.h,sha256=GwP21rWSdHviVfH-qz05DrAQcKjCR_hKWLJ2gmlQNsg,389 +torch/include/torch/csrc/jit/serialization/import_export_helpers.h,sha256=JnAtp7YRC7X-Z3KUtm6A-lNk0bkRwhWEILm5ic6JliA,651 +torch/include/torch/csrc/jit/serialization/import_read.h,sha256=A2R8oRS_-CXTjOYDe2oBGcSiT1KXilq1E6rqYkJkr8w,832 +torch/include/torch/csrc/jit/serialization/import_source.h,sha256=50CV7J9Onkp5JEVO3LN9HI6pOgU27R1_5jBwXSwsBxg,3424 +torch/include/torch/csrc/jit/serialization/mobile_bytecode_generated.h,sha256=lYcBLkLIoeIVAno96rXwi7X5KMe3oAmjJaiodkZbv90,98594 +torch/include/torch/csrc/jit/serialization/onnx.h,sha256=y9W8Vp2prGtLkqktDi18KSp0mhta55dsGDoDpmH9ono,513 +torch/include/torch/csrc/jit/serialization/pickle.h,sha256=Ad87x2XtbWJT1lJ7fWorSudOVhbiqz6GYQLOSyPeXsk,4690 +torch/include/torch/csrc/jit/serialization/pickler.h,sha256=sTlBlFyCNkQNgSz-nctQ6Dx7zYX3j82Ie35NG06qLB4,13708 +torch/include/torch/csrc/jit/serialization/python_print.h,sha256=x828WdBN3KGyh1Qtx82eTZrDMUqtrwz6Gkyt2Eh6-NI,1309 +torch/include/torch/csrc/jit/serialization/source_range_serialization.h,sha256=qxu7nIQKG3P4s8rPC8fSJS-mxGEYeISHpCgAS0K5QCI,1666 +torch/include/torch/csrc/jit/serialization/source_range_serialization_impl.h,sha256=5ygYiNxXUq9rzzgJWN42_0XNVApa1-9G0MN3XFuuvTs,680 +torch/include/torch/csrc/jit/serialization/storage_context.h,sha256=7Y4_iJw9YtZ-RZ9yvx3_MB8H5ldo-s1q3kRhhvpmSCI,2487 +torch/include/torch/csrc/jit/serialization/type_name_uniquer.h,sha256=DYiGL3VcdzINSbyvH3FPXrLjeYQFj79-Y4cJtlxwrMI,754 +torch/include/torch/csrc/jit/serialization/unpickler.h,sha256=ywkgo0ukwykCoVamHJhRl-wqeaYhnf8D50Wcuz8LGLo,7566 +torch/include/torch/csrc/jit/tensorexpr/analysis.h,sha256=dqXdu0WroMBA2aE64FKskiD4lNKle7D_ks0WYESb8Kc,8982 +torch/include/torch/csrc/jit/tensorexpr/block_codegen.h,sha256=yh8ltX490BoS0jRJAhfUkDjoypHaNdOzMauwCrFsgTA,4290 +torch/include/torch/csrc/jit/tensorexpr/bounds_inference.h,sha256=hscYJSnWhGqcB0-8wKoPUYes7L3DWHVHtDTmjHAgejM,2220 
+torch/include/torch/csrc/jit/tensorexpr/bounds_overlap.h,sha256=ENmHynThSlBmA-c_5QaUab62DMl5mWsOSnFcm0iDkoM,4537 +torch/include/torch/csrc/jit/tensorexpr/codegen.h,sha256=l3MyPUsOfblSbrtq0HTt37aadBVzIkwQL_S_YWRJdXw,7468 +torch/include/torch/csrc/jit/tensorexpr/cpp_codegen.h,sha256=C2OoTkNUamFMZh-_DM-XOnwqdap56Qo-6hdwqctOVWM,2347 +torch/include/torch/csrc/jit/tensorexpr/cpp_intrinsics.h,sha256=XEzshVCUefNQLU-d7ZixlApMdgtvZ3_9_wsuO8zGq4Q,719 +torch/include/torch/csrc/jit/tensorexpr/cuda_codegen.h,sha256=LvAQ84ENqSwE40QIAaT9F4Nq2FyaQVxJaql1JBieDWQ,8234 +torch/include/torch/csrc/jit/tensorexpr/cuda_random.h,sha256=c1i6pdqn9m-l9PeMXeNlhW9qiE8FlLSVj2DV-ioYB7c,2642 +torch/include/torch/csrc/jit/tensorexpr/eval.h,sha256=0SqXx5kJrUBrOrr3_w9zDbHVW4w7oYSQkZSMhU3BkDg,9841 +torch/include/torch/csrc/jit/tensorexpr/exceptions.h,sha256=4JoBnkTIs0YuVYRx_SMr813zihk6YFZOADG-8Xhc0ig,3193 +torch/include/torch/csrc/jit/tensorexpr/expr.h,sha256=5XD-EnXe_f4UzzuAlaLjn7aN9y4lyad-p4cUSpz1gyM,14183 +torch/include/torch/csrc/jit/tensorexpr/external_functions.h,sha256=uFLpmHcshyIOJ8wzZeWRmfzlyIu4AS328YJafwBSObk,3434 +torch/include/torch/csrc/jit/tensorexpr/external_functions_core.h,sha256=5M6ljT1lNcKQNBt6fLnRQfr-E9Ui8W5yNjiCRM6trgo,453 +torch/include/torch/csrc/jit/tensorexpr/external_functions_registry.h,sha256=relM7bB7SFM1ZoOtylwY0Gm4CxI4ADoezq-uYoejM68,2306 +torch/include/torch/csrc/jit/tensorexpr/fwd_decls.h,sha256=KhQdFKH2ycPwywWQRLCIzjqPtMGHGQAg6pj2THne0Mc,3041 +torch/include/torch/csrc/jit/tensorexpr/graph_opt.h,sha256=3hTzsJhun7izGmDp-OjSeISFCc6nqP7e9q5HfZiq6qc,4434 +torch/include/torch/csrc/jit/tensorexpr/half_support.h,sha256=PjNR8uWlCVZSo95jmK6bXPZVp06tgk0ln37Ng2GoqFY,5889 +torch/include/torch/csrc/jit/tensorexpr/hash_provider.h,sha256=D3wvOUI2JsSahzsDO0qP_pd3yJZMCI5cVlMsNtzvJyk,7534 +torch/include/torch/csrc/jit/tensorexpr/intrinsic_symbols.h,sha256=xFMYwrG9FzKJKOWVZSlyRaRn3YPR8SuDDAo9Md0GPVQ,420 
+torch/include/torch/csrc/jit/tensorexpr/ir.h,sha256=RpvvJUUn5iqBKbLu6pnP7IVryHTxN1DN8Qh6Dqw3rRc,23024 +torch/include/torch/csrc/jit/tensorexpr/ir_cloner.h,sha256=bYNpAzFd6pWA7zs7WxoXfcCbNT3zM9GVXVMr8ww3MyA,2394 +torch/include/torch/csrc/jit/tensorexpr/ir_mutator.h,sha256=-L5qlFI8j-K5_OfQWe-_LCsvNxtnA5Jx2RQuMjvgSfc,2370 +torch/include/torch/csrc/jit/tensorexpr/ir_printer.h,sha256=C30dQljUcLU_FUvf5JqiHBRTi6yIGka-S7ho7XknWUM,4132 +torch/include/torch/csrc/jit/tensorexpr/ir_simplifier.h,sha256=D09DOvpEDEVJZmZSOPzUlurjnnrXSB0hU8Q_sROC3KA,15258 +torch/include/torch/csrc/jit/tensorexpr/ir_verifier.h,sha256=Gz3E5n-cdZS4b-Gx4wUlK2ERxDiyKnha5edLPU1ejcA,1345 +torch/include/torch/csrc/jit/tensorexpr/ir_visitor.h,sha256=h8LzJsF7__3r82pTCKQa5TyUxHIL0VrPLZy1GSqHVnw,2187 +torch/include/torch/csrc/jit/tensorexpr/kernel.h,sha256=u2cYcfwJCSaVWMEkEmeSrJxujhfj8zyosDNFjmbV1iU,13392 +torch/include/torch/csrc/jit/tensorexpr/llvm_codegen.h,sha256=xkc8U7lUwg9BS5lHO_hIzrbh6hAixZTGGkd4PCS0orI,3839 +torch/include/torch/csrc/jit/tensorexpr/llvm_jit.h,sha256=kvQmcGAeqrXGtkqslvwf6hSEJHY_Ii9EEBhIdGGZy7k,1940 +torch/include/torch/csrc/jit/tensorexpr/loopnest.h,sha256=SmifRvgsMGDgoAhWgQtgQscegexh5dWm0pG_M_rvJ-4,21707 +torch/include/torch/csrc/jit/tensorexpr/loopnest_randomization.h,sha256=B4UQ_zRjQAbOzMqTmIO-nwEffSuuC83XuTjI4W4L76Y,309 +torch/include/torch/csrc/jit/tensorexpr/lowerings.h,sha256=Iq4-ttSxTzkWmHRN7htrJosQ7vTUgnmRAyLdmmdAitA,1280 +torch/include/torch/csrc/jit/tensorexpr/mem_dependency_checker.h,sha256=lwu_rOUnjG70SxRruj7yjNDcMuXjKMdOj4k-HG8GweQ,13461 +torch/include/torch/csrc/jit/tensorexpr/operators/conv2d.h,sha256=0ZZGNCxz_im5dCpjqx9SyCL-YJlP5yXxyFApYlnJle0,2943 +torch/include/torch/csrc/jit/tensorexpr/operators/matmul.h,sha256=1gndiibjAkC-XTXGvdZEsmYV12S_F7HqxXvockrDn4E,653 +torch/include/torch/csrc/jit/tensorexpr/operators/misc.h,sha256=ksF1cQfgYYxV_o3ceicVffTquaX3oxMF-cM1QnAcmV4,3288 
+torch/include/torch/csrc/jit/tensorexpr/operators/norm.h,sha256=45HCMbRK89MIwlXshrEGepftihmLnN4KCJ6nCuawNGc,423 +torch/include/torch/csrc/jit/tensorexpr/operators/operators.h,sha256=uUMEOYPpfSOrKklAjsN4YdQD7agTgSCUzMni2PeH78s,471 +torch/include/torch/csrc/jit/tensorexpr/operators/pointwise.h,sha256=gjrqWWrejZNe8ayJPE_uDLRtS-swtNZrTelHq6Ih5Z8,3209 +torch/include/torch/csrc/jit/tensorexpr/operators/quantization.h,sha256=uw-Bn1pmfxPqg6Ykf1kcI4GI16Jf_G7jDk1SWtEbjXM,5582 +torch/include/torch/csrc/jit/tensorexpr/operators/reduction.h,sha256=2K82V0gIkYDpbPOHMZ0SlYlVYEvNP7Q9No4uhVPvFPY,1155 +torch/include/torch/csrc/jit/tensorexpr/operators/softmax.h,sha256=tOkx_YTIAy78POs3G8GxUv-byZ39qEK_bBKZ5OruwUU,371 +torch/include/torch/csrc/jit/tensorexpr/reduction.h,sha256=fO2nOB_ePO17G083elLRj1SHIgn_op9oasHL8E6HIuI,8866 +torch/include/torch/csrc/jit/tensorexpr/registerizer.h,sha256=1QqjCIb0PAhcQtsc0rEQHXqLIwzP4ENtLU75lCozDtg,12521 +torch/include/torch/csrc/jit/tensorexpr/stmt.h,sha256=GlKenwCiWUMvhE8teCKs73qI0p_ZMyMqaiUbuDuJQ40,23895 +torch/include/torch/csrc/jit/tensorexpr/tensor.h,sha256=Ec3W8U9wOjE-JTYlPNuPZtUA7bJsemi067BVSJZT6UM,10499 +torch/include/torch/csrc/jit/tensorexpr/tensorexpr_init.h,sha256=0pkchsNPY_qjXdhviFyXdFCZzEF9Hzqnu30NbxyrF3M,243 +torch/include/torch/csrc/jit/tensorexpr/types.h,sha256=LszPlXpkB2Fr65gA39_OpG8NecjZIyD3L9mm7vaagFE,4284 +torch/include/torch/csrc/jit/tensorexpr/unique_name_manager.h,sha256=QLXiwan89GB0Afh4NDu2evGokAD8mCrOkWYppBaUeVM,825 +torch/include/torch/csrc/jit/tensorexpr/var_substitutor.h,sha256=5FaoDaztQTQWrL2VFoLe_eyJOu4Gfu-2zYKENx6w6AM,1660 +torch/include/torch/csrc/jit/testing/file_check.h,sha256=GfHH-By3ohBvZHkRvxE08DNKWMV3_Cz2InXbA8-UUAA,2568 +torch/include/torch/csrc/jit/testing/hooks_for_testing.h,sha256=qTVWQIZvaPw2xkzr_hbn3phdtT94Yc2_JCUEahUp1Us,603 +torch/include/torch/csrc/lazy/backend/backend_data.h,sha256=VQ_5Zm7jvFNYY9Vn3e-mkLk37hVolIl-YRC8Lo_-hEw,1205 
+torch/include/torch/csrc/lazy/backend/backend_device.h,sha256=ogM5fBmR2XuCeG5ZSg7HtcyJaGjpAHb7vOqs2XYfPDQ,2865 +torch/include/torch/csrc/lazy/backend/backend_interface.h,sha256=nZUXiIxY3eCfXqZsLQsB-B9KbDX49OkO2OWmvJX05K4,4846 +torch/include/torch/csrc/lazy/backend/lowering_context.h,sha256=y5t4aXVFCZlM6aZFZDB6NbTfi4JB2aIjiXy9DD0nsPk,3308 +torch/include/torch/csrc/lazy/core/cache.h,sha256=O4NjeVhn5TiLcRIUQ5mvTNPac0TVOC1NVPZ3HnRmzVU,3645 +torch/include/torch/csrc/lazy/core/config.h,sha256=wF70zxarRonAuvn3nofxeKY1NXILNO9sqPhLl5MwPUE,913 +torch/include/torch/csrc/lazy/core/debug_util.h,sha256=HPbvpVneHJrn43myzDe1i24-wMCg7254M8TNGdcz13Q,1299 +torch/include/torch/csrc/lazy/core/dynamic_ir.h,sha256=iCbqKMuQU0OoJK5S4tG_dE99jzH5Ldw94DrQtLwdZyA,1586 +torch/include/torch/csrc/lazy/core/hash.h,sha256=Hjy3fpai0xQlWWmjEtkRFAQMxHOY4RaMFqXXFT-qQCc,7721 +torch/include/torch/csrc/lazy/core/helpers.h,sha256=eSbVfBNkwt_NZat27zu0uUEgbYUWwT-t6_hw7rdjZUQ,2248 +torch/include/torch/csrc/lazy/core/internal_ops/ltc_ops.h,sha256=Y1dv99_hdJegQM59a1sn2UVVCzG5M81IwNGChLyXtXE,1492 +torch/include/torch/csrc/lazy/core/ir.h,sha256=yKMcXvo7Ys-EsMUh8VvOt5bSOV5SdKgylnB0t9AC9gc,8007 +torch/include/torch/csrc/lazy/core/ir_builder.h,sha256=nY6YnTW0Y8WC3Fk9YjQhrGZULqzTrDKboh2f-A6FiNM,4727 +torch/include/torch/csrc/lazy/core/ir_dump_util.h,sha256=cnOi7BbGSdYt29JaW0y7_2TveSXKllMuM3eF9hbPP7s,689 +torch/include/torch/csrc/lazy/core/ir_metadata.h,sha256=LV22oumhbrensXigzC6UuJjcycBENC--T3KNVebdgkw,1154 +torch/include/torch/csrc/lazy/core/ir_util.h,sha256=t2vTTSDYuZaljqHqrzx7YXPinZSjrgHQLdKtsimnWP4,1391 +torch/include/torch/csrc/lazy/core/lazy_graph_executor.h,sha256=TVdKCIs61x6jDNmftqdV2nnss-KbIxx0iDk6t2wh6d8,14859 +torch/include/torch/csrc/lazy/core/metrics.h,sha256=v5PGW2mAr_N0x_mzN_CiAUaCmn7MYmd0brsvOrwatmM,8017 +torch/include/torch/csrc/lazy/core/multi_wait.h,sha256=S9a59_NsDmrWvprDvDiiVDq-FjvFv7-hzgEat8g225U,1740 
+torch/include/torch/csrc/lazy/core/ops/arithmetic_ir_ops.h,sha256=fjyJJvOW1x0vED4C03fdt_imQkFs1A_2xP1SDQQpWA0,406 +torch/include/torch/csrc/lazy/core/ops/utils.h,sha256=RwVAGc_1DAG5aTpaHRntrYyFO-YPx1Bqfm3XcHq_M4Q,1010 +torch/include/torch/csrc/lazy/core/permutation_util.h,sha256=sUpKHJc5ZtY0fBV4r3hpJ_xzSjmX37TmvrXcrQ_UusM,1277 +torch/include/torch/csrc/lazy/core/shape.h,sha256=Q3qN4L5k0lr2e9yZzOTKkR8pJkU8Siv6m0HgFkhIyaI,2020 +torch/include/torch/csrc/lazy/core/shape_inference.h,sha256=_rX0HGLhXIzXYhruJETp6TABXdd3OT4Bo4zAts2awk0,15427 +torch/include/torch/csrc/lazy/core/tensor.h,sha256=O5AR6lQCls_xUEoA8Xher4zxwtnfMMdzUjELu6GxDLw,9439 +torch/include/torch/csrc/lazy/core/tensor_impl.h,sha256=CwYsxVEiQjuUW7MIL4qEmenLvKWy4MzVGH3J9K8Tbp8,1910 +torch/include/torch/csrc/lazy/core/tensor_util.h,sha256=KPvXHuIm0r5okpoETM2V7_HL4rYnq4rHr-bWhX2df2I,2561 +torch/include/torch/csrc/lazy/core/thread_pool.h,sha256=93Llb28b8IOCi4fzc2t_dxI0KbiosFaUBrfWR6bx-6c,721 +torch/include/torch/csrc/lazy/core/trie.h,sha256=QSiW5yPfjwjCk4b6T-2DxUXH3-cL8r7jMtF9LkvVYiQ,2217 +torch/include/torch/csrc/lazy/core/unique.h,sha256=32TD33Q7_cK6dsr67mz6T2QySnt_EvvFo6CeA86WvLU,1164 +torch/include/torch/csrc/lazy/core/util.h,sha256=URnFksK89cC-Bigqy4hqx2e3thjCeLAZnmCtDFhoa80,2819 +torch/include/torch/csrc/lazy/python/python_util.h,sha256=IV-EpJ-uDRJ0t-LvHl8ov7IL4oPshcsIXZbKc-DXgHs,340 +torch/include/torch/csrc/lazy/ts_backend/config.h,sha256=mAauhrHhWIzAJc-pHcjzFhaVBGuHXZUjiWL2G23BHt8,202 +torch/include/torch/csrc/lazy/ts_backend/dynamic_ir.h,sha256=RQWixJpb5piDUl5VaI1yL_uJXcEatY8YvwKY_WGVnEQ,2503 +torch/include/torch/csrc/lazy/ts_backend/ir_builder.h,sha256=gN6zcKfM0Kq2e-fpuc9ZwlUBYCMvsnBiTZqA152czo0,2416 +torch/include/torch/csrc/lazy/ts_backend/tensor_aten_ops.h,sha256=g5DsT3zcoF-9oEYQ37Amm4jdhL_YRHdB6qPJkqPCU2w,548 +torch/include/torch/csrc/lazy/ts_backend/ts_autograd_functions.h,sha256=LBqUJ_aJwGFV7IPtUzhnbyYhUpO59COTv9gBC_fW2TE,644 
+torch/include/torch/csrc/lazy/ts_backend/ts_backend_impl.h,sha256=G2aYU791b8hOnkBE6TZaCle0oKU1GoYp2NDnqm_EEUo,1234 +torch/include/torch/csrc/lazy/ts_backend/ts_eager_fallback.h,sha256=GW6xzyRRYLvdDf1XA78_nT-1bkdeDvPgYyeefhn3piQ,717 +torch/include/torch/csrc/lazy/ts_backend/ts_lowering_context.h,sha256=pPmx22bfOeJ5toBc_7Ey-QAYwIeJIKCoiU5_onXEXA4,4519 +torch/include/torch/csrc/lazy/ts_backend/ts_node.h,sha256=N91HrPG7tJEC1MdZv5Zo3R7jiZ2lzBK5QtSTisI7R3Y,3376 +torch/include/torch/csrc/lazy/ts_backend/ts_node_lowering.h,sha256=TUl69lpUcqybpxrYebLv5EDHgYIhXfyg26GvfrAOknY,484 +torch/include/torch/csrc/onnx/back_compat.h,sha256=zc5K7G4NO3cMvNW8Uz5xvoUNnw5G0sZfc0j5Xwq4i8E,1024 +torch/include/torch/csrc/onnx/init.h,sha256=ZXAqnv9mhvV3QHMdSwzKprQOSAa-rSi3pcABk0aY3a4,146 +torch/include/torch/csrc/onnx/onnx.h,sha256=9ghsm-HHPWO3NC6nNNCfEI1U_H2jZ55SULufWFKPa2w,507 +torch/include/torch/csrc/profiler/api.h,sha256=2zxTYEcugn1jicqDNnSJ84Lw1xsjlGMdvbmDl9coJCc,510 +torch/include/torch/csrc/profiler/collection.h,sha256=rNMMtEIZgonO2DJmO6veqR_x0h1D0T7hD7vc8jkQa0Y,20324 +torch/include/torch/csrc/profiler/combined_traceback.h,sha256=xwV1K_fHpnblNCdp_69zKLbw2PxESKY80gtskCzA0zs,2457 +torch/include/torch/csrc/profiler/containers.h,sha256=00nSZi8h9c8_dPTH6200AWKhaNGCzC0Uux9ZbiIIREw,5877 +torch/include/torch/csrc/profiler/data_flow.h,sha256=uPu6BgWne5oO3086kPY0GxXPYadRrfO58isOdOROVHY,3625 +torch/include/torch/csrc/profiler/events.h,sha256=xZLUo_qQi2CakFguv9GOAVmZJ_B0VSZJBMrwAqNBimM,1038 +torch/include/torch/csrc/profiler/kineto_shim.h,sha256=ppITS8Ajq2zFDgX4v-7PE0yRWfXvD39RDfkgZa0bIsM,4104 +torch/include/torch/csrc/profiler/orchestration/observer.h,sha256=_dUklnFb_fcPt-h5tFhjClCPMlLsn9PAa51s_AckHBw,5369 +torch/include/torch/csrc/profiler/orchestration/python_tracer.h,sha256=HhGqnp7KRRn6efk4UrY_b4CLAW92nDbz6XpsP23QrvA,1858 +torch/include/torch/csrc/profiler/orchestration/vulkan.h,sha256=nbVl5pgj47F2WqBW45wwyXLMIzVtg9MCftxONuxb-zU,851 
+torch/include/torch/csrc/profiler/perf-inl.h,sha256=9khZdhJrF2DvSd5H3J0m2y2_k1bUX6VX-vXuAvq_t30,1385 +torch/include/torch/csrc/profiler/perf.h,sha256=jHcjDFsVnsGYS7n0-K_IsWKwVEqTrK30X3w2PG3vgrE,2487 +torch/include/torch/csrc/profiler/python/combined_traceback.h,sha256=LX9Uf6zMXSTAXvE5vYGIPw825gjKEGZZ7UF2kQVBKZM,750 +torch/include/torch/csrc/profiler/python/init.h,sha256=z51YHsiaK1JwNfsq6qDRAj__XvLt4HfTwKH5_3R2FmU,1004 +torch/include/torch/csrc/profiler/python/pybind.h,sha256=S9-6up_radq5mGlPqVKOvduF8QoYPCVRwBLfObLQHWY,1251 +torch/include/torch/csrc/profiler/standalone/execution_trace_observer.h,sha256=OTUFGUGZ6YnqN-EH-Xdv94cWjA7Qi7vLilkvv5Wp8TY,628 +torch/include/torch/csrc/profiler/standalone/itt_observer.h,sha256=Uw0651Atw9iSKltiRYG9UFbPpgVz9bvllEKQROw5JVQ,224 +torch/include/torch/csrc/profiler/standalone/nvtx_observer.h,sha256=l1AKhCifuqehvjZdoZNkAweChgyu9OXtl3fkTD0jDB8,225 +torch/include/torch/csrc/profiler/standalone/privateuse1_observer.h,sha256=PC5uOTkJbl0EWguGaegnaOoJlMsAgnSF6vUxZ6XzsF4,1235 +torch/include/torch/csrc/profiler/stubs/base.h,sha256=mywaYsluaThccsb31UmdIXwXc8SENvAW6DQS6lgqrOY,1725 +torch/include/torch/csrc/profiler/unwind/action.h,sha256=Vbm-tE0c1tGZebsSXJuZ96WlqZOGlXsoLHvtqTOfBoI,1425 +torch/include/torch/csrc/profiler/unwind/communicate.h,sha256=BJKoqxTdZcVwY7l56xk__utDD4wEFPRCV0KjKw6VsFw,1926 +torch/include/torch/csrc/profiler/unwind/debug_info.h,sha256=WETmUNYx0OMbOlr4zPGmFX0eWEL_YYUpz30RrXOcxdY,9029 +torch/include/torch/csrc/profiler/unwind/dwarf_enums.h,sha256=Ojxlhbz2WvETGEnqFq4XH0Oi7d4zYg4nRU3LgyA6gew,1110 +torch/include/torch/csrc/profiler/unwind/dwarf_symbolize_enums.h,sha256=kXzFzct1qxjU74GzRt9vcELvA2pSO13koBjv5nvP39s,4658 +torch/include/torch/csrc/profiler/unwind/eh_frame_hdr.h,sha256=3sHjQ6t5RcY_agK1pygByKNhhxmcfvBU-9ttclO66Jk,2583 +torch/include/torch/csrc/profiler/unwind/fast_symbolizer.h,sha256=xvgN7u-98YzTMpUHpCladVJ9d5s2kDihDcyY99LLhNw,3297 
+torch/include/torch/csrc/profiler/unwind/fde.h,sha256=WADP6LRTTFFS_MazWYbhOWzbJIxsy8wU518TBZf87T4,11981 +torch/include/torch/csrc/profiler/unwind/lexer.h,sha256=7XOquT2X2eIa8Uh-GqSuyJjCo9nhdnTEdzFzLufhSf8,3882 +torch/include/torch/csrc/profiler/unwind/line_number_program.h,sha256=SNa2Q7sHLt5IFX7ZvvVi2pD52cVY5kfpeVJ_mH0rFJU,10633 +torch/include/torch/csrc/profiler/unwind/mem_file.h,sha256=ewrKCoKzRvpLKlejUEfYW8DAEItpLbwrNUxWldjyHhM,4364 +torch/include/torch/csrc/profiler/unwind/range_table.h,sha256=_7bUXWy3XEAAVjj5EMrxl2uChR-0BDB-7bqhU8HX__g,2109 +torch/include/torch/csrc/profiler/unwind/sections.h,sha256=oPfc-_rkklWO6m-F6VoKMc69E4isDm8f_gSqy64411s,3639 +torch/include/torch/csrc/profiler/unwind/unwind.h,sha256=_RIMx9qlo9Hn7MmVAzhUNL2ppFq0unGS6R9WptOAaIc,1133 +torch/include/torch/csrc/profiler/unwind/unwind_error.h,sha256=G7b3ObxTirwGw9EjNG342Ou09Qerz0PWjKdcjmAQ5Ag,898 +torch/include/torch/csrc/profiler/unwind/unwinder.h,sha256=wZtP6Dl2KcWNgrKtQPhHDbB-1M_BX_lv93ufj-nSyGI,2293 +torch/include/torch/csrc/profiler/util.h,sha256=NyxQnaXKM_nihGCw9ESR_-8tr5pqckXTj4EZ3xfh5o8,5759 +torch/include/torch/csrc/python_dimname.h,sha256=-Bk8OBU36eIs9dyAMtpnfXkfa5KgFVxxrLKjhAem5sM,214 +torch/include/torch/csrc/python_headers.h,sha256=ZjIaoShls4JWN3vgElKkmNlZuulOavbFlgugV5L7R70,649 +torch/include/torch/csrc/serialization.h,sha256=olcwp7eeOqqpLV_hqyjMf6FAn4ppF9vonr7Sflay_T4,681 +torch/include/torch/csrc/tensor/python_tensor.h,sha256=ygnOLM0OHKcJw8mDotS7Ysq62bk7tVtTH-SOZ5rsBx4,1102 +torch/include/torch/csrc/utils.h,sha256=LkiNTOWTIKXlBOaDkOA8aqWArrLl_cBRNgrlW_P4jXs,9389 +torch/include/torch/csrc/utils/byte_order.h,sha256=plSSZ-x5wrgptqnFupRtM6WpqTU82SRZA5Mbn9rDtZA,5700 +torch/include/torch/csrc/utils/cpp_stacktraces.h,sha256=MbynZ32tmwy-9y-REs2kHEfAzth_NVJKt_8qCYn4yko,230 +torch/include/torch/csrc/utils/cuda_enabled.h,sha256=1hv3GlF9I_yK7CTNCGJ1Re0jgcwUXsMFWFYlU8xFmWI,170 +torch/include/torch/csrc/utils/device_lazy_init.h,sha256=TVVdOyHkv88DdZCkmoTVOklld2GHA5Bvo8MrVSdmRFU,1770 
+torch/include/torch/csrc/utils/disable_torch_function.h,sha256=wrQ-qijlK9haBctyiV0gGcKsjekvZW5jNjwzpe5G688,1868 +torch/include/torch/csrc/utils/init.h,sha256=s3Sas-t_M5fl8aS8DXoZ7-4a53iCpvuvklhprvLx7YQ,193 +torch/include/torch/csrc/utils/invalid_arguments.h,sha256=dXhIUSgk21JztsfxdrsNWq40fQOVhPLmMWRQocy_q3M,302 +torch/include/torch/csrc/utils/nested.h,sha256=5OZ6lzhetvHDaJT-LMHB6e7M2whcimwDDhOZ-8gKogk,306 +torch/include/torch/csrc/utils/numpy_stub.h,sha256=1m2vyb-8t2lQGtylJZrcH2cw8nHX_OEtyKk7Q_jGg34,399 +torch/include/torch/csrc/utils/object_ptr.h,sha256=AUZl7WR4XxSyzIMahypE4_faOXCh6YoiXByY1uOZKxU,1574 +torch/include/torch/csrc/utils/out_types.h,sha256=45_LZPP_vy428wm05bdf0_j8EaDd7ekf0IHj2bVnDKE,320 +torch/include/torch/csrc/utils/pybind.h,sha256=8mUjU5gN4jDJHkroJ4_BmbnyTm6l6CrWPabnwa_gjRs,12948 +torch/include/torch/csrc/utils/pycfunction_helpers.h,sha256=4_LQ4gsIR9hC6vI7cHEW161VW8KxurlJyR3LvHQZxPo,385 +torch/include/torch/csrc/utils/pyobject_preservation.h,sha256=G0h2hb4Ou4yDrLX4UuNhp4-71hOioOugzTzqBilRFzI,181 +torch/include/torch/csrc/utils/python_arg_parser.h,sha256=vQXiQ3NoS1idDZwHWC4Igm9zq-IdiBD-_MpnN1Gfgv8,41069 +torch/include/torch/csrc/utils/python_compat.h,sha256=sHwJLq8n2GIn4ISgbWYr4iFOw91FguPs183GrDMN_zA,1056 +torch/include/torch/csrc/utils/python_dispatch.h,sha256=wgSvQqmVhE6o2nSBRSMzuUxXD3BykfXxFN8LrwyeWQQ,397 +torch/include/torch/csrc/utils/python_numbers.h,sha256=zxV0gz6FxOaIBjXKNio_PFziHIln6JxgqxaTceQ1HZk,5500 +torch/include/torch/csrc/utils/python_raii.h,sha256=cKNRfAaS4gd9eX-ZEXcrfkhbC1xQe_bJSCIYcS4MCUw,2658 +torch/include/torch/csrc/utils/python_scalars.h,sha256=MQcMiGBYMb-r-DfuwCyeulhsr7wL39gTlxDRIzafkTc,5561 +torch/include/torch/csrc/utils/python_strings.h,sha256=QVHz-mNlMXkYzeUJRNCz-VVOXJWfI5pvQMYeLk4YfCA,4361 +torch/include/torch/csrc/utils/python_stub.h,sha256=Nigc7ZGrniF0qdLn-Ra4KIicNWTBrgARkJYpxs6Ssek,56 +torch/include/torch/csrc/utils/python_symnode.h,sha256=jpmoMZr30KBWkNgaUDJbEZOuOa3iNVXd22tzZBrxX7U,9610 
+torch/include/torch/csrc/utils/python_torch_function_mode.h,sha256=4tAtyW8TmksBE7M0CmOQDNGQEAOaF33D1BiVIZ8SSFI,512 +torch/include/torch/csrc/utils/python_tuples.h,sha256=zJ4thzUsi2Gc1aOAviCv8r-cBAUPPmmE5n0G2jRauKI,701 +torch/include/torch/csrc/utils/pythoncapi_compat.h,sha256=zHuya17yA0NPrWNADChfl8sEz1GcRuvM8BbOGrY3K0M,19601 +torch/include/torch/csrc/utils/schema_info.h,sha256=FCNeuz7R9or4BDyYFZq0SirDtj4QN-Q7Ca9DWK6ZQp0,3718 +torch/include/torch/csrc/utils/six.h,sha256=HzpaQpIxOJqfhrEHhKyEXRGwzc_u79Iuqy_Kut7RVi0,1465 +torch/include/torch/csrc/utils/structseq.h,sha256=FqMWwrP01RtislZcWjq_AJP4ezz45bLXKMvu49_qQtQ,141 +torch/include/torch/csrc/utils/tensor_apply.h,sha256=22xkgkrqOnkINK9fW3550Ry7gdmbQNBcrE9VMcCa6rs,428 +torch/include/torch/csrc/utils/tensor_dtypes.h,sha256=STkoDxONbre5kkeVeeTp7ZjM0GQaXFoStb9FT5UKG2c,242 +torch/include/torch/csrc/utils/tensor_flatten.h,sha256=ml9PIt-3X-yGWJgw3MY3QfWDYHh_4dDrHnjv6HviZi4,2745 +torch/include/torch/csrc/utils/tensor_layouts.h,sha256=HllPBlHMJI52vdhnMUQABJVLHLzTXw94MX5bNq9Ldsk,69 +torch/include/torch/csrc/utils/tensor_list.h,sha256=NDU92TL1XjVG5lvOtiCbi2svqU8QJUYx6ncdLra7v7I,167 +torch/include/torch/csrc/utils/tensor_memoryformats.h,sha256=vSjxk-I3RkZ9DBChN-SPvhZM5hraH-r2Y0io4cblcWY,323 +torch/include/torch/csrc/utils/tensor_new.h,sha256=DxPRJA90vOiJ740uhnQYpbPA3C6kwEzDsUE0Mn2H7a0,4014 +torch/include/torch/csrc/utils/tensor_numpy.h,sha256=zWwuOXEl4xKnnBF7zc_hLyBaKLQEA-GYFhnpcqb64JU,713 +torch/include/torch/csrc/utils/tensor_qschemes.h,sha256=gjMlYXJP9UsN7LPlLM0DX2IjILfIjg8dV7OBCZlue7A,174 +torch/include/torch/csrc/utils/tensor_types.h,sha256=8zrEI8PbBZqd91gRsT3t-23m9oNPVTTcb8-c20LKxho,669 +torch/include/torch/csrc/utils/throughput_benchmark-inl.h,sha256=nqw3ftA-0GilDuDi6siwsR2czNqQ-r7Fa7EcVNIja4Y,5560 +torch/include/torch/csrc/utils/throughput_benchmark.h,sha256=3b-zuOO8QWceVCn0M_WiJXsFQQL8WkwnOFAETwZD1mw,6910 +torch/include/torch/csrc/utils/torch_dispatch_mode.h,sha256=JWJ7szDAC9e5mMBf2gTbHvuy77nn7XVQSgAWx2i01vE,1615 
+torch/include/torch/csrc/utils/variadic.h,sha256=upMO7qWiA0Tb9QS97J-uZVababBj3IcJqaXxwH_tLNA,3352 +torch/include/torch/csrc/utils/verbose.h,sha256=P_Dr_oOwiENVdRpzlGTBgJ4KOfl8PJI2ep2vCJeLsNg,138 +torch/include/torch/csrc/xpu/Event.h,sha256=GevWnT3ISB_N-aDQtFHkFPEU7rCcbdH6L1kkCvFeQgU,342 +torch/include/torch/csrc/xpu/Module.h,sha256=aqwqxUnsLYZ1g5nJeBmQYmOLSPJ6nPacKpQvuEWz9XQ,176 +torch/include/torch/csrc/xpu/Stream.h,sha256=Hbb8zcNcXj_JdslvzOROLYr77od5OpFGxFsocDiAaNc,437 +torch/include/torch/custom_class.h,sha256=2xWDkEg_RH53O79-PzHhGojd8MEo98QJUpexRgrTp1I,19852 +torch/include/torch/custom_class_detail.h,sha256=U1vtQ6ovxWyAORrxU_gtnymnLWIfopnYS9N3mRKkVFQ,7761 +torch/include/torch/extension.h,sha256=jIN4AnAwsUeV6tlDdnaZOlxuABYGNIEwSLtdbhwls9I,213 +torch/include/torch/library.h,sha256=HUUW6XuAYf3HQ8a_TymyyPGINoWeZXdqgTdHWDGGnoA,40694 +torch/include/torch/script.h,sha256=5qMfjbmTzP84r3BPgSRhTqyjMFGrsD54pxsE6C1e508,469 +torch/include/xnnpack.h,sha256=YqSOuuPy13xBmJ6t9KajyXpor5Emd3Mj2nPpBZ5yiwI,223141 +torch/jit/__init__.py,sha256=1QTV-Slt8tWFlI02tdpi3zLTR0cb5elcslh3vZeZhvU,8317 +torch/jit/__pycache__/__init__.cpython-310.pyc,, +torch/jit/__pycache__/_async.cpython-310.pyc,, +torch/jit/__pycache__/_await.cpython-310.pyc,, +torch/jit/__pycache__/_builtins.cpython-310.pyc,, +torch/jit/__pycache__/_check.cpython-310.pyc,, +torch/jit/__pycache__/_dataclass_impls.cpython-310.pyc,, +torch/jit/__pycache__/_decomposition_utils.cpython-310.pyc,, +torch/jit/__pycache__/_decompositions.cpython-310.pyc,, +torch/jit/__pycache__/_freeze.cpython-310.pyc,, +torch/jit/__pycache__/_fuser.cpython-310.pyc,, +torch/jit/__pycache__/_ir_utils.cpython-310.pyc,, +torch/jit/__pycache__/_logging.cpython-310.pyc,, +torch/jit/__pycache__/_monkeytype_config.cpython-310.pyc,, +torch/jit/__pycache__/_pickle.cpython-310.pyc,, +torch/jit/__pycache__/_recursive.cpython-310.pyc,, +torch/jit/__pycache__/_script.cpython-310.pyc,, +torch/jit/__pycache__/_serialization.cpython-310.pyc,, 
+torch/jit/__pycache__/_shape_functions.cpython-310.pyc,, +torch/jit/__pycache__/_state.cpython-310.pyc,, +torch/jit/__pycache__/_trace.cpython-310.pyc,, +torch/jit/__pycache__/annotations.cpython-310.pyc,, +torch/jit/__pycache__/frontend.cpython-310.pyc,, +torch/jit/__pycache__/generate_bytecode.cpython-310.pyc,, +torch/jit/__pycache__/quantized.cpython-310.pyc,, +torch/jit/__pycache__/supported_ops.cpython-310.pyc,, +torch/jit/__pycache__/unsupported_tensor_ops.cpython-310.pyc,, +torch/jit/_async.py,sha256=cnn2jzHr_1m03YOtfBjwy9xkIeCdh50WPJLC9f5_UPA,3818 +torch/jit/_await.py,sha256=UG6mM5L4tF7NRDT5BfFX4njoM1B_EhdoMT59KsyHF-8,852 +torch/jit/_builtins.py,sha256=OR7JsPngBjeYf7YAlBEbJIdwrJkHYYROcemd-65zedE,6636 +torch/jit/_check.py,sha256=CaatU1N-IyF0LwP_Cu9no73lrDBvELIuzIIidxcqzRQ,9386 +torch/jit/_dataclass_impls.py,sha256=yCanfEDRrvACTarJEg2ycCpioRsCO1btnWNj5ZAjzEc,6684 +torch/jit/_decomposition_utils.py,sha256=IfRghppgh-F5WCwm077re21qWDRkImHgahltbOAH5Ho,402 +torch/jit/_decompositions.py,sha256=cJ8W-VmclE2KCAsITYh7ltvI1yuu8n6kfKUuw1bSMkU,4391 +torch/jit/_freeze.py,sha256=l-gUJYOBKF2R8XYe7hQzyWslX18s6hOZdMZfLaT0L1s,9461 +torch/jit/_fuser.py,sha256=zm_1qXzYIcJJlzyr9lVvyukVPIQa_Yi7FkC6347NWA4,7114 +torch/jit/_ir_utils.py,sha256=G4D4xLtjDnQS3Imqe-C9Y5f6TfSbh8uIYvIzdfycUlo,677 +torch/jit/_logging.py,sha256=jWaacYoHjd_Cao_1hzyPKYiADS7cb7tLQc1gRkZblUQ,257 +torch/jit/_monkeytype_config.py,sha256=R226u0P44-yAU9bvqr8jKh9H5Gvo9ta58JQQ5vYP_fY,7273 +torch/jit/_passes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/jit/_passes/__pycache__/__init__.cpython-310.pyc,, +torch/jit/_passes/__pycache__/_property_propagation.cpython-310.pyc,, +torch/jit/_passes/_property_propagation.py,sha256=WCXn-60y8tZe_nZyIj6rSyqg4Xa77y3mbYnU_GSCXOc,1463 +torch/jit/_pickle.py,sha256=BmT17dBHjphOVtXo72QJGjIoeyHu-AQ5JUt9Eas1vrA,982 +torch/jit/_recursive.py,sha256=_y0bb0vjNZ6063OND3AefBUA5aHmISf9eLbRx99PDWU,42638 
+torch/jit/_script.py,sha256=k_h4AYkVEK-tWIz4iW3Q2FW9l0eO7SUnlG_z6RFYcLM,65078 +torch/jit/_script.pyi,sha256=QHLsncRwczz3Qu9feGBVmFLZQ_K_4kTNW6sIEvUM1t8,9488 +torch/jit/_serialization.py,sha256=NpaBfD1Kw0tbJuLJdMVEvFhOy-p2LnQv9wpFMC2Jt2U,9467 +torch/jit/_shape_functions.py,sha256=nzRYgZnOtxPbrvmvMwe40NCEOGSeDfNTe57Vz490TPk,45376 +torch/jit/_state.py,sha256=XiyrcngQ9Si9w87C_5YnFg2AhbC_qdabwLbnMaTXtSo,3770 +torch/jit/_trace.py,sha256=o3njSByv_5OUkAMf0_WKfKNy2T8hGAEAdmRtMrtkNd0,58430 +torch/jit/annotations.py,sha256=U38QiWDTExOh5irQwtEeUXmW_oGYqWXoxZRR-WjphfU,17804 +torch/jit/frontend.py,sha256=L4KdpuQzjaEs8uFyFRsl45jW4H46l8ncGtebrqR8etA,45125 +torch/jit/generate_bytecode.py,sha256=v3IXa8oSxdBqFezFD3ugULneKR9uPA14fQ1XMxeinyU,1066 +torch/jit/mobile/__init__.py,sha256=z7Iui_TP_TAKQu1FwYR0jBAg0yDUy1vkPPe_7r3L1Cw,8517 +torch/jit/mobile/__pycache__/__init__.cpython-310.pyc,, +torch/jit/quantized.py,sha256=_qdmUEm74_VB2z0Dipf61uuvyefSkNAz0kLSPGxFMmA,3193 +torch/jit/supported_ops.py,sha256=LBjTlzuKy5IX78pGywt1eufyrdAtvJv9SoWUP23qRwE,10267 +torch/jit/unsupported_tensor_ops.py,sha256=7xdqme_Wb-FnQ5NDWNw9nI-qyOLCbQpns0VPYoKTres,1998 +torch/lib/libc10.so,sha256=QMyvEE21Pc73C8MrmwB-PvgFyY8p-DCU33PTNkNRqG0,1446953 +torch/lib/libc10_cuda.so,sha256=Qoz7lHVj5PxI0OG-awYgS5QOW4yZ53KmdcTWfhef6yo,699937 +torch/lib/libcaffe2_nvrtc.so,sha256=sRi9CZFJz776nIduSmIm0BK43Bl4tNeHkYWISQsUgng,22929 +torch/lib/libcusparseLt-294d6944.so.0,sha256=buurH5KFwEFn67zkzVLnyYtuZBgZJD0-XOe0ZqcmN9A,212602473 +torch/lib/libgomp-a34b3233.so.1,sha256=nrAQIbtY7DaSXa61pWYGv6tVgzLrX1EZ6ZDkwgjtJYE,169089 +torch/lib/libshm.so,sha256=0pSkZcyUjS6IxsCGRy1SL-tMz3BcbRVSi3ek80FB2a0,52745 +torch/lib/libtorch.so,sha256=hT5Cf6f0Uju94830xLGgkHW0KOIj5ISN1tKSteqKc4g,196169 +torch/lib/libtorch_cpu.so,sha256=aU0G4owI19q5nkvAVc4lTVpjsegDywNGNSN-g9hSo_I,436134369 +torch/lib/libtorch_cuda.so,sha256=dhsUrK-4sCAR4y0RvUN7Y8yj_ogrnEoCyJ_QHXOMy2o,899666713 
+torch/lib/libtorch_cuda_linalg.so,sha256=AvRuhDBBH_bkM0ByLmxJV1VPcxgz683QC1nA6obEEv4,96295585 +torch/lib/libtorch_global_deps.so,sha256=-AwDRKEjUFH5WvYjK9oZ6MxRfq9eojVuVCawidZcsd0,21161 +torch/lib/libtorch_python.so,sha256=V2kNLSsOINKdMCTfgXrlrRL8Aymc5gTDWfJep3Z9SAQ,27887377 +torch/library.py,sha256=4Ct_OB5NMqFJzfPEFwgW5Ig4QHtMPAX_n8LEKInqC40,52995 +torch/linalg/__init__.py,sha256=zg7N8dsecgWfhzfgYcaaQF6kLdIZoaAigk4XBye5gL4,114095 +torch/linalg/__pycache__/__init__.cpython-310.pyc,, +torch/masked/__init__.py,sha256=WljBC5z-aMQ3EB29GNw5BVnONrx14zh1Cgrz7fqJ-1E,928 +torch/masked/__pycache__/__init__.cpython-310.pyc,, +torch/masked/__pycache__/_docs.cpython-310.pyc,, +torch/masked/__pycache__/_ops.cpython-310.pyc,, +torch/masked/_docs.py,sha256=Jr-iD8MG_4F6Ip_yxboE8Or1ms-bo3ay_6pdggbfV44,49468 +torch/masked/_ops.py,sha256=4kqEkPJAsyelD18zh2YTQXGzZTDJeMQkx9iZdj7PQ9U,65704 +torch/masked/maskedtensor/__init__.py,sha256=K62inlHZAKXpPohbbHnlJnrzIQqYpzb4SaHTF4s_hWQ,359 +torch/masked/maskedtensor/__pycache__/__init__.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/_ops_refs.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/binary.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/core.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/creation.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/passthrough.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/reductions.cpython-310.pyc,, +torch/masked/maskedtensor/__pycache__/unary.cpython-310.pyc,, +torch/masked/maskedtensor/_ops_refs.py,sha256=QR-2ywLqTdCTe_tULGwloEcKUzV8lfew8AouVF93vGs,17561 +torch/masked/maskedtensor/binary.py,sha256=GXxdD8ehGZsmzVG2e8O2EVo4k5pvI5V9wwgmFLu4RCE,5453 +torch/masked/maskedtensor/core.py,sha256=gtNxm-mpa0s3JYbgNJzT1HpyL9bHxFON12ocNbck0cA,12744 +torch/masked/maskedtensor/creation.py,sha256=fCpzScgLx6yQ23AR7Q6AQEzYuqicis3y3idQ6FuTC2M,554 +torch/masked/maskedtensor/passthrough.py,sha256=kEDm_BwgAN2erALo2NF8SLWX8J9jrAuySd9upW15-40,1447 
+torch/masked/maskedtensor/reductions.py,sha256=4yJehs7A0Lg_tuBf-uzz9a0bFdIeYJX3V735PhIs24k,5585 +torch/masked/maskedtensor/unary.py,sha256=jnl6Y19bG-Ytpdodw8EY4RVGeCNLuzTgm6Wp580w2xM,4167 +torch/monitor/__init__.py,sha256=j7X1yUxrDcojgKFJsOBfCHL7ahOPoyhtf7CT9HJEaOk,1286 +torch/monitor/__pycache__/__init__.cpython-310.pyc,, +torch/mps/__init__.py,sha256=_azonfnGcmEbsS-H2gXoNRDJoDnM9zV_qXUqzbLn_o8,5356 +torch/mps/__pycache__/__init__.cpython-310.pyc,, +torch/mps/__pycache__/event.cpython-310.pyc,, +torch/mps/__pycache__/profiler.cpython-310.pyc,, +torch/mps/event.py,sha256=96R50ucdPjFwWfj5M1I96WdCatlth2Ck_1zTAxYQ-sQ,1683 +torch/mps/profiler.py,sha256=y9nGrR2YuJEESvvzrTuLica9neClKKlNZX3gpKRNA2Q,2373 +torch/mtia/__init__.py,sha256=g_Un8Ed4a_eUiYExifX8vwK87ORVpjQvHorr5bBSWG8,11059 +torch/mtia/__pycache__/__init__.cpython-310.pyc,, +torch/mtia/__pycache__/_utils.cpython-310.pyc,, +torch/mtia/_utils.py,sha256=4UJfB-hN2jC9pp0U-FuBOVHAstjmy2D9oeKQfPwWVHY,1597 +torch/multiprocessing/__init__.py,sha256=DD4wNCeg8HGUfV_Qq4OCLvjtfD1oPjtZfT6CQJsx19o,2911 +torch/multiprocessing/__pycache__/__init__.cpython-310.pyc,, +torch/multiprocessing/__pycache__/_atfork.cpython-310.pyc,, +torch/multiprocessing/__pycache__/pool.cpython-310.pyc,, +torch/multiprocessing/__pycache__/queue.cpython-310.pyc,, +torch/multiprocessing/__pycache__/reductions.cpython-310.pyc,, +torch/multiprocessing/__pycache__/spawn.cpython-310.pyc,, +torch/multiprocessing/_atfork.py,sha256=NDXE2HR06ENj_1bGrQumxLdTGSDOlP-2bzeNwij7ajA,790 +torch/multiprocessing/pool.py,sha256=M4P93j12ZHPdQakHYucZdhq8_u4bIOH-UDelyYt1c5Y,1743 +torch/multiprocessing/queue.py,sha256=OT9KTbNOpqjhun_ZQTXizodE1UOwrpSbJhYRKtAyNmE,1477 +torch/multiprocessing/reductions.py,sha256=OhxBLWJ7qV_FmE77GxsYBCcdpcjHSE6b9AZs_js-I2Q,23164 +torch/multiprocessing/spawn.py,sha256=80AtUpicswfIoz4uQCIvL3CWDpIiZT1hW62bX24tCeo,11998 +torch/nested/__init__.py,sha256=ACjZ-ht1XbpjeunFHQMHQgN8vLFwFCjzdgwSNTlLeos,20908 
+torch/nested/__pycache__/__init__.cpython-310.pyc,, +torch/nested/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/nested/_internal/__pycache__/__init__.cpython-310.pyc,, +torch/nested/_internal/__pycache__/nested_tensor.cpython-310.pyc,, +torch/nested/_internal/__pycache__/ops.cpython-310.pyc,, +torch/nested/_internal/__pycache__/sdpa.cpython-310.pyc,, +torch/nested/_internal/nested_tensor.py,sha256=GEM91LJUJcM30OgtJm06B5wvftxKhLV8E7ufmqXrdpc,20747 +torch/nested/_internal/ops.py,sha256=yrGmxN0bD0XeifSFFKcM1sGdpjxmEUmXzqcmV5GfV1I,59580 +torch/nested/_internal/sdpa.py,sha256=0c4XHfohd_uaTt6Vmp0O8DnAcunvquFF65-4V9kH2I8,32404 +torch/nn/__init__.py,sha256=SGakM4nNGjI1YZwNintPfgNVTV5FrVKhiFXbq5Db8Pk,2425 +torch/nn/__pycache__/__init__.cpython-310.pyc,, +torch/nn/__pycache__/_reduction.cpython-310.pyc,, +torch/nn/__pycache__/common_types.cpython-310.pyc,, +torch/nn/__pycache__/cpp.cpython-310.pyc,, +torch/nn/__pycache__/functional.cpython-310.pyc,, +torch/nn/__pycache__/grad.cpython-310.pyc,, +torch/nn/__pycache__/init.cpython-310.pyc,, +torch/nn/__pycache__/parameter.cpython-310.pyc,, +torch/nn/_reduction.py,sha256=OX_t4R7Yc31OwxBMOYyY7t6WZEm475fO04iq3hU7BQo,1625 +torch/nn/attention/__init__.py,sha256=MvLQFrqSksGC704_-hyLQUPRbpn50B_8SZk2alo0I3Q,4993 +torch/nn/attention/__pycache__/__init__.cpython-310.pyc,, +torch/nn/attention/__pycache__/_utils.cpython-310.pyc,, +torch/nn/attention/__pycache__/bias.cpython-310.pyc,, +torch/nn/attention/__pycache__/flex_attention.cpython-310.pyc,, +torch/nn/attention/_utils.py,sha256=bXZjYSlDWAZ4GraYtPi3yApDf8OnObOw6ZGzBv1xmzE,2289 +torch/nn/attention/bias.py,sha256=-2u3XLujTa3KYVkjNV_GDomh7RBmd5FQlUT7KBlEpQI,13377 +torch/nn/attention/flex_attention.py,sha256=IoxhV4ETQLAddsKgPvGBbk1n2WU-_AcZGjvKi7JmkxE,39710 +torch/nn/backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/nn/backends/__pycache__/__init__.cpython-310.pyc,, +torch/nn/backends/__pycache__/thnn.cpython-310.pyc,, 
+torch/nn/backends/thnn.py,sha256=30l44thYgv59GXTfz_89j6MEYlkWUaVRv0laJO4UW6g,146 +torch/nn/common_types.py,sha256=5NeZy1lyaE6vciR1hhA5AfAy4ADOd0HCy3-L4V0xVH0,1846 +torch/nn/cpp.py,sha256=TMiA6xzuxtvKLDIi99W1RtxHiOSFAf9ORBeYQ1JDGT8,3017 +torch/nn/functional.py,sha256=6RzRx3LvkwJycWn4m_g3W-_T_GSo1EXfYISVhwipXVM,235999 +torch/nn/functional.pyi,sha256=6GzJQ33Odb_MFNyKRy6z11OB-J3sTsO1dQljPsToQw0,23713 +torch/nn/grad.py,sha256=xEFG-ZnkxxjBMJajUSoNPqlHLiFFFAZ3wXBvn2kCRYc,9910 +torch/nn/init.py,sha256=64b3JfEuXNYla3I9bCnwzPDP75AxDee9LP-ia2lBbNc,24168 +torch/nn/intrinsic/__init__.py,sha256=mLJtyO2jadna-iZ8mCcW_NoEMq4mJxr3BqaJDy0jBhE,697 +torch/nn/intrinsic/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/modules/__init__.py,sha256=bqi9D10UvQcO_cimMFdlI5HjLEvFOp7Y0hT6g3WOSMQ,517 +torch/nn/intrinsic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/modules/__pycache__/fused.cpython-310.pyc,, +torch/nn/intrinsic/modules/fused.py,sha256=Q6ta4U7JgPoSVs66FPll7Hjhjl_pXcO3pFWKGnBzspk,563 +torch/nn/intrinsic/qat/__init__.py,sha256=7rFxYP3UVmm4gwSlC7aBqXoJF1s0Q4aIdJZuW7ST8wI,59 +torch/nn/intrinsic/qat/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/qat/modules/__init__.py,sha256=n9s1HTHyr4Ao5_38irURngF2ZPh7pTJy9eos_oIz3Bs,637 +torch/nn/intrinsic/qat/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/qat/modules/__pycache__/conv_fused.cpython-310.pyc,, +torch/nn/intrinsic/qat/modules/__pycache__/linear_fused.cpython-310.pyc,, +torch/nn/intrinsic/qat/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/nn/intrinsic/qat/modules/conv_fused.py,sha256=7E_n97P10No_DFQ-KoKcQ8_NKx97m8k6HoiSVDVr9Cw,854 +torch/nn/intrinsic/qat/modules/linear_fused.py,sha256=sUSyA7rEOEfPRmn-M6SbEltWGaTym-yzDhkCEARnqWI,455 +torch/nn/intrinsic/qat/modules/linear_relu.py,sha256=ST8JHORElUi8Ze5E9SxJ94JFaudNQzdRmBj7c9k4a3Q,455 +torch/nn/intrinsic/quantized/__init__.py,sha256=Elo1b9Ro7sj3MsWn3xys-1rGWSe21SANc4H6Ai3x25c,336 
+torch/nn/intrinsic/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/quantized/dynamic/__init__.py,sha256=Ov25o_5XouCKIqpzK42xcw8dAwVd02tGZqS4XL8BOIM,73 +torch/nn/intrinsic/quantized/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/quantized/dynamic/modules/__init__.py,sha256=acHjDVSQZsjlTsbUoN4pI-971UJRfjavwMJRJuEOOBk,114 +torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/quantized/dynamic/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/nn/intrinsic/quantized/dynamic/modules/linear_relu.py,sha256=z509nJpUu_QTwDUjE_ON9Fbu7n5GfMnOzWju35bEXRk,97 +torch/nn/intrinsic/quantized/modules/__init__.py,sha256=6WHqSE0pA7qaW20FVPWggCar8uawjVKABKVeNdxjNlo,379 +torch/nn/intrinsic/quantized/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/intrinsic/quantized/modules/__pycache__/bn_relu.cpython-310.pyc,, +torch/nn/intrinsic/quantized/modules/__pycache__/conv_relu.cpython-310.pyc,, +torch/nn/intrinsic/quantized/modules/__pycache__/linear_relu.cpython-310.pyc,, +torch/nn/intrinsic/quantized/modules/bn_relu.py,sha256=3t9doT82fRh1SytKSivqQgxVfDP41KuvL-R9hz8oSb8,111 +torch/nn/intrinsic/quantized/modules/conv_relu.py,sha256=QWzacsMW7FtkasYvsZ9WqRExBjaMqsxHQHD5tYsZBFA,149 +torch/nn/intrinsic/quantized/modules/linear_relu.py,sha256=Zuj3I1owPTdoovG1-BoogRjn8JX1rMWM-WXYc-QFneU,89 +torch/nn/modules/__init__.py,sha256=chOVC7vb1QKPVwZ7I1MexX9fxazQGfbsmsaStR-cOgY,6494 +torch/nn/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/modules/__pycache__/_functions.cpython-310.pyc,, +torch/nn/modules/__pycache__/activation.cpython-310.pyc,, +torch/nn/modules/__pycache__/adaptive.cpython-310.pyc,, +torch/nn/modules/__pycache__/batchnorm.cpython-310.pyc,, +torch/nn/modules/__pycache__/channelshuffle.cpython-310.pyc,, +torch/nn/modules/__pycache__/container.cpython-310.pyc,, +torch/nn/modules/__pycache__/conv.cpython-310.pyc,, +torch/nn/modules/__pycache__/distance.cpython-310.pyc,, 
+torch/nn/modules/__pycache__/dropout.cpython-310.pyc,, +torch/nn/modules/__pycache__/flatten.cpython-310.pyc,, +torch/nn/modules/__pycache__/fold.cpython-310.pyc,, +torch/nn/modules/__pycache__/instancenorm.cpython-310.pyc,, +torch/nn/modules/__pycache__/lazy.cpython-310.pyc,, +torch/nn/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/modules/__pycache__/loss.cpython-310.pyc,, +torch/nn/modules/__pycache__/module.cpython-310.pyc,, +torch/nn/modules/__pycache__/normalization.cpython-310.pyc,, +torch/nn/modules/__pycache__/padding.cpython-310.pyc,, +torch/nn/modules/__pycache__/pixelshuffle.cpython-310.pyc,, +torch/nn/modules/__pycache__/pooling.cpython-310.pyc,, +torch/nn/modules/__pycache__/rnn.cpython-310.pyc,, +torch/nn/modules/__pycache__/sparse.cpython-310.pyc,, +torch/nn/modules/__pycache__/transformer.cpython-310.pyc,, +torch/nn/modules/__pycache__/upsampling.cpython-310.pyc,, +torch/nn/modules/__pycache__/utils.cpython-310.pyc,, +torch/nn/modules/_functions.py,sha256=rIB0N504VW29KrsF1I-NODZr1hrBHpRHgpCNrT1rilg,12104 +torch/nn/modules/activation.py,sha256=HnSODfDXBJH-gvAXjzISbubuE4pT3n_HZGOvEMdfVHc,57657 +torch/nn/modules/adaptive.py,sha256=cbbVAyUZUaMA94VQQWkIwOEBMLVnwfgBY7jM1mWIDCw,12360 +torch/nn/modules/batchnorm.py,sha256=lnjXWrV9-DkSVltbsRPowqccQaa8TPfjOADOS-NnJVk,38384 +torch/nn/modules/channelshuffle.py,sha256=xQ4wX3rFb-6VpWuBbwksB42clqzcUN_Wq9rzkAZwzAk,1548 +torch/nn/modules/container.py,sha256=M6-NH-T7VCy7pVQsUHmxvY74pkk6u-8ylBH3GyBx0k8,35029 +torch/nn/modules/conv.py,sha256=qdj4WEiYjr7XSFOn3IFyMQxwAvO7SUgYlHfoVVtw1Bg,75723 +torch/nn/modules/distance.py,sha256=5FDmPi74nbDBX4usOjCqnyb9abg613b-bi7ksmS0Rx4,3267 +torch/nn/modules/dropout.py,sha256=H8mXTDzyB43UvCV_c6-2pnFK3-wunQmdR8-SPnZtNJA,11187 +torch/nn/modules/flatten.py,sha256=xbIsPXNUeZRSaWHZDOfuskQKpBK2Cv7gZ6bruOpibfc,5544 +torch/nn/modules/fold.py,sha256=FwnpEP75IJJb2TXcR8RA5PB8RHuTW6OKboY5qFiaiRc,12895 
+torch/nn/modules/instancenorm.py,sha256=n_h3A2JHB1jUlhvKDGEJW7k-N7jEwK4M4ApOnqD3jU4,20329 +torch/nn/modules/lazy.py,sha256=atgpS8AtjV9VJ1f0b-pkKvFFeIxHsbYIInf16HtXxEg,11847 +torch/nn/modules/linear.py,sha256=1_XK33c961q0vMn1dBToI5soWccNKqbxQVdt4eyRiCM,10722 +torch/nn/modules/loss.py,sha256=oCz_CffsLOSAmtZ10pzk-ELeQyOnF0RLB1GCxXOPAFg,93384 +torch/nn/modules/module.py,sha256=0SXm4ZtEleZXWNxMTY-9w-MHWMJ-3tZGQnlx8AmOsRc,124492 +torch/nn/modules/normalization.py,sha256=P99W8IjeLxr__OyMIngnnkcYIiJZOYO_YbJRWpUb4xM,14963 +torch/nn/modules/padding.py,sha256=ZIc7BMeT5XmnC8sTvyJLlT3hFQWoXPcy-M1TEz158ek,30261 +torch/nn/modules/pixelshuffle.py,sha256=WsifsWRqulNrpJtWLz3suAVK8HUwzBJFcYgSeHD-a9A,3680 +torch/nn/modules/pooling.py,sha256=vRSyYnrKk1jYXnY1OkCNjfC2Gr6Xe_QaTwzG4hnzAzc,58817 +torch/nn/modules/rnn.py,sha256=epMwoTz9uVGpLJgNUp7_--eaRAsqjLyqMVi8AmItvKU,73907 +torch/nn/modules/sparse.py,sha256=dInwCUeXBVsI89PJfiD2CFLWW_UYZLglSMpss7szdOE,24054 +torch/nn/modules/transformer.py,sha256=njGc3xFWR0AXUEyXWZcAeq2vEyKK1HHXXWG5tyFiJ6k,49182 +torch/nn/modules/upsampling.py,sha256=meTVgguUC7mM3CLO6TizKrJ8Iv-YoElJCRkqARqd6f8,11541 +torch/nn/modules/utils.py,sha256=momLcVhTtL_5gV4tRsNQsox21gynaaS3KDM39abJJfs,2590 +torch/nn/parallel/__init__.py,sha256=V4mU0YkHaUC8gKU6j61uwrkcYEOgKeMoqfdXACvnnzE,787 +torch/nn/parallel/__pycache__/__init__.cpython-310.pyc,, +torch/nn/parallel/__pycache__/_functions.cpython-310.pyc,, +torch/nn/parallel/__pycache__/comm.cpython-310.pyc,, +torch/nn/parallel/__pycache__/data_parallel.cpython-310.pyc,, +torch/nn/parallel/__pycache__/distributed.cpython-310.pyc,, +torch/nn/parallel/__pycache__/parallel_apply.cpython-310.pyc,, +torch/nn/parallel/__pycache__/replicate.cpython-310.pyc,, +torch/nn/parallel/__pycache__/scatter_gather.cpython-310.pyc,, +torch/nn/parallel/_functions.py,sha256=r9HQEPMvKUHwIrMss1GRlDPczOUR9K8RcmYJiElFMxA,4955 +torch/nn/parallel/comm.py,sha256=58XuG3O3Onf1P9u1iQ01iRaJFptfUO4h0fBdbdHEcyo,10917 
+torch/nn/parallel/data_parallel.py,sha256=TsIaZ0sNB272yn3l633jirbqs0fyW4xj2CyWJKGF4b8,11729 +torch/nn/parallel/distributed.py,sha256=F2rTiS8NdjFIPeWiVF6pu9l2vDao_Rg-q2uFH9282XA,108479 +torch/nn/parallel/parallel_apply.py,sha256=Oof1gXkKhIzERmXuut1VL3U-meFMdUCjguwzdEebXpE,4419 +torch/nn/parallel/replicate.py,sha256=Ysr1PmOcCISU58shIuXgzJwYvvAKq-38hkCLCmcX9z0,6911 +torch/nn/parallel/scatter_gather.py,sha256=-LuR7WGz_2je9IDbqe4TsDKR47mvqlUJC6cu6JX_Rd0,4983 +torch/nn/parameter.py,sha256=cpzJDZtMoT7QHqjE8Ckx_4YjbR7vZcd8vp8XB2OFBiE,11380 +torch/nn/parameter.pyi,sha256=ksnePst84-MODsBDi2L8ZiLCRDXabpdmOARDYbET4L4,1135 +torch/nn/qat/__init__.py,sha256=yfqB19hirz0Sc5n_3XBvHNn8CO8_7Enk6pvq4UeBTdA,364 +torch/nn/qat/__pycache__/__init__.cpython-310.pyc,, +torch/nn/qat/dynamic/__init__.py,sha256=k6jdrqcs_h-ewM6xCcMLpnIF3uqv8_r9gIQprIfRGn8,207 +torch/nn/qat/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/nn/qat/dynamic/modules/__init__.py,sha256=xnBpUudYrmz9leiWC2UaMEGbDX2eO--nNluoaO_V-LE,78 +torch/nn/qat/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/qat/dynamic/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/qat/dynamic/modules/linear.py,sha256=6UAtM_9i3TRoQPuOyDnWtXTvNdRYZekKqBTopaHXLxQ,415 +torch/nn/qat/modules/__init__.py,sha256=oP1CGRWMDXdL9YEjAbdLn5Up_ExwCvB-ftNxVAYCIcA,500 +torch/nn/qat/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/qat/modules/__pycache__/conv.cpython-310.pyc,, +torch/nn/qat/modules/__pycache__/embedding_ops.cpython-310.pyc,, +torch/nn/qat/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/qat/modules/conv.py,sha256=hOAGSJ7oaQTf_9DWXPWrGGsQ9oPLASiCZaeHX4X3BMI,406 +torch/nn/qat/modules/embedding_ops.py,sha256=1BP3HyU1Xkr8yQBH1RNqv2rwZM9Uv6Wl6X5-DM_Jlls,458 +torch/nn/qat/modules/linear.py,sha256=W18E_sx1dwIj03StoKVN_VPKlJYq1hoy4KTCobCbA7M,391 +torch/nn/quantizable/__init__.py,sha256=O3C0GktZB_Ut6pNTL17JbCzJ2h2hCIK3KH8sVnhSuDs,57 +torch/nn/quantizable/__pycache__/__init__.cpython-310.pyc,, 
+torch/nn/quantizable/modules/__init__.py,sha256=gr03bjx4iwgg1n8zCu3B1UpgNqhb6UxxSVvwO5894V4,207 +torch/nn/quantizable/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantizable/modules/__pycache__/activation.cpython-310.pyc,, +torch/nn/quantizable/modules/__pycache__/rnn.cpython-310.pyc,, +torch/nn/quantizable/modules/activation.py,sha256=JvTGKm47nxF0xOdaJd-rVls-jrJFtYGu-YTZtrs5agI,439 +torch/nn/quantizable/modules/rnn.py,sha256=ymfl_ayTkuUunsNt9186sFOhtapsxsTHVtY_QlKFH9A,429 +torch/nn/quantized/__init__.py,sha256=OTMoZbhpm8nI1oMVMuB8tt4JvpDN3MsIoYww2nUJfQM,771 +torch/nn/quantized/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/__pycache__/functional.cpython-310.pyc,, +torch/nn/quantized/_reference/__init__.py,sha256=VrWZh6FzvyvAi744iiFBcYarBX3Akn6kTarYzEIX0_w,66 +torch/nn/quantized/_reference/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__init__.py,sha256=yjgG2MRfrWs-u8l1CCILBoPTa-FBORFZh9i86vYKdy8,1018 +torch/nn/quantized/_reference/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__pycache__/conv.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__pycache__/rnn.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__pycache__/sparse.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/__pycache__/utils.cpython-310.pyc,, +torch/nn/quantized/_reference/modules/conv.py,sha256=FclYIsMCbXH4erKiBDintHNWDsplOru0QY9LDToSL-s,579 +torch/nn/quantized/_reference/modules/linear.py,sha256=Cu-oS3rGRNtFQX8C8yBiLIeORYswqNnFbVw3ds6PMKQ,450 +torch/nn/quantized/_reference/modules/rnn.py,sha256=A6kV2QXCij5mpFFUTWBQwf6ynKQNJj0VMJbvHqkXa-Y,524 +torch/nn/quantized/_reference/modules/sparse.py,sha256=gSNXDYXlx1IoSrRN0i_pJHJt1znBH3sAPvDpivdfask,467 +torch/nn/quantized/_reference/modules/utils.py,sha256=nBSMp0qUV2OBmuCJYKGmpY6JtRinMoHhwPT2B8VbY08,590 
+torch/nn/quantized/dynamic/__init__.py,sha256=lN8JfmyIwtMWNsOD0hWGgKMVPUXuUb5cu6QOvIlEJxg,58 +torch/nn/quantized/dynamic/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/dynamic/modules/__init__.py,sha256=ORZYUZYDqtgat8r4CkTj2GRbnR4y2CxoK3hNUT1AFEU,993 +torch/nn/quantized/dynamic/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/dynamic/modules/__pycache__/conv.cpython-310.pyc,, +torch/nn/quantized/dynamic/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/quantized/dynamic/modules/__pycache__/rnn.cpython-310.pyc,, +torch/nn/quantized/dynamic/modules/conv.py,sha256=bq0IjYdFIpdJbsQkIeF1LDHZOC8wvGInVohV5KwqqI0,669 +torch/nn/quantized/dynamic/modules/linear.py,sha256=9hI6zBpmeAs_hCwTr0QeEm7ouFfYjjGq0QHw7lwPvrs,447 +torch/nn/quantized/dynamic/modules/rnn.py,sha256=vWAb3OQKyHWubLC5U-qIGP-GgYq-GFpvltZCDXjogrA,740 +torch/nn/quantized/functional.py,sha256=YLagkINzH1ksH0zcEEizTr0Wdu8gvziqPkx-elbUkJE,276 +torch/nn/quantized/modules/__init__.py,sha256=oNogz5eH2YEYfHjRkYBEqp_XD2GduDqc9StknEGWx40,2101 +torch/nn/quantized/modules/__pycache__/__init__.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/activation.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/batchnorm.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/conv.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/dropout.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/embedding_ops.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/functional_modules.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/linear.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/normalization.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/rnn.cpython-310.pyc,, +torch/nn/quantized/modules/__pycache__/utils.cpython-310.pyc,, +torch/nn/quantized/modules/activation.py,sha256=52tIo-6xSFg2QIuNL-Q7AKs2IiDBCcGFIvWNupA3fjA,528 +torch/nn/quantized/modules/batchnorm.py,sha256=OpxwQosX9Pv53csZ4ZXKhFOkya_tXMbtndDMSETJKV8,437 
+torch/nn/quantized/modules/conv.py,sha256=3JBXb4gZOKXHIyWnnPEVxlb-WRDSMiNEUvuofCSNsSw,666 +torch/nn/quantized/modules/dropout.py,sha256=qdWqDycnaodhJDr_EyIQCiaSg4xK5dqEZ21IBIsC_8M,442 +torch/nn/quantized/modules/embedding_ops.py,sha256=smsmXs2h7t2y6WbToNuJ060heMQbZOXOfkNbdwOqVO4,547 +torch/nn/quantized/modules/functional_modules.py,sha256=bk60spaa4_sUCDXyh4GOh937FVIwNCH-l93lAPi39-Q,554 +torch/nn/quantized/modules/linear.py,sha256=zsZr5Rb4z4XJvTjc-hF9cqG4uqpsbkHOc3jJOxcSM4A,481 +torch/nn/quantized/modules/normalization.py,sha256=vUyWsXC7CFIm0oohnco73FD3Kz9xMGHeJKq3dNKZxKQ,626 +torch/nn/quantized/modules/rnn.py,sha256=NWI5IuH_qaC1YD1ROeoUbodbN3yS0MrZKOhXLmGfPDo,411 +torch/nn/quantized/modules/utils.py,sha256=ZkyzQS1nmLaK5Nvq2eKtSha7mHG3HyM08_5ueUc9FpQ,539 +torch/nn/utils/__init__.py,sha256=90J4PKyfYX3mzA291Ymz8Q9oG6sPSurqKnM2ZEh8mAs,1082 +torch/nn/utils/__pycache__/__init__.cpython-310.pyc,, +torch/nn/utils/__pycache__/_deprecation_utils.cpython-310.pyc,, +torch/nn/utils/__pycache__/_named_member_accessor.cpython-310.pyc,, +torch/nn/utils/__pycache__/_per_sample_grad.cpython-310.pyc,, +torch/nn/utils/__pycache__/clip_grad.cpython-310.pyc,, +torch/nn/utils/__pycache__/convert_parameters.cpython-310.pyc,, +torch/nn/utils/__pycache__/fusion.cpython-310.pyc,, +torch/nn/utils/__pycache__/init.cpython-310.pyc,, +torch/nn/utils/__pycache__/memory_format.cpython-310.pyc,, +torch/nn/utils/__pycache__/parametrizations.cpython-310.pyc,, +torch/nn/utils/__pycache__/parametrize.cpython-310.pyc,, +torch/nn/utils/__pycache__/prune.cpython-310.pyc,, +torch/nn/utils/__pycache__/rnn.cpython-310.pyc,, +torch/nn/utils/__pycache__/spectral_norm.cpython-310.pyc,, +torch/nn/utils/__pycache__/stateless.cpython-310.pyc,, +torch/nn/utils/__pycache__/weight_norm.cpython-310.pyc,, +torch/nn/utils/_deprecation_utils.py,sha256=CUwuyJuOURT6rBuOtUiehKJADu_hC1P7myL1b6MWbDw,1695 +torch/nn/utils/_expanded_weights/__init__.py,sha256=PHIe9H3l0iuImQ6z3WqMkWIS37FDQVQ16vMo0a08mtA,452 
+torch/nn/utils/_expanded_weights/__pycache__/__init__.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/conv_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/conv_utils.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/embedding_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/expanded_weights_impl.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/expanded_weights_utils.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/group_norm_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/instance_norm_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/layer_norm_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/__pycache__/linear_expanded_weights.cpython-310.pyc,, +torch/nn/utils/_expanded_weights/conv_expanded_weights.py,sha256=D6jjDrODG--LPTf9KEZLDL3RcX1G0BT3rAVPimAr6rc,2601 +torch/nn/utils/_expanded_weights/conv_utils.py,sha256=WiiXFjSDps9WUOd3VY-5eOuLcgTHr34DWwPuO7vrl0Q,10739 +torch/nn/utils/_expanded_weights/embedding_expanded_weights.py,sha256=jpIoElWFchrSj7X4ehBR4uPoBD4rImmM0cFLPVS1n4k,2856 +torch/nn/utils/_expanded_weights/expanded_weights_impl.py,sha256=DpA8D-mLu2-4bNnN1I-ZgRYG8e_bfZ2O7VJL6_nFSyw,6138 +torch/nn/utils/_expanded_weights/expanded_weights_utils.py,sha256=qymuok1L-XCz1mt6GW5nRdksOVAPPmIfigzwskPZaEs,7582 +torch/nn/utils/_expanded_weights/group_norm_expanded_weights.py,sha256=7nSu6ko3rlz5a035dM6--XrS_APNU02644e1MMSFz4g,3456 +torch/nn/utils/_expanded_weights/instance_norm_expanded_weights.py,sha256=LPrLT6CsxrW5QEBxoGJKbbt1uSJnRY-njs5udf5Celw,3735 +torch/nn/utils/_expanded_weights/layer_norm_expanded_weights.py,sha256=6N5ek-PK0bBpVIrGaHf4_1HKXrPKZdvzsIFWiA3GReE,3252 +torch/nn/utils/_expanded_weights/linear_expanded_weights.py,sha256=kZYzaAxadDGbxDxtJ1SuHlVdnHk5tq3QhQMNPZpces0,2222 
+torch/nn/utils/_named_member_accessor.py,sha256=R9w5fxMyBMnLZ1sozNnI5jgAQ_7TRtF6qAvO1dYW38E,14173 +torch/nn/utils/_per_sample_grad.py,sha256=gHwwHf0esWt1_NcOQJr2nH_A6dAHO3nKvejUNk1kQBo,5745 +torch/nn/utils/clip_grad.py,sha256=oMxIt21PPvt6Qmbrjkwlpd-tSXK2CanVbEw_5Yr-KMk,7423 +torch/nn/utils/convert_parameters.py,sha256=4GJHQeh3QJ3lN7c_U12y-XxHJ5U1Kegb9HA2R-yTiNw,3217 +torch/nn/utils/fusion.py,sha256=8t4cpgWk34caUlDwTG9FKW70qZROeFAFX3H2K0ylqJI,6463 +torch/nn/utils/init.py,sha256=eA5Y0Ti3Qjhd-pCKEx9MpLHjJuz4bA3XNw6W0lC55XE,2250 +torch/nn/utils/memory_format.py,sha256=zsvGtWiVl5IYPUcbpOPdA5juEVI_bAEB7Vb7lvpsb4U,7899 +torch/nn/utils/parametrizations.py,sha256=c5WXxNbNRdNsmzu6u3yolT4SBLdw6QyhghIRuMNe3OE,25666 +torch/nn/utils/parametrize.py,sha256=Jf17IF3an7YTzT3a61fWS4cUOjTBS-j06vwG8-PXFRw,36008 +torch/nn/utils/prune.py,sha256=C1lEXALAbwwVOU2pXZpVxpGNTV03vi4uI4qJklaaCCU,57768 +torch/nn/utils/rnn.py,sha256=t5In2YRfKFjnH0GQW-sWPtgf9jXPmvluo_h7hjWUV7E,22867 +torch/nn/utils/spectral_norm.py,sha256=GgmPOuEicuWAAbcFqPDxJUCdEpxhffDKgQ--brkSwJg,14913 +torch/nn/utils/stateless.py,sha256=kY0s_UEjsMHGCj-si4Y_nxRVpBB2WxCrN5od0fFotfU,12291 +torch/nn/utils/weight_norm.py,sha256=4dZGNTi_ZAFLBWAmhR8Ns5EXDmXTjxHyQm7XsCaBKH4,5881 +torch/onnx/__init__.py,sha256=LjPl92doz0v-XHBT9BXSrML_FS2G2NBkJeZwf1MIN0c,19723 +torch/onnx/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/__pycache__/_constants.cpython-310.pyc,, +torch/onnx/__pycache__/_deprecation.cpython-310.pyc,, +torch/onnx/__pycache__/_experimental.cpython-310.pyc,, +torch/onnx/__pycache__/_exporter_states.cpython-310.pyc,, +torch/onnx/__pycache__/_flags.cpython-310.pyc,, +torch/onnx/__pycache__/_globals.cpython-310.pyc,, +torch/onnx/__pycache__/_onnx_supported_ops.cpython-310.pyc,, +torch/onnx/__pycache__/_type_utils.cpython-310.pyc,, +torch/onnx/__pycache__/errors.cpython-310.pyc,, +torch/onnx/__pycache__/operators.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_caffe2.cpython-310.pyc,, 
+torch/onnx/__pycache__/symbolic_helper.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset10.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset11.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset12.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset13.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset14.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset15.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset16.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset17.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset18.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset19.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset20.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset7.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset8.cpython-310.pyc,, +torch/onnx/__pycache__/symbolic_opset9.cpython-310.pyc,, +torch/onnx/__pycache__/utils.cpython-310.pyc,, +torch/onnx/__pycache__/verification.cpython-310.pyc,, +torch/onnx/_constants.py,sha256=85vkG1xUJkwE5xoKxUcFbswqEGEXaZbyrqs4KA29o-k,608 +torch/onnx/_deprecation.py,sha256=G9dxerhVvL98iNyUrHFjv7TFAb2Hve7asN_WdMCb-To,2272 +torch/onnx/_experimental.py,sha256=LoTAZTIvBKA00kRmHEgBLBXTZ125CDrUTQLEWC_ah9g,1017 +torch/onnx/_exporter_states.py,sha256=RvbLW99aUyKs98VEpQhRU36QfhS_A5BhWyD0SY2qR0k,444 +torch/onnx/_flags.py,sha256=e4gDoR1cZyVcN_mb_XUJp6bfqqFo1gsO2sUHNbmm9b8,1284 +torch/onnx/_globals.py,sha256=mWIC9T8L7mTEvyy3GrGWrjbfpuwPxF66_L6-zEbBk1Q,3004 +torch/onnx/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/onnx/_internal/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/_exporter_legacy.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/_lazy_import.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/io_adapter.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/jit_utils.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/onnx_proto_utils.cpython-310.pyc,, 
+torch/onnx/_internal/__pycache__/onnxruntime.cpython-310.pyc,, +torch/onnx/_internal/__pycache__/registration.cpython-310.pyc,, +torch/onnx/_internal/_exporter_legacy.py,sha256=6d4HeiQt4p8VCPTCCdQGNi1eq1me_CTzSgL4OEInEWU,53999 +torch/onnx/_internal/_lazy_import.py,sha256=Yzlqb2ubyo7F8_xhXjn1JxlYcYhUiWx2KFGxRHdlY28,1201 +torch/onnx/_internal/diagnostics/__init__.py,sha256=RtmC_BYe_kCJGlsCxOU2WBGC5qAHurllBvvIJufoJzU,434 +torch/onnx/_internal/diagnostics/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/__pycache__/_diagnostic.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/__pycache__/_rules.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/_diagnostic.py,sha256=250IQVrvNeHRuEOsh7j9VqdYXsGIlTS7nPi536Rygos,6980 +torch/onnx/_internal/diagnostics/_rules.py,sha256=z-P2an_PcsM4D8aArDBIUO0fHtnNXkrJIpk05wx6YLU,37185 +torch/onnx/_internal/diagnostics/infra/__init__.py,sha256=eCVusIsWNEeZ9nsRVcXVWyJlG2WMaG4uvahctDzx9Is,574 +torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/__pycache__/decorator.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/_infra.py,sha256=94w0x8fuefhKhmHVrpZl6FUY-Ev94ZqGEHjVpanY3Gk,9709 +torch/onnx/_internal/diagnostics/infra/context.py,sha256=A5VkN1QO-6o8sFRZ776CMTjtuelkOe-FRHpxtV2uxE8,16319 +torch/onnx/_internal/diagnostics/infra/decorator.py,sha256=yaKhbtt41Jgm3oekAzfp3nKCdBt3vNqQIE8dzGoZRXc,5407 +torch/onnx/_internal/diagnostics/infra/formatter.py,sha256=6_pnh7xNd9HDzmdzdlv3xcFo16AFu7oRZq-_aGlSXVs,2794 +torch/onnx/_internal/diagnostics/infra/sarif/__init__.py,sha256=HfUf5-t2O7Yvo4zGKWkAFTLfyHVjYLS0KjsX6pxiEYs,4985 
+torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_address.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_change.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_content.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_artifact_location.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_attachment.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_configuration_override.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_properties.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_references.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_fix.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location.cpython-310.pyc,, 
+torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_logical_location.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_multiformat_message_string.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_notification.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_physical_location.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_rectangle.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_replacement.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_relationship.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run_automation_details.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_special_locations.cpython-310.pyc,, 
+torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_suppression.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow_location.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_tool_component_reference.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_translation_metadata.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-310.pyc,, +torch/onnx/_internal/diagnostics/infra/sarif/_address.py,sha256=ws5WXm5Z4sCx9ZVtCT67FluQUInkR2zKDaMW-QUNa2g,1748 +torch/onnx/_internal/diagnostics/infra/sarif/_artifact.py,sha256=0JJfuGrzAxAmPQcCi29zGoL1MTDusEe0bkd4JZ8Q0tQ,3026 +torch/onnx/_internal/diagnostics/infra/sarif/_artifact_change.py,sha256=l95-2ctz3d44esToAwN35lbeE6HkAXuETBgezH-ahFc,893 +torch/onnx/_internal/diagnostics/infra/sarif/_artifact_content.py,sha256=Yp_0HXHpPxKS-rJkKPv6y7JnVNPWQ2Y41M3lSTc5kKU,1014 +torch/onnx/_internal/diagnostics/infra/sarif/_artifact_location.py,sha256=U9qsrtJXuzR3iTcSYPOmD9Lbih2NBJ6OPlJToDfAXcQ,1061 +torch/onnx/_internal/diagnostics/infra/sarif/_attachment.py,sha256=hLFkgETVh3cCL94A0emFQqAEfcxC2RT0gqdMuLlkxzA,1213 
+torch/onnx/_internal/diagnostics/infra/sarif/_code_flow.py,sha256=qZJ7C8EL9m1tYrf_sw66PbB3ilJ2fClEAKh3UYcNFKk,935 +torch/onnx/_internal/diagnostics/infra/sarif/_configuration_override.py,sha256=BREXBUAT2cSNZlpZJ1b59rcGkbooMI_gdusu_Cslcrg,1004 +torch/onnx/_internal/diagnostics/infra/sarif/_conversion.py,sha256=HMAqaQmMouchsc_DutxOTAiD1QmI27gsa3KR140JtWE,1172 +torch/onnx/_internal/diagnostics/infra/sarif/_edge.py,sha256=av5c6koZeuTyRQCgE6fDBOk-JiOFhyHLx4_hwuuC2DY,983 +torch/onnx/_internal/diagnostics/infra/sarif/_edge_traversal.py,sha256=r2PBm018laYQPM_U8aAevhNx5T57R2lBrTQ2Vqv4b4Y,1083 +torch/onnx/_internal/diagnostics/infra/sarif/_exception.py,sha256=O70XX0b5HkMaSfaQvmnfKx5KxYgQqQ41gXfgOXToyj8,1168 +torch/onnx/_internal/diagnostics/infra/sarif/_external_properties.py,sha256=pqjtu6OK9LrMBbjNVCp8B7R4h4JcRfbaDoWMEmLGvZc,3836 +torch/onnx/_internal/diagnostics/infra/sarif/_external_property_file_reference.py,sha256=hpSt2mZnZRaZ0KNDxYdGUVF0yfarnRNq0nFAhrqmCpI,1120 +torch/onnx/_internal/diagnostics/infra/sarif/_external_property_file_references.py,sha256=_VSqpoX1i1-_PCSgalnEhBGMHSas8QUnz5kAoTaY0pk,3901 +torch/onnx/_internal/diagnostics/infra/sarif/_fix.py,sha256=FFAiJYI4CeDtIFld-dKk3HggBBPg4eCcMV3PCOAxmvQ,1069 +torch/onnx/_internal/diagnostics/infra/sarif/_graph.py,sha256=AXliROAwGC_Ff2chkUxqwdmQxswbQ2rqGT_ACsaWpfU,1090 +torch/onnx/_internal/diagnostics/infra/sarif/_graph_traversal.py,sha256=4k00If3e_57WKUBFJncbBjfA5e5ALlpQvoDqb0uCNhU,1423 +torch/onnx/_internal/diagnostics/infra/sarif/_invocation.py,sha256=vfUBEk4Bxi005AQlUSZ_6foTXiv0eGHxn0ofTQApMjE,4664 +torch/onnx/_internal/diagnostics/infra/sarif/_location.py,sha256=8n53TteyBb0s2t1scKbmFTUl3GAhmrP2WIZfTKMPSi0,1640 +torch/onnx/_internal/diagnostics/infra/sarif/_location_relationship.py,sha256=TTtVOdV-Ht7IP7lyRBGE-UUc2mN_Gb_HeUy9DOxsQIY,964 +torch/onnx/_internal/diagnostics/infra/sarif/_logical_location.py,sha256=spq1Legx7q1euHSPpQkY4ZnE0jSrZPtDLY5CoVgcBRI,1314 
+torch/onnx/_internal/diagnostics/infra/sarif/_message.py,sha256=VO4FTcUK-7Wl3Diom7lYmuYBjMfa-KwjBY9W1n2TQvk,1062 +torch/onnx/_internal/diagnostics/infra/sarif/_multiformat_message_string.py,sha256=V9d9I84_iYcDPwOs7KUWJ2DjDVcSmNt-Kq4Xaggd_ps,805 +torch/onnx/_internal/diagnostics/infra/sarif/_node.py,sha256=ABeCrjO8hS5B-83MFoUv8gd5IoLMDlbaK1LcWwqS6Mo,1075 +torch/onnx/_internal/diagnostics/infra/sarif/_notification.py,sha256=EptwUCa06LGuLKKfX1on41MZgfa0Lu-fABjlQkXpKlE,1934 +torch/onnx/_internal/diagnostics/infra/sarif/_physical_location.py,sha256=-wrv-iFUk--SFcUZ9MgEQ8YSNUQjqptjuxnqUt1g_Jo,1347 +torch/onnx/_internal/diagnostics/infra/sarif/_property_bag.py,sha256=lsjMzdnz9g3dIMw9Mtm-0bTU5mQGSZNhEnydYRGyF8A,496 +torch/onnx/_internal/diagnostics/infra/sarif/_rectangle.py,sha256=k0VAKUBS4Sk1CWgYfehzAzCZxxOL6uzKM1V0Bkk_GZ4,1159 +torch/onnx/_internal/diagnostics/infra/sarif/_region.py,sha256=J3G25_1N--w2ueUGVGiazuTRUju47d1fJ-48HDgILT8,2031 +torch/onnx/_internal/diagnostics/infra/sarif/_replacement.py,sha256=ydAQKhmy2Mhq0GaPr62wTQstlPQNafSvMh3HYI_VQrU,905 +torch/onnx/_internal/diagnostics/infra/sarif/_reporting_configuration.py,sha256=4mShdhYP_MTlK6Xs22kaMej7a57ZCf2eafK7SSjfeG4,1136 +torch/onnx/_internal/diagnostics/infra/sarif/_reporting_descriptor.py,sha256=jzZPwUC8HjDD8uSe_itjTu7HILI7PVJkkcr0Aoh5Lgw,2768 +torch/onnx/_internal/diagnostics/infra/sarif/_reporting_descriptor_reference.py,sha256=7BA01H2muecHjU056yohvfZUrvOtBPdOghSQW4FT9T0,1176 +torch/onnx/_internal/diagnostics/infra/sarif/_reporting_descriptor_relationship.py,sha256=y2iu9t6UuREodZkdJtaoNEXgU8Zb0Kd23YQ9gQJQtd4,1110 +torch/onnx/_internal/diagnostics/infra/sarif/_result.py,sha256=T4wwLsYJTIBC0PHRI8AY3wV1CN-OGbHuHDntgXibDLY,5090 +torch/onnx/_internal/diagnostics/infra/sarif/_result_provenance.py,sha256=IDbsQXOJc36XcifMq64PvFnmk8nrqtnTJf6_VYLNUjc,1561 +torch/onnx/_internal/diagnostics/infra/sarif/_run.py,sha256=Ce6kycs1N56zhdGDkTOfggz3GB6MmvQhBcVGN1TPNQM,5358 
+torch/onnx/_internal/diagnostics/infra/sarif/_run_automation_details.py,sha256=KRztrseIGyQm0JzBbAQJJ1-oqTwS9pt35MoXzwcpBF4,1136 +torch/onnx/_internal/diagnostics/infra/sarif/_sarif_log.py,sha256=wsCKQQKrcr9EzvHiIkiNyqbW52yj56VJwsE6v5pEirE,1237 +torch/onnx/_internal/diagnostics/infra/sarif/_special_locations.py,sha256=3YNBPg7CwcivAIf-s6OKur4s-5_xgTvt3pa18qKeuv8,782 +torch/onnx/_internal/diagnostics/infra/sarif/_stack.py,sha256=F2PUk8ubWkIdcFFhZ80pER1iCas4z8O1xH5APb_y02c,859 +torch/onnx/_internal/diagnostics/infra/sarif/_stack_frame.py,sha256=cvgDZszoMaleh_D8UHJPYQ619IVayITX7rWOxEHljm0,1088 +torch/onnx/_internal/diagnostics/infra/sarif/_suppression.py,sha256=GV1eMCGiusg5Ydb-jpewOltknZMKMLznVcFYBUWfluw,1249 +torch/onnx/_internal/diagnostics/infra/sarif/_thread_flow.py,sha256=YD_0OTAvRygQBiWMylIJeELuYxkqAAakECFDPlI4yko,1351 +torch/onnx/_internal/diagnostics/infra/sarif/_thread_flow_location.py,sha256=JEYgPHorvlV9zm8rI19-ecugrd_bb5fTxGhQ5jqjbA8,2517 +torch/onnx/_internal/diagnostics/infra/sarif/_tool.py,sha256=CkHTztGsphKqnKxTdVnEsf7IoLj8p-eH15R1puEjJSc,848 +torch/onnx/_internal/diagnostics/infra/sarif/_tool_component.py,sha256=VQ0R5OsdJbaasXY5a4ZmboZWYBje9jeGc7_20l1qFpc,5029 +torch/onnx/_internal/diagnostics/infra/sarif/_tool_component_reference.py,sha256=E_0IAvLbJZ2sDAbz04FgbrHW20DQX_DScF8tmbE8Eq8,947 +torch/onnx/_internal/diagnostics/infra/sarif/_translation_metadata.py,sha256=7uhmCQhYjV2EC7KLzcd-yIBzYX893sIIvCh9PqNA0D8,1504 +torch/onnx/_internal/diagnostics/infra/sarif/_version_control_details.py,sha256=N8lmfAcHW1BFZXxACnlKwlt1KveA3TL9uRDXoWQ8gEU,1436 +torch/onnx/_internal/diagnostics/infra/sarif/_web_request.py,sha256=d4Scw4N4JuZDGbDIOYQfhLXaPa2_mRUzRgi6B9XHK8Q,1543 +torch/onnx/_internal/diagnostics/infra/sarif/_web_response.py,sha256=-RkuvmR7VQcJ5g56G-5K36T6GjnVTzrUGv7uNM3b3zk,1611 +torch/onnx/_internal/diagnostics/infra/sarif/version.py,sha256=VMHsKymwtAjaf6aKtqPY735cVVYDJg5tIKkQWelRI4M,186 
+torch/onnx/_internal/diagnostics/infra/utils.py,sha256=UsQCFjBDSBul-bM4knrVl7PTMEuoBY_xfIgMTUFX_HM,2342 +torch/onnx/_internal/exporter/__init__.py,sha256=AzHxfakOCRh0MbpEGcX6itAV_lPn6lsg8j0UPbxaGCo,429 +torch/onnx/_internal/exporter/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_analysis.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_building.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_capture_strategies.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_compat.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_core.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_decomp.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_dispatching.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_errors.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_fx_passes.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_ir_passes.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_isolated.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_onnx_program.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_registration.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_reporting.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_schemas.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_tensors.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_testing.cpython-310.pyc,, +torch/onnx/_internal/exporter/__pycache__/_verification.cpython-310.pyc,, +torch/onnx/_internal/exporter/_analysis.py,sha256=QVcDxu2qojmpOIHKwOT8j9-5WPs1FWFiP7LypVPA_4g,8810 +torch/onnx/_internal/exporter/_building.py,sha256=3KYoCMI534kbaU5gUgpEwvACPE6CI1E2nk0rEIp7ou8,27363 +torch/onnx/_internal/exporter/_capture_strategies.py,sha256=IlwKHRavIoLC2b6Mh7pqu4fcSICa_2pzCuVIUUm1Izk,12452 +torch/onnx/_internal/exporter/_compat.py,sha256=BnB7KK0Ay1UCn21SeaTddG-hSxxMe9AZ8CZCa3He718,7847 
+torch/onnx/_internal/exporter/_core.py,sha256=ieCHWE3PkAYFOHPIPq9UN_FY3d2Jg7fJa3PIuEqJfT4,53788 +torch/onnx/_internal/exporter/_decomp.py,sha256=xh5GYYN5AzB5IQtpLEuIfMVteodLZjwa93xkvZU60_4,3916 +torch/onnx/_internal/exporter/_dispatching.py,sha256=QzRDfInc92fH145ll-mWwbsjy8p4BcfRPya0Jglfttc,14304 +torch/onnx/_internal/exporter/_errors.py,sha256=VziOIvyap9A_UDRP41HkC_s0J-R13dbCrVLIusF-6A8,535 +torch/onnx/_internal/exporter/_fx_passes.py,sha256=fKwnTWkTU9EUF1KXRD8RVOekR2ACysXCBN30HxzaKCE,2044 +torch/onnx/_internal/exporter/_ir_passes.py,sha256=GlR4g1wRwezvNTypgnAiloidAPESS6PvkyoY5QFJTLo,1513 +torch/onnx/_internal/exporter/_isolated.py,sha256=vVbfk6-O1QKOIwk6MeFOwmj6t82HnS31rDZgvoNCf0o,1564 +torch/onnx/_internal/exporter/_onnx_program.py,sha256=rhUuLLv6MbrRYxFKcYshQCYWM7BRZkW2iPAHt9ZQ7Qo,12470 +torch/onnx/_internal/exporter/_registration.py,sha256=GNqUj_rOdoScEwL897EV-GiT6wf9W8EVVMGC4zWIogo,9611 +torch/onnx/_internal/exporter/_reporting.py,sha256=LDm7c72aYNJp8fzxXlKeojonBd_Oi9WwGUnLQNOHTuc,6907 +torch/onnx/_internal/exporter/_schemas.py,sha256=AA7h7zomovs_vOTx_UsoElONB0h2rR8MjsEQbi2oDII,20380 +torch/onnx/_internal/exporter/_tensors.py,sha256=DE1j78H2T1CvM3f1QSxROoBJg9KO6rrWa0qa9pEPdUk,2467 +torch/onnx/_internal/exporter/_testing.py,sha256=sVtmYGWkWbIdLToSemPG0O9ZwGXZA6Hlnt9OmT7C9L8,2336 +torch/onnx/_internal/exporter/_verification.py,sha256=G5LX9E2K1rBsus6kLdnLuXhcuoEitMRW5934UAvCQNE,3751 +torch/onnx/_internal/fx/__init__.py,sha256=nDN5RQ1wcu4ktXSPWCLgPREAyDWHnhr1EOMqcKSVDZs,172 +torch/onnx/_internal/fx/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/_pass.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/decomposition_skip.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/decomposition_table.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/diagnostics.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/dynamo_graph_extractor.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/fx_onnx_interpreter.cpython-310.pyc,, 
+torch/onnx/_internal/fx/__pycache__/fx_symbolic_graph_extractor.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/onnxfunction_dispatcher.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/patcher.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/registration.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/serialization.cpython-310.pyc,, +torch/onnx/_internal/fx/__pycache__/type_utils.cpython-310.pyc,, +torch/onnx/_internal/fx/_pass.py,sha256=toi18cwDpHhApyoUqtv1B586gI-DPrnMmwAC7nLKMm4,12458 +torch/onnx/_internal/fx/analysis/__init__.py,sha256=TcuPzgDgAUjg_fLUkc5iEKxmObhF8gPKU4Y6F1qR7pg,108 +torch/onnx/_internal/fx/analysis/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/fx/analysis/__pycache__/unsupported_nodes.cpython-310.pyc,, +torch/onnx/_internal/fx/analysis/unsupported_nodes.py,sha256=P5dEg0MsApHLR2DrxXLoWdzSKJptJub9Ht_s7kBcQjo,3389 +torch/onnx/_internal/fx/decomposition_skip.py,sha256=g_LccE1zSWUsbTai9Jv-IGK32BTU1wfs-v4nLoAcUmE,8339 +torch/onnx/_internal/fx/decomposition_table.py,sha256=RsnGylbJdMwILyXFC_8dEkxlBQgTmzERnB7-onYMeLw,5156 +torch/onnx/_internal/fx/diagnostics.py,sha256=Ww0Ay2Ul9AFanDN6hiVWHwV79qm4iow39PDlbvX6ICI,8935 +torch/onnx/_internal/fx/dynamo_graph_extractor.py,sha256=y_rJ2MnpYgEDwNmqpsfsfx_L6clrxVS631RCFSvvkkw,8224 +torch/onnx/_internal/fx/fx_onnx_interpreter.py,sha256=bBQa1-uvXJLwvth3Y9JxVEaXqdHC7H2UXoSDwNlUsqg,34099 +torch/onnx/_internal/fx/fx_symbolic_graph_extractor.py,sha256=jO8EsUk75yYat4sa65qFPzR-4INDScbA1xSjS6ts-c0,10088 +torch/onnx/_internal/fx/onnxfunction_dispatcher.py,sha256=zkPU242VlN2UdhovmnTgLGq021RNLZ4lRpvg27Mj4a4,37770 +torch/onnx/_internal/fx/passes/__init__.py,sha256=SO_DWZEqZA0Bpw-ShxxQ4PuG7G4v_B8icIRWSyx2PcI,552 +torch/onnx/_internal/fx/passes/__pycache__/__init__.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/_utils.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/decomp.cpython-310.pyc,, 
+torch/onnx/_internal/fx/passes/__pycache__/functionalization.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/modularization.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/readability.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/type_promotion.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/__pycache__/virtualization.cpython-310.pyc,, +torch/onnx/_internal/fx/passes/_utils.py,sha256=D_uNMEBCynPQzztj4QrDAchq1tiqocIO1ans22_IDxw,4235 +torch/onnx/_internal/fx/passes/decomp.py,sha256=AbkmbIiRleWPkgVIim4As4-rtE8gQHGQd2-y22fRAto,3557 +torch/onnx/_internal/fx/passes/functionalization.py,sha256=kDf3YxKICCOLbHjCQYm7h19jWZvZq00h_ay7xPZNmR4,6508 +torch/onnx/_internal/fx/passes/modularization.py,sha256=dNp7Sf0tS8Ji-91eSr30d6W-_Y2lsEwoLxYWrlOhTfo,34173 +torch/onnx/_internal/fx/passes/readability.py,sha256=cSLCDiQMekpeTncb_-Q3rg7rXAZe61RswVYsvuv5Fnw,5869 +torch/onnx/_internal/fx/passes/type_promotion.py,sha256=JJLZDGzBZCrWnUCA8uAr_BMug0FUdo3TQZlnIPsSpKk,66805 +torch/onnx/_internal/fx/passes/virtualization.py,sha256=uNr-DrgxEefSUaSlS_6RdFlFcJ1z2pk6G7CfSqYe2Xg,3813 +torch/onnx/_internal/fx/patcher.py,sha256=7dsyU-PnNpmarAZ8v0V7FgZ9E6Ssi-e1IDfGzfZiwWY,6055 +torch/onnx/_internal/fx/registration.py,sha256=FyXKoCfbiQEC6oUL97GpLISl-7AnuP1ZMLM25WVytZc,2984 +torch/onnx/_internal/fx/serialization.py,sha256=ihqpXlfeE0MrKp2lfFdBIUzLa_WnoQGNaHTm55tOrF4,11416 +torch/onnx/_internal/fx/type_utils.py,sha256=wK9DTzPY7dfheoXZ9PnALP78z7MR8OpCiiB5zthUf6k,8152 +torch/onnx/_internal/io_adapter.py,sha256=0zZ0uoJGcO8QOushqQ1xgUDfHIWo9Y4_ylSWMVZ68SY,22580 +torch/onnx/_internal/jit_utils.py,sha256=tUfNOj3NeNXSUmZo8WFljrH2on-0Ko5GwERP3Q8jbIE,14102 +torch/onnx/_internal/onnx_proto_utils.py,sha256=0ySJBxLx0nW5lSyE0FJfxEv9qkPWs7HC16YDyvdtGGY,10878 +torch/onnx/_internal/onnxruntime.py,sha256=CibvuzsCUq1RWcQpL0UWzUIdN6VwIstuFla_t5R8PI4,52776 +torch/onnx/_internal/registration.py,sha256=Hq-02LHOaRacUgiGTFQ9bh2Uej-IlCTQlz6VRnZVWd4,11050 
+torch/onnx/_onnx_supported_ops.py,sha256=7VoLPwBIHC5KIoFMqMAi0PrwuBc7HAVMPpGgpIQj0Zc,3331 +torch/onnx/_type_utils.py,sha256=XqHQdvYMVx5y6t2fj3srw1Q8qtIVTJYdXz37_CN13bQ,13940 +torch/onnx/errors.py,sha256=RxhxE8GAvr8Qu-TXGp8UdYDwY2DxzycQ1yiKdZqT0uU,3734 +torch/onnx/operators.py,sha256=1AtFe8wdgIOsZwxfwzWfWCnSgcsofKwEyS8OQ-mZUjc,1300 +torch/onnx/symbolic_caffe2.py,sha256=FX6R1arFhrT9YwkYV2xieJVbxIFCk1LzNpKbK_OKqCE,10971 +torch/onnx/symbolic_helper.py,sha256=Aw9-dGmrla04P3P97J0iB_hZjGFO_AKDeVVhBQ5Sou8,82147 +torch/onnx/symbolic_opset10.py,sha256=CSlsn9NUsQ4oz0OHQ9SX7DmIVyyR8a2KVkWU0IyKIWU,37362 +torch/onnx/symbolic_opset11.py,sha256=gY1reBA0AekkTFOrGgyjjba6wE2b-6i1G40R3OmGC38,53379 +torch/onnx/symbolic_opset12.py,sha256=Q3BvYJBLcX5QRKfogkI-JbtWcQ3cHWyYrmP6GmgqO1Q,15691 +torch/onnx/symbolic_opset13.py,sha256=Fx5RyXeeQYIshnxlVGjU89hUT_tBpA-DqnlJheySyLg,41226 +torch/onnx/symbolic_opset14.py,sha256=SfuZGC4BdARMuba00BXmBvGg-SeEdEZ77nS4hXMp7eg,9455 +torch/onnx/symbolic_opset15.py,sha256=OSOAXpN-enLhjkSq28rY4kLE-cy4Yyu2R-U77Kyyv2Y,2872 +torch/onnx/symbolic_opset16.py,sha256=lBC9GWFVzqLcY_4BTdc5M6eGq_K2A7-v8TbQTryBPws,6410 +torch/onnx/symbolic_opset17.py,sha256=16xXgKL9kb1fJxT4r-HwSXlRNp5WVXS6HcBi22KbryA,7707 +torch/onnx/symbolic_opset18.py,sha256=4qsNEYCC7JakwwqY_ZtJo8Zj2aXADHEOQjZOYqLaR50,8080 +torch/onnx/symbolic_opset19.py,sha256=KlDTm3Ee3eDtFD61qw7v4HmecdF74o_TdhyH0rs3M2g,561 +torch/onnx/symbolic_opset20.py,sha256=pSOXuR9JXtteuts26neYyAyyhhOy92hz7vX74Iz5qbg,2446 +torch/onnx/symbolic_opset7.py,sha256=DK-UZL7LNb1Prfrefk5CFeJjy4qklMDywfHvZcXJwZY,2118 +torch/onnx/symbolic_opset8.py,sha256=AlDVGnoKOpPwGWtxMhl9kIwpZTAdpW_zCoxXFysTJdo,14992 +torch/onnx/symbolic_opset9.py,sha256=lNw0XZGtgaojRs9taGJm8mhVq1O82m8O2v4m0m4omg0,224042 +torch/onnx/utils.py,sha256=BDrx812rkEpRbppwH1GHevixHyyCflyCf9k0jwyGHoM,78620 +torch/onnx/verification.py,sha256=ZdaUETx6ut23BDClF0b7uUFbI76Bm25jb8KvdQpNZnM,68549 +torch/optim/__init__.py,sha256=1uogogvL5ff7-vhRV89Hwbr-7wToIdCtFtJBNzNxx8A,2118 
+torch/optim/__pycache__/__init__.cpython-310.pyc,, +torch/optim/__pycache__/_adafactor.cpython-310.pyc,, +torch/optim/__pycache__/_functional.cpython-310.pyc,, +torch/optim/__pycache__/adadelta.cpython-310.pyc,, +torch/optim/__pycache__/adagrad.cpython-310.pyc,, +torch/optim/__pycache__/adam.cpython-310.pyc,, +torch/optim/__pycache__/adamax.cpython-310.pyc,, +torch/optim/__pycache__/adamw.cpython-310.pyc,, +torch/optim/__pycache__/asgd.cpython-310.pyc,, +torch/optim/__pycache__/lbfgs.cpython-310.pyc,, +torch/optim/__pycache__/lr_scheduler.cpython-310.pyc,, +torch/optim/__pycache__/nadam.cpython-310.pyc,, +torch/optim/__pycache__/optimizer.cpython-310.pyc,, +torch/optim/__pycache__/radam.cpython-310.pyc,, +torch/optim/__pycache__/rmsprop.cpython-310.pyc,, +torch/optim/__pycache__/rprop.cpython-310.pyc,, +torch/optim/__pycache__/sgd.cpython-310.pyc,, +torch/optim/__pycache__/sparse_adam.cpython-310.pyc,, +torch/optim/__pycache__/swa_utils.cpython-310.pyc,, +torch/optim/_adafactor.py,sha256=wVu5r5CcVS4m9g4BhddNpeH4Ymk_GIa6QMubMeh8Qz8,28365 +torch/optim/_functional.py,sha256=IKUBw1Lh7ZxXPTmP8gFTYVjjXLjntY8nfI7xONYPYjU,3265 +torch/optim/_multi_tensor/__init__.py,sha256=c9KWsMAMRXj_T1bDM-YYBkFRpDzJqPXjec997Y7O0k0,1026 +torch/optim/_multi_tensor/__pycache__/__init__.cpython-310.pyc,, +torch/optim/adadelta.py,sha256=x9OjVwjY24BeEYNwchdTyKgu8bW8AG54rB0c4wCV158,16633 +torch/optim/adagrad.py,sha256=p1mOzrAesHGMnIZ41BY3K0-V4O4Ta2PupDHHPP3Gg8I,20761 +torch/optim/adam.py,sha256=vtIuKeVSXpEylhbti5Jnr7dyF-brHlrGHm__9dg4R1g,32055 +torch/optim/adamax.py,sha256=aL93ZNUt3_l0zM6EDWchtqBc_SE8OpnPv84N6OVt7WE,17290 +torch/optim/adamw.py,sha256=l15RlqOefIZefBxfpQMQqtadkIBk6BEoL8OcZTreakA,31153 +torch/optim/asgd.py,sha256=b4QDw1QxLcXBs9BzravsEZ9aueAE1lEU7dabJVI-Kb8,16129 +torch/optim/lbfgs.py,sha256=8YHpIkpyWW8PkCfPxCwL84Gf0TaCiu8P3AAqTpRJU2c,18037 +torch/optim/lr_scheduler.py,sha256=w0pnpLzcfb5EM3_D_r07MwEZZw-CKJlJ_glsVJzSMyw,84717 
+torch/optim/nadam.py,sha256=1wn1xQ5fqqEeO4DN6DgqwshAsEhnTfZKACbrnCnFhW4,26207 +torch/optim/optimizer.py,sha256=XCvDvtsbHBYoJndrJxbodRSXJ6R1b_cL7LSvteKPyB0,44775 +torch/optim/radam.py,sha256=kszxcmX4DFKzX-p63dpk9hSdc6SGTITuzdjJpCDjX5Y,24511 +torch/optim/rmsprop.py,sha256=d_lO4uOx6rtlEiDigjcyCmIq9gr57RQy9PKWBzEKQnE,20269 +torch/optim/rprop.py,sha256=067htK5C55LGVH2wwhG7zDWUW1MEXMOEH2suLAF2LAQ,17556 +torch/optim/sgd.py,sha256=GPMC9XnrgdgQLM2P9sccEvGB8iFUyd5PB2rrQVvX9i0,19036 +torch/optim/sparse_adam.py,sha256=_dm8uCIZN4AYLV3AjdAK2fzD32K8UQv8a-HtMdbNYfA,8019 +torch/optim/swa_utils.py,sha256=xH4WKuiakNx18bm8iHFXXmzVnz3YqbRQ4erJhkKJCq0,18803 +torch/overrides.py,sha256=PhyEvJo3bHzsD0j1fLgcBiQJpaSlMo-KGNkfw3UvV9U,104497 +torch/package/__init__.py,sha256=ZLLvoviHHErV-XQZagde2I4cuNDK49dFLRGUC5oyOFc,388 +torch/package/__pycache__/__init__.cpython-310.pyc,, +torch/package/__pycache__/_digraph.cpython-310.pyc,, +torch/package/__pycache__/_directory_reader.cpython-310.pyc,, +torch/package/__pycache__/_importlib.cpython-310.pyc,, +torch/package/__pycache__/_mangling.cpython-310.pyc,, +torch/package/__pycache__/_mock.cpython-310.pyc,, +torch/package/__pycache__/_package_pickler.cpython-310.pyc,, +torch/package/__pycache__/_package_unpickler.cpython-310.pyc,, +torch/package/__pycache__/_stdlib.cpython-310.pyc,, +torch/package/__pycache__/file_structure_representation.cpython-310.pyc,, +torch/package/__pycache__/find_file_dependencies.cpython-310.pyc,, +torch/package/__pycache__/glob_group.cpython-310.pyc,, +torch/package/__pycache__/importer.cpython-310.pyc,, +torch/package/__pycache__/package_exporter.cpython-310.pyc,, +torch/package/__pycache__/package_importer.cpython-310.pyc,, +torch/package/_digraph.py,sha256=0lx8BBj-3XwFOoBqHB9rZN8zKJWq53OG0KjmNun70UA,5659 +torch/package/_directory_reader.py,sha256=JB099nQnrOpTTjBlMsL1OpwTKfRxnA45PkhoJMAoPhY,1922 +torch/package/_importlib.py,sha256=chdvY9cde326LrzXj8VnjV_3eNqrMlhuPwhuEO0BvaQ,2998 
+torch/package/_mangling.py,sha256=oslLWUIWHG6_eBtUNY-p3b9q4hY70Jkm4q8OFULKh-8,1890 +torch/package/_mock.py,sha256=n4L1d-hoa5XO7kyAlxbYMjeZWa2vwQ_-7Ih_dZfH0SA,2866 +torch/package/_package_pickler.py,sha256=RS0sQH5WR56SErApYGb1M7DwuQu5lVKK2iZEQ3_8Vzk,4627 +torch/package/_package_unpickler.py,sha256=hBxGPJDOF1wWmGVAfvN4FMCmm1o1Qt4pDKPmCdSdH-k,992 +torch/package/_stdlib.py,sha256=Zqsdydc0of0NvJqZBkzL3Iy1hWBrkNp9Kyk-u4BX6R4,7265 +torch/package/analyze/__init__.py,sha256=RtjmM0jmYQwfuv9mQoKgZQZBij-GQ4cFcJX7_-aihDg,130 +torch/package/analyze/__pycache__/__init__.cpython-310.pyc,, +torch/package/analyze/__pycache__/find_first_use_of_broken_modules.cpython-310.pyc,, +torch/package/analyze/__pycache__/is_from_package.cpython-310.pyc,, +torch/package/analyze/__pycache__/trace_dependencies.cpython-310.pyc,, +torch/package/analyze/find_first_use_of_broken_modules.py,sha256=3pGELTVtiR0H7dHgu0ubgbeW9BUobmxTs1oKBwpZpXw,1066 +torch/package/analyze/is_from_package.py,sha256=xnYu_xdTqKdosT6pJtZrnrQaG14qGLlwyCBCwqCfu0I,404 +torch/package/analyze/trace_dependencies.py,sha256=FdlGRhRCVz_dTdxLxb0DIC6nY9FWa1LwZKN5bdhU_SY,2221 +torch/package/file_structure_representation.py,sha256=NylZYKrowvn5OhaPtFJKgVSrO9QCVQy8wQiLNm3wem8,4783 +torch/package/find_file_dependencies.py,sha256=PEMGg35P-CMBKrVrOtf3E8tk3mLIeF0HXtQPmFXHyKU,3982 +torch/package/glob_group.py,sha256=T4hVeMnbaOz1OeWn8vrZEZmFw4cqvXL4btsatFGoYUo,3638 +torch/package/importer.py,sha256=0uS4sLVfu8GqdJDhTofaXJe5zjcR53pcQWD7hD65nHM,8911 +torch/package/package_exporter.py,sha256=a7CTuXi6oE1vbTxVV9ruFZSCHp-DM3P9sOg92xt3acw,50901 +torch/package/package_importer.py,sha256=cLQCapYCKbx3YB4RjGdiQ67T2YNCv9-KKkGIEsCybiw,31513 +torch/profiler/__init__.py,sha256=q_RuLUR1thU_3EZ8fON3pNbullF71SN-iV0IOcFdlvQ,1462 +torch/profiler/__pycache__/__init__.cpython-310.pyc,, +torch/profiler/__pycache__/_memory_profiler.cpython-310.pyc,, +torch/profiler/__pycache__/_pattern_matcher.cpython-310.pyc,, +torch/profiler/__pycache__/_utils.cpython-310.pyc,, 
+torch/profiler/__pycache__/itt.cpython-310.pyc,, +torch/profiler/__pycache__/profiler.cpython-310.pyc,, +torch/profiler/__pycache__/python_tracer.cpython-310.pyc,, +torch/profiler/_memory_profiler.py,sha256=tE0JoZHZtr5Gud_NVyIKHMO0eirtza4HLibuZ3wPIGE,48138 +torch/profiler/_pattern_matcher.py,sha256=boLS-xvMJt39ZdqPwNdkhwntARLalh_XZ_ugmoxUbIA,24782 +torch/profiler/_utils.py,sha256=kJZYXIfSFk14-Eav8LF8yeTL2YApRyAwUNHt8T7uXY4,13945 +torch/profiler/itt.py,sha256=Ss_JzePIXOvQZnXu-gXFbZ0_8ncdZLR33Ivz_F_eWQk,1782 +torch/profiler/profiler.py,sha256=kvtuluvC2F9y_8ShRdKaNcbft4L_bQEmI1z7bZkBfmg,37366 +torch/profiler/python_tracer.py,sha256=t2HhbERUPlMtdDrQ2B63QbAwfuK8oFE5GT3uuBCYAaA,497 +torch/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/quantization/__init__.py,sha256=LmMJvgk3tMRKE6h0iFl4iHTHHfigyDpMhZ-EFf5ITbw,2653 +torch/quantization/__pycache__/__init__.cpython-310.pyc,, +torch/quantization/__pycache__/_numeric_suite.cpython-310.pyc,, +torch/quantization/__pycache__/_numeric_suite_fx.cpython-310.pyc,, +torch/quantization/__pycache__/_quantized_conversions.cpython-310.pyc,, +torch/quantization/__pycache__/fake_quantize.cpython-310.pyc,, +torch/quantization/__pycache__/fuse_modules.cpython-310.pyc,, +torch/quantization/__pycache__/fuser_method_mappings.cpython-310.pyc,, +torch/quantization/__pycache__/observer.cpython-310.pyc,, +torch/quantization/__pycache__/qconfig.cpython-310.pyc,, +torch/quantization/__pycache__/quant_type.cpython-310.pyc,, +torch/quantization/__pycache__/quantization_mappings.cpython-310.pyc,, +torch/quantization/__pycache__/quantize.cpython-310.pyc,, +torch/quantization/__pycache__/quantize_fx.cpython-310.pyc,, +torch/quantization/__pycache__/quantize_jit.cpython-310.pyc,, +torch/quantization/__pycache__/stubs.cpython-310.pyc,, +torch/quantization/__pycache__/utils.cpython-310.pyc,, +torch/quantization/_numeric_suite.py,sha256=kGqWAUJhc0DKg9RjXhiKRx7dxfmX-M5Y1w9jAEnU0Z0,779 
+torch/quantization/_numeric_suite_fx.py,sha256=LkEYZQLt_CRTe4d_9EAvCzpuKKHTmTjYoXLXcawjN-I,752 +torch/quantization/_quantized_conversions.py,sha256=NEKk44IHWcW_ghu3FnR06YVxHmFU7onKMTi-PzbiuOY,4321 +torch/quantization/fake_quantize.py,sha256=AZes9LhE_KB-0Q_nV0Qy5kSOY0N9cvv7NPEpdjTwBz4,1015 +torch/quantization/fuse_modules.py,sha256=eLwRn_McF-cLDjCtgzvd6E3TWh7Fwpp3LuhS1ZrmEKM,731 +torch/quantization/fuser_method_mappings.py,sha256=AWk97tidfAL4jlhmYQgpObMusEsNc8aFAX0sxT3XH6Y,511 +torch/quantization/fx/__init__.py,sha256=s3Wh6JJbmUuv-pk4yieKjDpsHZRUkEqW04jRMT9iJPs,594 +torch/quantization/fx/__pycache__/__init__.cpython-310.pyc,, +torch/quantization/fx/__pycache__/_equalize.cpython-310.pyc,, +torch/quantization/fx/__pycache__/convert.cpython-310.pyc,, +torch/quantization/fx/__pycache__/fuse.cpython-310.pyc,, +torch/quantization/fx/__pycache__/fusion_patterns.cpython-310.pyc,, +torch/quantization/fx/__pycache__/graph_module.cpython-310.pyc,, +torch/quantization/fx/__pycache__/match_utils.cpython-310.pyc,, +torch/quantization/fx/__pycache__/pattern_utils.cpython-310.pyc,, +torch/quantization/fx/__pycache__/prepare.cpython-310.pyc,, +torch/quantization/fx/__pycache__/quantization_patterns.cpython-310.pyc,, +torch/quantization/fx/__pycache__/quantization_types.cpython-310.pyc,, +torch/quantization/fx/__pycache__/utils.cpython-310.pyc,, +torch/quantization/fx/_equalize.py,sha256=8LGj1MjWPuZ9i4hXy2QPmD9BBBFGhiMZtlkf9j5GPuA,1250 +torch/quantization/fx/convert.py,sha256=31cFTuIm23CT4JLdSJBHzi_spOsZs0peGyNzhny_rgk,386 +torch/quantization/fx/fuse.py,sha256=wc8SOcX_IxIWXS-E5WZhj0N6KCxpDHQGTUoAaq2Y9JI,380 +torch/quantization/fx/fusion_patterns.py,sha256=Ijjfig2bxM_YzaoEzsIPTg4NuZG6p2BPgV_Dw2bmYFw,415 +torch/quantization/fx/graph_module.py,sha256=bI7jvFL15dnuHLO8c8-tKebQO16XaWRir9jwIditoXU,573 +torch/quantization/fx/match_utils.py,sha256=ndaJFGvJfXCqUmzEyUEfRiNkDwoYV2Xxd39iFQdD2bo,456 +torch/quantization/fx/pattern_utils.py,sha256=SHrwfsEdA-cdEMur19So_gDm_cmdD9nfecvt84fv3DY,1298 
+torch/quantization/fx/prepare.py,sha256=onVEmW0FUj5y3NFZVdGnYVK7KbJWroMF23pNFBSVDk4,386 +torch/quantization/fx/quantization_patterns.py,sha256=Fa7d9WBM6N44Anrq8MsanKa_M4x4hCiUMS8q-kqe5uI,2087 +torch/quantization/fx/quantization_types.py,sha256=Jo921qhPFWn9AqlZODQOjBvmQYwdXUt_vOWVpwkEvwE,395 +torch/quantization/fx/utils.py,sha256=RlQwgLPd_v1OKtjeSfFNqhbPRHW7a-rZLfKsek3uY8s,723 +torch/quantization/observer.py,sha256=ztgpjHb7Q1mB7f_i32tZRkdj8i7eYQP3uSqVGFDAWDA,1078 +torch/quantization/qconfig.py,sha256=avG-OasJvdG1szQJli3PW8eoNo7TVH6vrLO-Lv14SNE,910 +torch/quantization/quant_type.py,sha256=wwPL8MSv-G6QV5LogvkL4xRJ28Ido0yA7kPzh2dsIo0,399 +torch/quantization/quantization_mappings.py,sha256=yIflm9ncZNnznvmc6oPKxWD4fPA6C44-EeMVbtAcVl0,1147 +torch/quantization/quantize.py,sha256=g8mA0kRJIO2z63PuUcawc9bovat-uVjeUxIKFwvLb-I,804 +torch/quantization/quantize_fx.py,sha256=hBC2pUobd9oeYUliLKlIYE2dQ-6BQvdxg2xTyvkFLa8,736 +torch/quantization/quantize_jit.py,sha256=JNw2_M1d6EITYzYrXlrAsUac_4tX-IDMuAO_7prxhk8,714 +torch/quantization/stubs.py,sha256=ukEM_vZ34I3UdBnhFPn5GFL713MwFlEysXUAfYGshnA,392 +torch/quantization/utils.py,sha256=kOpHHmJ602vEybccn81hVLOHwY0CQ2YdnG9n9kOVcQM,833 +torch/quasirandom.py,sha256=H13LdePTsr7eSeCrp4v13L7T3YEKsQEcnTQvvD9WbVA,7948 +torch/random.py,sha256=PLPVMshU9l8p4-XqDcGoeKv7RKdAodDqtONbu1MCJ6s,7143 +torch/return_types.py,sha256=FptLQ227rc2ihQg89v3hGmt2LCqBizLVDAVVWCxZAWk,1485 +torch/return_types.pyi,sha256=z2h5r3y96us2RfyGpkkN82rg8D55NX_4aA-XHce5ZTM,14459 +torch/serialization.py,sha256=el1rN2Ki5GV_BJz1Nj9BbeTn76oRc6QBkxXyLyCQoPU,73332 +torch/share/cmake/ATen/ATenConfig.cmake,sha256=Fu4Jh3vhuxYcjsZfQ71erhUpBbyJr0-gwuAk6I7FqKQ,263 +torch/share/cmake/Caffe2/Caffe2Config.cmake,sha256=bXuUPJOCOrubZb5q4HfJyvjB-3oHodLdLviE6pCs6-Q,5107 +torch/share/cmake/Caffe2/Caffe2Targets-release.cmake,sha256=_hMee-otK3Wog4Dvs2EjYsXY-aEoG4nZGgVnSVpmy8g,2448 +torch/share/cmake/Caffe2/Caffe2Targets.cmake,sha256=R-4YyAduUud9fkvP5MyHrvml3RZqL0Kf9ro3hUwOLPY,7272 
+torch/share/cmake/Caffe2/FindCUDAToolkit.cmake,sha256=kl99c2bqe1LwRcg7k8VkwqL99pQ9CF9vTwG-kfKIu84,38768 +torch/share/cmake/Caffe2/FindCUDSS.cmake,sha256=54i8T6FKzYP1KMtHTXaUhsuCuXHogM8SDadMAUNZT8I,2698 +torch/share/cmake/Caffe2/FindCUSPARSELT.cmake,sha256=cjNafMBEzBGv0kZAQqMxdpku0Vyf2Yrn3vJ_OVHxdUQ,3068 +torch/share/cmake/Caffe2/FindSYCLToolkit.cmake,sha256=T_86WwfKLM3iiDf12Vkh7DlismVkLzpYC_SNhDFqG1k,2553 +torch/share/cmake/Caffe2/Modules_CUDA_fix/FindCUDA.cmake,sha256=78mPFyIV7lR9kDJ8vAjJwKUDLpFhbssiLz5tR9CmDxk,525 +torch/share/cmake/Caffe2/Modules_CUDA_fix/FindCUDNN.cmake,sha256=NKwIx_LRJ3uu9oJC9_apdlRooZ-uR_izK1_deByfEQo,3085 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/CMakeInitializeConfigs.cmake,sha256=v1O1FBKmJWk1h-Zbh8M6qVlP4OqVsmCcYD8zwdVfb6Q,1657 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindCUDA.cmake,sha256=9zZASjPxjc9LVroI2zPfaP7-R1AdPQcL-bm9QFxtP58,86655 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindCUDA/make2cmake.cmake,sha256=_KLZxL3AhZehZKubThy4o2C_gEH5mm4h3kMpxLHgajU,3925 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindCUDA/parse_cubin.cmake,sha256=h3Ka8c-mmE2Majl0s8342faZWVPAaDbO1rM_4m--YzA,3439 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindCUDA/run_nvcc.cmake,sha256=2KcM296B27vgXYPm_QnN-k-6s16Xkv8NPwP7295j4sE,11813 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindCUDA/select_compute_arch.cmake,sha256=NwkcO-efBVPKltz0S6HRJ-b8vojQufXT6i9AOVhlHpw,10752 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindPackageHandleStandardArgs.cmake,sha256=aLA1Dg7qyjW9Eya73fiwm2RqiG0FGwPOV64Sm5-Nobc,14902 +torch/share/cmake/Caffe2/Modules_CUDA_fix/upstream/FindPackageMessage.cmake,sha256=ToKFxPt7HSmEA014cFkMZl79quM2gpF7tmcP8h1BuYs,1564 +torch/share/cmake/Caffe2/public/LoadHIP.cmake,sha256=NNGbqV5PBw4GFLKUYx7O7bpIrE2HRR-wILBW6EBiaXI,8605 +torch/share/cmake/Caffe2/public/cuda.cmake,sha256=98BXZnIld0aqVjWOyWlscJYPgRcHEF0ng8V1995jEOM,14033 
+torch/share/cmake/Caffe2/public/gflags.cmake,sha256=YrTkm-nQX6N3mh3tpY9a-i1_s_lAlifW0V_UZdAHS-M,2620 +torch/share/cmake/Caffe2/public/glog.cmake,sha256=zy1mZaicXNUHxSG7eOZ4ZVnbgk5qN-IuG_-jXUi98nk,2320 +torch/share/cmake/Caffe2/public/mkl.cmake,sha256=y1E6axKvT-AvgfmJrVMptYDVJ5svt723jNuQYIJp_kg,1317 +torch/share/cmake/Caffe2/public/mkldnn.cmake,sha256=7D8oS35genLaLdSPfgh3foWXQMsa2bUsQnB5WKfKPAA,444 +torch/share/cmake/Caffe2/public/protobuf.cmake,sha256=weW3OuHBIqIt0KpWRWzAnzSCu-AJ9naUfkjxcvaRl5c,4003 +torch/share/cmake/Caffe2/public/utils.cmake,sha256=2ZJqRj37NPxO4Hd8CZCb4jq-1APR2QMq4AVmVMBd3-0,20621 +torch/share/cmake/Caffe2/public/xpu.cmake,sha256=V1OYG4Ar7UIJRI9Sq5_z9NNsxeGx8q4-R2bi7ODD-48,643 +torch/share/cmake/Tensorpipe/TensorpipeTargets-release.cmake,sha256=Z2BoA3v4t5CER3JhpvVXPyjHZMXUrxDE2d43oLRvtPg,1767 +torch/share/cmake/Tensorpipe/TensorpipeTargets.cmake,sha256=Du2evHbOEbIHNe9gzv7548p2XUtHcmvnOYX3op4aFG4,3956 +torch/share/cmake/Torch/TorchConfig.cmake,sha256=pAR8zmOrmsRD07g8uUnHFVX8RrI0Pw_AifWSVekYgBM,5121 +torch/share/cmake/Torch/TorchConfigVersion.cmake,sha256=LVDC_j0AczVgijZ64CXpytTJxMnSjmeG_7bt23vA-3s,366 +torch/signal/__init__.py,sha256=Eogmvuz6NdECfSTByt08-ZQw0Fec-GfQNNY77stj3zg,51 +torch/signal/__pycache__/__init__.cpython-310.pyc,, +torch/signal/windows/__init__.py,sha256=agqPFTqIfPJIMisuuSMnTJ3jQY7BKBI-NB6CWVB-j4U,383 +torch/signal/windows/__pycache__/__init__.cpython-310.pyc,, +torch/signal/windows/__pycache__/windows.cpython-310.pyc,, +torch/signal/windows/windows.py,sha256=dacPeAGSgaVOEIveFeubWds5ATW7Fy6Ee91FChKBoLs,23393 +torch/sparse/__init__.py,sha256=ugmkv1qO_RRRuk4vx3nEzqGeSa8TDn2lh6VDxCu7xUY,25529 +torch/sparse/__pycache__/__init__.cpython-310.pyc,, +torch/sparse/__pycache__/_semi_structured_conversions.cpython-310.pyc,, +torch/sparse/__pycache__/_semi_structured_ops.cpython-310.pyc,, +torch/sparse/__pycache__/_triton_ops.cpython-310.pyc,, +torch/sparse/__pycache__/_triton_ops_meta.cpython-310.pyc,, 
+torch/sparse/__pycache__/semi_structured.cpython-310.pyc,, +torch/sparse/_semi_structured_conversions.py,sha256=azpzC6RJB8TSRLEZs9LRBPBSEkvA6a0A-nx1zsIjgUk,14014 +torch/sparse/_semi_structured_ops.py,sha256=qn0Z8MNCOKVTFDAyq73-6_JLsLshkgK_XSCPOtWjWMQ,5203 +torch/sparse/_triton_ops.py,sha256=fKK4eKBGKBTNwyBhqMhDavpZh_aqgdXMQpl1cplBdQs,81876 +torch/sparse/_triton_ops_meta.py,sha256=rso7h1ognguTOwAjW-kP4F_NLJIIoUtwS70AHd99MJ4,475453 +torch/sparse/semi_structured.py,sha256=5J5e50ANUV1LN47Tj8sLOY50bVGc6kFjBWZhMk3jXPE,27467 +torch/special/__init__.py,sha256=cNcIkzmknQKqoPiCtcUsBJhCYs-d_E7c_RxU39st6tc,32694 +torch/special/__pycache__/__init__.cpython-310.pyc,, +torch/storage.py,sha256=QeizcNlcC02ts0KpdL3nJyknODETaNzgo9QbQXkwKLs,51392 +torch/testing/__init__.py,sha256=8bgnxGIy9DJeX9dIh6iTPqrlGDQqeAEkmWvfhLdDnP4,187 +torch/testing/__pycache__/__init__.cpython-310.pyc,, +torch/testing/__pycache__/_comparison.cpython-310.pyc,, +torch/testing/__pycache__/_creation.cpython-310.pyc,, +torch/testing/__pycache__/_utils.cpython-310.pyc,, +torch/testing/_comparison.py,sha256=8rUfK7IboJIIU2CDRoAnreOCtia8stkTOiBKHqvLajA,63134 +torch/testing/_creation.py,sha256=Zf3FPE56rGBqBKAGKv6kttvqBdGwx4jBX7v9JqoNX1M,11890 +torch/testing/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/__pycache__/autocast_test_lists.cpython-310.pyc,, +torch/testing/_internal/__pycache__/autograd_function_db.cpython-310.pyc,, +torch/testing/_internal/__pycache__/check_kernel_launches.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_cuda.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_device_type.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_dist_composable.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_distributed.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_dtype.cpython-310.pyc,, 
+torch/testing/_internal/__pycache__/common_fsdp.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_jit.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_methods_invocations.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_mkldnn.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_modules.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_nn.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_optimizers.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_pruning.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_quantization.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_quantized.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_subclass.cpython-310.pyc,, +torch/testing/_internal/__pycache__/common_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/composite_compliance.cpython-310.pyc,, +torch/testing/_internal/__pycache__/custom_op_db.cpython-310.pyc,, +torch/testing/_internal/__pycache__/custom_tensor.cpython-310.pyc,, +torch/testing/_internal/__pycache__/dist_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/dynamo_test_failures.cpython-310.pyc,, +torch/testing/_internal/__pycache__/hop_db.cpython-310.pyc,, +torch/testing/_internal/__pycache__/hypothesis_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/inductor_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/jit_metaprogramming_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/jit_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/logging_tensor.cpython-310.pyc,, +torch/testing/_internal/__pycache__/logging_utils.cpython-310.pyc,, +torch/testing/_internal/__pycache__/quantization_torch_package_models.cpython-310.pyc,, +torch/testing/_internal/__pycache__/static_module.cpython-310.pyc,, +torch/testing/_internal/__pycache__/torchbind_impls.cpython-310.pyc,, +torch/testing/_internal/__pycache__/triton_utils.cpython-310.pyc,, 
+torch/testing/_internal/__pycache__/two_tensor.cpython-310.pyc,, +torch/testing/_internal/autocast_test_lists.py,sha256=ilCPryBx3ok6iiJ5CiwNLtFGqnVLZ2sKIUD0el3G2rw,28617 +torch/testing/_internal/autograd_function_db.py,sha256=r1_GSuXWuya7EXlgpBaCn_T1ATp7LwcJ4l2u9-eCsQQ,19689 +torch/testing/_internal/check_kernel_launches.py,sha256=fpt87LoSU6nmsbEI9JVB34z3DQv4Uu-pYxqXre3JMJA,6053 +torch/testing/_internal/codegen/__init__.py,sha256=8QLhisbHub6VJl6egijnrOPKK5QNAe5FJhfcxEelj4Y,22 +torch/testing/_internal/codegen/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/common_cuda.py,sha256=IG3pzxoH5GnYvhuJh8OjMwcr7lH3Ff6MCBYlj-0IHGY,12447 +torch/testing/_internal/common_device_type.py,sha256=PkxJ_urbepxPwV8XCV5SZdbnFXwkL2JnG7e095WMklo,69851 +torch/testing/_internal/common_dist_composable.py,sha256=sMyH-QpvDrjWTgqHuHKOqsMWvAz1gNsG_efWy_7zD94,3424 +torch/testing/_internal/common_distributed.py,sha256=r9B5-krnFll2Wr1__jOC_TUFPYnLeAGiLbG0mOoSWVg,50824 +torch/testing/_internal/common_dtype.py,sha256=FbijAmN-ywu3E7VEwf79deCxGheiCSVQBRJpZ5Jrbv4,4439 +torch/testing/_internal/common_fsdp.py,sha256=PzCdfuEmc4XfLEEjUyRvwAgTiUjjoa7s0knvTvBCpSU,56231 +torch/testing/_internal/common_jit.py,sha256=ACQlL_wDfxAMCd3cBYHKZ-jbx_V9NnvwxE_uKZBGLMY,15853 +torch/testing/_internal/common_methods_invocations.py,sha256=92B4Y8APvI465YbxXEVtMo6r2SbMrZ5XZhPPraWlDIA,1181768 +torch/testing/_internal/common_mkldnn.py,sha256=AEHUbUWDAbJ-y4csGwF9-0I6jMfzVb0AdvvPAtgbCfU,2314 +torch/testing/_internal/common_modules.py,sha256=wQA2RNFvY2jBvQSjZsSdZ_b9iBpU27VYBK-N5FV7sLs,217315 +torch/testing/_internal/common_nn.py,sha256=MXDqDOxrIGJIFtjXQqD8fWKAGGeX5eC82hCAKpk8q7I,165840 +torch/testing/_internal/common_optimizers.py,sha256=46q-_413bOotC6iXXlxwSJ6xTcffGqOJFzyPKdHiVPc,84338 +torch/testing/_internal/common_pruning.py,sha256=MldR_OyIPIlwpWyuEO49dFwRlCI8yxtSxEv_LF_12w8,13632 +torch/testing/_internal/common_quantization.py,sha256=jgaUwtX7HiqWsgxIFfXVX91Vfj_zKr_uX3ClLTPIiVY,109649 
+torch/testing/_internal/common_quantized.py,sha256=h31x20TrhVmHAKh7eUc82nnfCeQDWIqO4awNxV69eM8,8702 +torch/testing/_internal/common_subclass.py,sha256=rzqGr0qGROGOytHn3SSFZaoNYLOhwzEjfSthiXTfWGQ,9356 +torch/testing/_internal/common_utils.py,sha256=7tdUlMnVeTq3YvIKgZ60hGSdMs84I7yMiV8EdbOENNo,220549 +torch/testing/_internal/composite_compliance.py,sha256=l4V4ek9S0KRIJayWiKUiyqXt0H30Io-0JU7u5dMjy30,25249 +torch/testing/_internal/custom_op_db.py,sha256=VB6dXOv5vNLxaXHoITp47XK01v1EK-Ig9Tdj9wVUQyY,19675 +torch/testing/_internal/custom_tensor.py,sha256=tCdFTgxoPBvSmHTMCTomtHmG7a_cX7xH3PQiriYVKpM,2255 +torch/testing/_internal/data/__init__.py,sha256=8QLhisbHub6VJl6egijnrOPKK5QNAe5FJhfcxEelj4Y,22 +torch/testing/_internal/data/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/data/__pycache__/network1.cpython-310.pyc,, +torch/testing/_internal/data/__pycache__/network2.cpython-310.pyc,, +torch/testing/_internal/data/network1.py,sha256=ksE5iUCq6Hhp8772pRhqpUna414huKdlzvh92oQgxTc,169 +torch/testing/_internal/data/network2.py,sha256=FlOrR6LuubeOhZHKURvkoWqIGHlvMNX2SyoZ22XPvfg,199 +torch/testing/_internal/dist_utils.py,sha256=oyAsjRbhS7OMLojgvOfgEhWNk8Yi34I9jE2jKZhCpzg,7280 +torch/testing/_internal/distributed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/checkpoint_utils.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/common_state_dict.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/ddp_under_dist_autograd_test.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/distributed_test.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/distributed_utils.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/fake_pg.cpython-310.pyc,, +torch/testing/_internal/distributed/__pycache__/multi_threaded_pg.cpython-310.pyc,, 
+torch/testing/_internal/distributed/__pycache__/rpc_utils.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/__init__.py,sha256=mIQlHRV-d0Zr3TOwF0Vr4fBPuPk4uNR8rzKPy1EhlPw,27 +torch/testing/_internal/distributed/_shard/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/__pycache__/test_common.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/sharded_tensor/__init__.py,sha256=3A1jQLTCCL2uRm9_4TZPobqajP80N0lT3GAIqCp7DA8,3179 +torch/testing/_internal/distributed/_shard/sharded_tensor/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/sharded_tensor/__pycache__/_test_ops_common.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/sharded_tensor/__pycache__/_test_st_common.cpython-310.pyc,, +torch/testing/_internal/distributed/_shard/sharded_tensor/_test_ops_common.py,sha256=_-nKiNMGMIjgLJcB19AVY4eFadBnOIoWwGhV__iX-n4,4010 +torch/testing/_internal/distributed/_shard/sharded_tensor/_test_st_common.py,sha256=iVN2VFZ3HF-cAzuO9nbXVJdkEW2Wqj3DJuZNE1dKVUA,1701 +torch/testing/_internal/distributed/_shard/test_common.py,sha256=A1pabFfzG9ZMWY5thDbZiFNFh9-D5QZrU1PQDo1Z90A,1220 +torch/testing/_internal/distributed/_tensor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/_tensor/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/_tensor/__pycache__/common_dtensor.cpython-310.pyc,, +torch/testing/_internal/distributed/_tensor/common_dtensor.py,sha256=K5BNqizpBTwDE6RwEcxdY0p-gUmuSRtVP80EyDh5kCs,19901 +torch/testing/_internal/distributed/checkpoint_utils.py,sha256=MgHGs3VZXTQntxnyX3kS0rGwyOI-IsdXAq46-Dsnck0,1519 +torch/testing/_internal/distributed/common_state_dict.py,sha256=csiOaCYCkgyseS0RylSv2EvC3s_ArSYGYywAZakLhCM,4650 +torch/testing/_internal/distributed/ddp_under_dist_autograd_test.py,sha256=woAvv0RjPG02qrbmOoPYuWHlQuW6z8gkWc9CHdV_iyg,26768 
+torch/testing/_internal/distributed/distributed_test.py,sha256=tyn79PXvg0RgH3IMUIZHDwUAQR3SKUEhhn3sWK-t3mA,439515 +torch/testing/_internal/distributed/distributed_utils.py,sha256=KJ3W5wzwQ2fRipAFK2SvHh-qAAbiouZz0QhYc3VvqWM,1948 +torch/testing/_internal/distributed/fake_pg.py,sha256=OfRRXR7TtJ0e2VhTR2bvUsv34rqbQXkwDhPnz1i8Qck,1036 +torch/testing/_internal/distributed/multi_threaded_pg.py,sha256=PTaw2JUgTJNwZcLoR-Pa4wlmgKy35ssEwgFJuB__gY0,19165 +torch/testing/_internal/distributed/nn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/nn/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/nn/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/nn/api/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/nn/api/__pycache__/remote_module_test.cpython-310.pyc,, +torch/testing/_internal/distributed/nn/api/remote_module_test.py,sha256=-xOsoxc3Av5-b2JJcc5sd96BJPR7mSmJJ3eh_uOU2Xw,29258 +torch/testing/_internal/distributed/rpc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/rpc/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/dist_autograd_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/dist_optimizer_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/faulty_agent_rpc_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/faulty_rpc_agent_test_fixture.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/rpc_agent_test_fixture.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/rpc_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/__pycache__/tensorpipe_rpc_agent_test_fixture.cpython-310.pyc,, 
+torch/testing/_internal/distributed/rpc/dist_autograd_test.py,sha256=PHM0fs0P829s06OxotcR9VG8vbwa7ATfFh5TZBpxqsk,107721 +torch/testing/_internal/distributed/rpc/dist_optimizer_test.py,sha256=OnfP6ocCB_3NIKY8Yma814mDP35tGeSV1HkfWK5EsC4,10630 +torch/testing/_internal/distributed/rpc/examples/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/rpc/examples/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/examples/__pycache__/parameter_server_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/examples/__pycache__/reinforcement_learning_rpc_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/examples/parameter_server_test.py,sha256=wI0JC5F4e1S8O2D_fmg4SO57ZZZyzsf2x1L5bi0Yb28,4565 +torch/testing/_internal/distributed/rpc/examples/reinforcement_learning_rpc_test.py,sha256=8_eHKwh7xmrNneAZzGL2KjkBLe5B00T7vsRO4SDjHPo,9351 +torch/testing/_internal/distributed/rpc/faulty_agent_rpc_test.py,sha256=MWwx4_t8Nm91TI-STy_727CZHPd3qiWaAd_BxJSJONE,14132 +torch/testing/_internal/distributed/rpc/faulty_rpc_agent_test_fixture.py,sha256=iAO0RxouapS9mWadN9Dbly7YaL-RlGbGIYwzy28Xl8s,2223 +torch/testing/_internal/distributed/rpc/jit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/distributed/rpc/jit/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/jit/__pycache__/dist_autograd_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/jit/__pycache__/rpc_test.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/jit/__pycache__/rpc_test_faulty.cpython-310.pyc,, +torch/testing/_internal/distributed/rpc/jit/dist_autograd_test.py,sha256=GxbvFqIcbMrpm4T7uKb3hyvXh0tRltlLT2of-zmkCPk,4235 +torch/testing/_internal/distributed/rpc/jit/rpc_test.py,sha256=TXZVQrsLQAPn2-_kn-RJZDKuc3NYDFsT_VWCWP-TIqM,47284 
+torch/testing/_internal/distributed/rpc/jit/rpc_test_faulty.py,sha256=zvi2woSyGEtuYqrrfF9KhPTWCw7z_b3V1_8tMIthzig,8032 +torch/testing/_internal/distributed/rpc/rpc_agent_test_fixture.py,sha256=y5P27BCxk82IPTVOzov4-Ojr3_tykIOGlLlSEwWEv_w,1874 +torch/testing/_internal/distributed/rpc/rpc_test.py,sha256=1fl-mE8jo1a0vF_GKO3JHSLrX55kOvVQxqxYblAwvw4,228949 +torch/testing/_internal/distributed/rpc/tensorpipe_rpc_agent_test_fixture.py,sha256=pZOWtt93H3CEccVdLQdk_lw2jwp20XiHhaQ9MaP4oZ0,1029 +torch/testing/_internal/distributed/rpc_utils.py,sha256=nQXoNou_DtWTgtcd1B5KXFz8QABborUyjhcUz1huMYg,6603 +torch/testing/_internal/dynamo_test_failures.py,sha256=qr42DfIJSvdp2-Bps1Kc9VKynhXdLOEX_FtGJW4YKDQ,4698 +torch/testing/_internal/generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/generated/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/generated/__pycache__/annotated_fn_args.cpython-310.pyc,, +torch/testing/_internal/generated/annotated_fn_args.py,sha256=_H6XTUvdFoQ0fHu4ezMioTeDOC6Q6biwDGXSMgM95x0,543382 +torch/testing/_internal/hop_db.py,sha256=xncf8VfC2c9JYcYTgv-qrPrxgq3wr4hr32Phvxdq_yA,8564 +torch/testing/_internal/hypothesis_utils.py,sha256=tyjRa4vgdnU4HF7pQvZIYXnNpEpNl_avnC4wcUMS2_k,14691 +torch/testing/_internal/inductor_utils.py,sha256=WvME7KJh_PJMkEriOJw06Mm6xf_06zVJa2SE5UtQbOg,3392 +torch/testing/_internal/jit_metaprogramming_utils.py,sha256=-2DErolT5Ih-mvA68miCJ1gTtI4jyNm8elhLiSNLS14,32976 +torch/testing/_internal/jit_utils.py,sha256=HD7dGvG-2MSjTZXc1w0F5tMB3S3SZPuSlN-zNAeEzek,34006 +torch/testing/_internal/logging_tensor.py,sha256=TtUU87tkLEc0jDVrvPf9rq8YJwpPlhgelj_e3eAZYhE,6941 +torch/testing/_internal/logging_utils.py,sha256=9lSYwirN_Gls0qaF2zP_DvJLl8GsC3Yxk5WKS3NpN2w,6926 +torch/testing/_internal/opinfo/__init__.py,sha256=6PWvlARagSjyrZW5xCOA5YGQjLqwaijc36TFX4UrU9g,116 +torch/testing/_internal/opinfo/__pycache__/__init__.cpython-310.pyc,, 
+torch/testing/_internal/opinfo/__pycache__/core.cpython-310.pyc,, +torch/testing/_internal/opinfo/__pycache__/refs.cpython-310.pyc,, +torch/testing/_internal/opinfo/__pycache__/utils.cpython-310.pyc,, +torch/testing/_internal/opinfo/core.py,sha256=ohQK9sWtOSEetXQRaeiimNhYXArwcpWOCwaYJ879D0A,113587 +torch/testing/_internal/opinfo/definitions/__init__.py,sha256=wwXg5PkkHOqH3hskQn9YkBu_afa04o0BQlOS5_dJiJQ,477 +torch/testing/_internal/opinfo/definitions/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/_masked.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/fft.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/linalg.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/nested.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/signal.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/sparse.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/__pycache__/special.cpython-310.pyc,, +torch/testing/_internal/opinfo/definitions/_masked.py,sha256=OfPUav2SDwy6SuD2fwi-YykwxMeeLkqVzWJK3TmMYII,47096 +torch/testing/_internal/opinfo/definitions/fft.py,sha256=LEZ-jx6Y5rvVPN4VNHov_Li9_yQeRFNNXvuMEhie-cc,29469 +torch/testing/_internal/opinfo/definitions/linalg.py,sha256=DAINCikO9GjczhNGHErId3ZNmbC_Fbk5W2ZNVW3CdBA,88164 +torch/testing/_internal/opinfo/definitions/nested.py,sha256=TneEVdJE6K74Q0vPzip9eChW5NqXNAPJsEhneCwsPV4,10507 +torch/testing/_internal/opinfo/definitions/signal.py,sha256=1YUw2i0DtLwC-MXolSEEGKq3ULOb-RjZrZy4Dm8W85I,15293 +torch/testing/_internal/opinfo/definitions/sparse.py,sha256=CmqiVIZiKC0Oyz8Z2pevz7G7CztU4TkSzENRdP3GIEo,33900 +torch/testing/_internal/opinfo/definitions/special.py,sha256=5xhNglpHEx2vWOJM5gGDQJ6vwziIi33JaM8U3A3Nldw,27763 +torch/testing/_internal/opinfo/refs.py,sha256=rhbUtkE6NiONK48DR72RB6-JLzIbqXh5JvJ5vw0daQk,8039 
+torch/testing/_internal/opinfo/utils.py,sha256=w5tS8T_1mWxtISX3ztvEAVAOFJ-xhk90bkBjLS2Fdp8,8692 +torch/testing/_internal/optests/__init__.py,sha256=o-8t0Cva860Tcw0Ig_-rBr6-9Uuc9bDRWRMG6FgKzbA,372 +torch/testing/_internal/optests/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/optests/__pycache__/aot_autograd.cpython-310.pyc,, +torch/testing/_internal/optests/__pycache__/autograd_registration.cpython-310.pyc,, +torch/testing/_internal/optests/__pycache__/fake_tensor.cpython-310.pyc,, +torch/testing/_internal/optests/__pycache__/generate_tests.cpython-310.pyc,, +torch/testing/_internal/optests/__pycache__/make_fx.cpython-310.pyc,, +torch/testing/_internal/optests/aot_autograd.py,sha256=QJnWa6Vs-uv3jNlRw8cefeSa23tkaq-mVSt0_BpuIHc,6239 +torch/testing/_internal/optests/autograd_registration.py,sha256=NTcmeU6cAPjNIfzcet__ZD4tPZIW2gpXy27U0DQprPo,5692 +torch/testing/_internal/optests/fake_tensor.py,sha256=WuT0PGbogjTGPJeRYo2JiQYoTzj_iZET56L1zkv0W4w,257 +torch/testing/_internal/optests/generate_tests.py,sha256=MmlPzyzISrbUKIuxgtObvcNvWez5KAjChL3SGTJhxWI,31030 +torch/testing/_internal/optests/make_fx.py,sha256=NtdQVRYx1E7dkxjVDGcpd5ZNSoGlntV7Mqr17ANPGuY,3268 +torch/testing/_internal/quantization_torch_package_models.py,sha256=DnChjrnLG54TWRhINvfTxOFmNZt5WflDBJDpam4fj9w,951 +torch/testing/_internal/static_module.py,sha256=bwmblZ7N3UtKuO6UEJDUUJdqdhxxIpCwvmrJEESauOk,893 +torch/testing/_internal/test_module/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/testing/_internal/test_module/__pycache__/__init__.cpython-310.pyc,, +torch/testing/_internal/test_module/__pycache__/future_div.cpython-310.pyc,, +torch/testing/_internal/test_module/__pycache__/no_future_div.cpython-310.pyc,, +torch/testing/_internal/test_module/future_div.py,sha256=298hLJlLz2QCJ80OVQXQM6CgN71nbkFMNnkesgzSnY8,114 +torch/testing/_internal/test_module/no_future_div.py,sha256=sksxzWFUupRBbSThS69P366IwnWBNGJe9EqQx8m5EKM,145 
+torch/testing/_internal/torchbind_impls.py,sha256=NjSFSNUT_RkG5TkiPQWqQPMiT9jNkXjEq1U5fC49I2Y,3830 +torch/testing/_internal/triton_utils.py,sha256=F_c6YD9U29RDQ6E7nwlJA6shkuAt6OrhkogHNhps8HI,13905 +torch/testing/_internal/two_tensor.py,sha256=_YIfMViaISS-wWk60WushC9tCDDTusZxKHbNDT_1wLE,3029 +torch/testing/_utils.py,sha256=Y33pJmIJgUhwNQvPQ1lFJNDaRO7_sU5p33SWjdh9Nfs,2039 +torch/torch_version.py,sha256=RutzrEArFr3YlxqXNvuwY_C69wjsLdffuiRPng_gCzM,2483 +torch/types.py,sha256=XZOfbjIV35QPmrNUOjspZKw6OrWzWnNagKt34PW6W5Y,3482 +torch/utils/__init__.py,sha256=McH6WeVu1fwTyFuHPlXFTIe6j6udvuMc9YyYyO-RxuI,4061 +torch/utils/__pycache__/__init__.cpython-310.pyc,, +torch/utils/__pycache__/_backport_slots.cpython-310.pyc,, +torch/utils/__pycache__/_config_module.cpython-310.pyc,, +torch/utils/__pycache__/_content_store.cpython-310.pyc,, +torch/utils/__pycache__/_contextlib.cpython-310.pyc,, +torch/utils/__pycache__/_cpp_extension_versioner.cpython-310.pyc,, +torch/utils/__pycache__/_cxx_pytree.cpython-310.pyc,, +torch/utils/__pycache__/_device.cpython-310.pyc,, +torch/utils/__pycache__/_exposed_in.cpython-310.pyc,, +torch/utils/__pycache__/_foreach_utils.cpython-310.pyc,, +torch/utils/__pycache__/_freeze.cpython-310.pyc,, +torch/utils/__pycache__/_get_clean_triton.cpython-310.pyc,, +torch/utils/__pycache__/_import_utils.cpython-310.pyc,, +torch/utils/__pycache__/_mode_utils.cpython-310.pyc,, +torch/utils/__pycache__/_ordered_set.cpython-310.pyc,, +torch/utils/__pycache__/_python_dispatch.cpython-310.pyc,, +torch/utils/__pycache__/_pytree.cpython-310.pyc,, +torch/utils/__pycache__/_stats.cpython-310.pyc,, +torch/utils/__pycache__/_thunk.cpython-310.pyc,, +torch/utils/__pycache__/_traceback.cpython-310.pyc,, +torch/utils/__pycache__/_triton.cpython-310.pyc,, +torch/utils/__pycache__/_typing_utils.cpython-310.pyc,, +torch/utils/__pycache__/_zip.cpython-310.pyc,, +torch/utils/__pycache__/backend_registration.cpython-310.pyc,, +torch/utils/__pycache__/bundled_inputs.cpython-310.pyc,, 
+torch/utils/__pycache__/checkpoint.cpython-310.pyc,, +torch/utils/__pycache__/collect_env.cpython-310.pyc,, +torch/utils/__pycache__/cpp_backtrace.cpython-310.pyc,, +torch/utils/__pycache__/cpp_extension.cpython-310.pyc,, +torch/utils/__pycache__/deterministic.cpython-310.pyc,, +torch/utils/__pycache__/dlpack.cpython-310.pyc,, +torch/utils/__pycache__/file_baton.cpython-310.pyc,, +torch/utils/__pycache__/flop_counter.cpython-310.pyc,, +torch/utils/__pycache__/hooks.cpython-310.pyc,, +torch/utils/__pycache__/mkldnn.cpython-310.pyc,, +torch/utils/__pycache__/mobile_optimizer.cpython-310.pyc,, +torch/utils/__pycache__/model_zoo.cpython-310.pyc,, +torch/utils/__pycache__/module_tracker.cpython-310.pyc,, +torch/utils/__pycache__/show_pickle.cpython-310.pyc,, +torch/utils/__pycache__/throughput_benchmark.cpython-310.pyc,, +torch/utils/__pycache__/weak.cpython-310.pyc,, +torch/utils/_backport_slots.py,sha256=4R2u-jJMQ6U8omz5-1BsBfFW4rgMa-e5KncSwzCiv-0,4585 +torch/utils/_config_module.py,sha256=YBu7YJLSAHHP1f5yGtTUtugjwuttK3HNzxDjO4KX_T4,13571 +torch/utils/_content_store.py,sha256=TDBdUPBrBc439b6iAGfNmp9qr8TTfeP3lRXhIDrSJxQ,9077 +torch/utils/_contextlib.py,sha256=z3qlsI9El0uowdCM1x73D_0T0cSKSTFXbrI1MGv6RrU,6022 +torch/utils/_cpp_extension_versioner.py,sha256=nfdpz8vTUMsazJkIu948jLRoZ07bRmVcgN8-Qjjy_lw,2010 +torch/utils/_cxx_pytree.py,sha256=rP-PbUjmcJ9paDLPhAEBK4VV0yOsUWexXIN5Srh9TLs,35150 +torch/utils/_device.py,sha256=5nsLM9KJcg8wew97Brt3hRuztyAOzst-7RHM7u3VYoo,3829 +torch/utils/_exposed_in.py,sha256=qr_dgn92ed90Eesvr-YoQuZ8BLq63P-W3yMaimcBnKk,629 +torch/utils/_foreach_utils.py,sha256=cjpOiuoky3eKJg42p5hvRIB-oP1yjsgbrpi8LeyIyQE,2342 +torch/utils/_freeze.py,sha256=p6-u5SRyr3oW6HhQQ0MfJFVQ3Jne3w3IIO-KKk53smc,10044 +torch/utils/_get_clean_triton.py,sha256=SsLFuvalDvBbCqhnEWV4YYJum1zTpLRHNJzGAw-Th4Y,5222 +torch/utils/_import_utils.py,sha256=8Uv-lHLgZ6qsCqOPrJZuGdcJLq_Jy-67DjmLHG2jyC4,1267 +torch/utils/_mode_utils.py,sha256=MkuPh97nbSNvh7fvyY87XE-odxEGqwEu0_wLsqaXgZY,251 
+torch/utils/_ordered_set.py,sha256=STfryi2CFFqb-r0y-l5VdVy1VF_ePQU1RVI3BM3RRVs,5799 +torch/utils/_python_dispatch.py,sha256=HYNmy1gm4Ak0WaUnriLrh7RnoQLW1Kx-f6KvyAU49hA,28362 +torch/utils/_pytree.py,sha256=ypDTo4prdTmM8h3MRjuZYOi_rb9FyNKfTY6PjTCHSIo,54522 +torch/utils/_stats.py,sha256=rHnNk8IymuhWloQoiyh2S0zPqaWXrFN8vydTJKv7rzc,837 +torch/utils/_strobelight/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/_strobelight/__pycache__/__init__.cpython-310.pyc,, +torch/utils/_strobelight/__pycache__/cli_function_profiler.cpython-310.pyc,, +torch/utils/_strobelight/cli_function_profiler.py,sha256=XTbgsT5mB_zySbtZtWZq4CQzIazhcDI4UY_nbU-u_98,11050 +torch/utils/_sympy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/_sympy/__pycache__/__init__.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/functions.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/interp.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/numbers.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/reference.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/singleton_int.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/solve.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/symbol.cpython-310.pyc,, +torch/utils/_sympy/__pycache__/value_ranges.cpython-310.pyc,, +torch/utils/_sympy/functions.py,sha256=Dy8_7WErt7D0YPs4civC_avNJkI_8VPDRljIMApeI7c,42027 +torch/utils/_sympy/interp.py,sha256=YJWueEdqrORr0BlLIHxtLmkUGJYV76ftB3vnkFdF7M0,6090 +torch/utils/_sympy/numbers.py,sha256=OYV2s_DBM7wpniUM9FTrS2fYvcBxVUIrHk9-m3BChjQ,11401 +torch/utils/_sympy/reference.py,sha256=_d8PW-Pm9VCUuKcinSU52ifPum6e06A2eeoPhMjn4F4,5964 +torch/utils/_sympy/singleton_int.py,sha256=9PUdm9IPUBHJRrDyvRN_ycN_b4Js22Zr-6Uje17KLTM,2967 +torch/utils/_sympy/solve.py,sha256=3xiSXM-hUmm6bF6flKtEhNXKSoqCMJRp0u-yIWiV7vs,6383 +torch/utils/_sympy/symbol.py,sha256=hMF5yeRKFaE8FCT42ntYn19qbN2MzVRO4h8_jIGncZI,3563 
+torch/utils/_sympy/value_ranges.py,sha256=7iCl9yGI0l8EboO-ZINyXI-Kh5N7jpHg3eaMwCcFMrw,36297 +torch/utils/_thunk.py,sha256=ahGim6yrtjXwVmcSyyw-3J0eQzr9hloQE3vaXt0IbmY,625 +torch/utils/_traceback.py,sha256=ouVJSfFruZekBUvPDTa2ZblUe5JsROXqKOzGLjw355Q,10303 +torch/utils/_triton.py,sha256=cbHmHzU6B8VzNnbU-fbFpwJzEg6owvobQ9ZROZMgAfU,2259 +torch/utils/_typing_utils.py,sha256=3PQ_pcjyV9gxhP2cL-D3XWVIv2asfY1wR4JUBIUlK_I,378 +torch/utils/_zip.py,sha256=MH2x7uphZkAEd_hZ6icJ6kO3z0bFLWDRtex8OPdbUxI,2454 +torch/utils/backcompat/__init__.py,sha256=p0A1haxs45DMaETdPPBv6p23SpDxN-PdM48YDNVw9kM,694 +torch/utils/backcompat/__pycache__/__init__.cpython-310.pyc,, +torch/utils/backend_registration.py,sha256=D6f6XMSc89-ihDtqUENU8yGSmaj1pPy8Tg2c5CkufBU,19258 +torch/utils/benchmark/__init__.py,sha256=VMZoFTt8YGaScu3-5L1uWwMOED8FRYoEBAX37RFJ3c0,411 +torch/utils/benchmark/__pycache__/__init__.cpython-310.pyc,, +torch/utils/benchmark/examples/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/benchmark/examples/__pycache__/__init__.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/blas_compare_setup.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/compare.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/fuzzer.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/op_benchmark.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/simple_timeit.cpython-310.pyc,, +torch/utils/benchmark/examples/__pycache__/spectral_ops_fuzz_test.cpython-310.pyc,, +torch/utils/benchmark/examples/blas_compare_setup.py,sha256=a1e5sV1uOB62qObaSQoAZjJRzRBiGrdtCWRz00pJUwY,7175 +torch/utils/benchmark/examples/compare.py,sha256=Vp5KSGUDOYAE54NMXRHXImY7MjEfpfgO3XZRcntJsUI,2915 +torch/utils/benchmark/examples/fuzzer.py,sha256=xMSTZ5wa8d0TK5XLDBdffNHGManXq0NTcdqGHQhvlGs,2650 +torch/utils/benchmark/examples/op_benchmark.py,sha256=oCr3uM1db1zmVrgSMyaB9cxMwjSKELQXjbGFEv6mu4E,4227 
+torch/utils/benchmark/examples/simple_timeit.py,sha256=gemlx8iEpXc_nY6OrZgMWCW9WA6GIHr_CzQ1Q8-AHHk,560 +torch/utils/benchmark/examples/spectral_ops_fuzz_test.py,sha256=3yRGHfcj2AxlB4Hf1iL2Eq7Ho6LI4JV0G59ATHWPLg8,4779 +torch/utils/benchmark/op_fuzzers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/benchmark/op_fuzzers/__pycache__/__init__.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/__pycache__/binary.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/__pycache__/sparse_binary.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/__pycache__/sparse_unary.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/__pycache__/spectral.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/__pycache__/unary.cpython-310.pyc,, +torch/utils/benchmark/op_fuzzers/binary.py,sha256=hqr5_VG-GMwDiNCRNE4nqjBZm9mwf0QvOqtzQ-LYjIk,4136 +torch/utils/benchmark/op_fuzzers/sparse_binary.py,sha256=4COEJAYa4kQWyPOQYBXFuSDFd4BLKtoLxYqTV15TStE,4218 +torch/utils/benchmark/op_fuzzers/sparse_unary.py,sha256=dTC5UmD-BJO9d99RMYxbFuRPJhLFHsJgRgyt1IQZVAM,3246 +torch/utils/benchmark/op_fuzzers/spectral.py,sha256=tb2YizjJzwteeoQpS1e3l5jqzx5j7QmNHKXRAz0h6Ik,3624 +torch/utils/benchmark/op_fuzzers/unary.py,sha256=7MpUeiaL6_0zhEKB6oHz0Rt_CHgNSZYhTKOy1hvZvNA,3146 +torch/utils/benchmark/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/benchmark/utils/__pycache__/__init__.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/_stubs.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/common.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/compare.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/compile.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/cpp_jit.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/fuzzer.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/sparse_fuzzer.cpython-310.pyc,, +torch/utils/benchmark/utils/__pycache__/timer.cpython-310.pyc,, 
+torch/utils/benchmark/utils/_stubs.py,sha256=YapnkkFuitztF2XrMHTR9A2EsGm0lsrhmqTRcHPYhjM,976 +torch/utils/benchmark/utils/common.py,sha256=Oe9uNGIaFe7OuybeDXNcfsnp4aOv9-aKyScBmWT7Rzc,13653 +torch/utils/benchmark/utils/compare.py,sha256=kpwhak8X6B2uzaUfFse3QodoYG_1zS0F1I6LXbL6JZY,13324 +torch/utils/benchmark/utils/compile.py,sha256=a6x3-HMvIBd-P6g1PGc9fo6deMRSqLFtkzYLyBZNgxw,7608 +torch/utils/benchmark/utils/cpp_jit.py,sha256=Lfphq1CRzmhr0WKibv_A5b2DgeHRVJ8SHGiFGroC_I4,6811 +torch/utils/benchmark/utils/fuzzer.py,sha256=lyCumLpz87-y2jfAGlknOCcsjT9DtCfKCi8RRvy9-WA,18361 +torch/utils/benchmark/utils/sparse_fuzzer.py,sha256=Ejnnu3Fup95iwLwqzuYkgHJjQTjCWTMvo48EGraveB0,5167 +torch/utils/benchmark/utils/timeit_template.cpp,sha256=Wzz-o6Yjgq3tkmUTxeRldQxrsETh48T2hwL8xbOkRSg,1009 +torch/utils/benchmark/utils/timer.py,sha256=yjPmylTGRChfeu3aYF5m84MGH9GuNBaqGaRnykNEqJA,21097 +torch/utils/benchmark/utils/valgrind_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/benchmark/utils/valgrind_wrapper/__pycache__/__init__.cpython-310.pyc,, +torch/utils/benchmark/utils/valgrind_wrapper/__pycache__/timer_interface.cpython-310.pyc,, +torch/utils/benchmark/utils/valgrind_wrapper/callgrind.h,sha256=wK1NVdRImF_4WVLlQrXkufunvE0qLr5dw50v86KPIo0,5744 +torch/utils/benchmark/utils/valgrind_wrapper/compat_bindings.cpp,sha256=ysuof0blt-4g76St1LUrdlZLiaCBKjwjwZXE0L4nI74,813 +torch/utils/benchmark/utils/valgrind_wrapper/timer_callgrind_template.cpp,sha256=ILVnffXHThuakEj3hzKv4usmt8rICdXpSTBeWKz7jU4,1676 +torch/utils/benchmark/utils/valgrind_wrapper/timer_interface.py,sha256=mRzHX_MtLPaSD-lGRXt3dPS_0fIgrsfx3l05G2sUC4Y,36967 +torch/utils/benchmark/utils/valgrind_wrapper/valgrind.h,sha256=8MpV41sjwR0bIML04pxlLIjVuGhRtWdy1Kmtax4jFLI,422653 +torch/utils/bottleneck/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/bottleneck/__main__.py,sha256=efcGahb61sZa6sq5rauybY4ysJLCt3Fv7k_DtfqxURs,7214 
+torch/utils/bottleneck/__pycache__/__init__.cpython-310.pyc,, +torch/utils/bottleneck/__pycache__/__main__.cpython-310.pyc,, +torch/utils/bundled_inputs.py,sha256=6bji47LvpopuWrMhOuhNXzCC2qlnnhIq0NmRQDOkmZQ,22574 +torch/utils/checkpoint.py,sha256=MGAMqrhmcq0SkZu3PkEGbMhPySB1_Kec8S-J---2vHs,66118 +torch/utils/collect_env.py,sha256=ZL--FHoFkgRHQhB9GbEGlk09xHqY3A8VCKu5dYfyHvU,23357 +torch/utils/cpp_backtrace.py,sha256=GxSqoJwxCh9nbgm7EJS9KRJ0Mn5UwIwJUje-0vC4C-Y,483 +torch/utils/cpp_extension.py,sha256=Ly9S6Ds6BGneC9pf-vKs1tciwP8PpvkfLUmq_nR2XtE,105063 +torch/utils/data/__init__.py,sha256=Otj8bSQaqlFBPEvdrTGo81j_wSk6V2ok4ZD0g9RatYg,1654 +torch/utils/data/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/__pycache__/backward_compatibility.cpython-310.pyc,, +torch/utils/data/__pycache__/dataloader.cpython-310.pyc,, +torch/utils/data/__pycache__/dataset.cpython-310.pyc,, +torch/utils/data/__pycache__/distributed.cpython-310.pyc,, +torch/utils/data/__pycache__/graph.cpython-310.pyc,, +torch/utils/data/__pycache__/graph_settings.cpython-310.pyc,, +torch/utils/data/__pycache__/sampler.cpython-310.pyc,, +torch/utils/data/_utils/__init__.py,sha256=teNSfYfmOXzz8rjO0ypF8IWXnMOIX7jHpvoDK-VPXJA,1625 +torch/utils/data/_utils/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/_utils/__pycache__/collate.cpython-310.pyc,, +torch/utils/data/_utils/__pycache__/fetch.cpython-310.pyc,, +torch/utils/data/_utils/__pycache__/pin_memory.cpython-310.pyc,, +torch/utils/data/_utils/__pycache__/signal_handling.cpython-310.pyc,, +torch/utils/data/_utils/__pycache__/worker.cpython-310.pyc,, +torch/utils/data/_utils/collate.py,sha256=OWodLzDBZG-5J9YyPfW3F_Ha-TJ2SWuF8msc1VwKofU,15983 +torch/utils/data/_utils/fetch.py,sha256=V_nDlVcBJqpooFee1a_k0yf9HVVnkpr3TNPZX4i1Z74,1953 +torch/utils/data/_utils/pin_memory.py,sha256=NrxXrubfnj8zfhWAvuDsnsUyBw9tvzoJHlGw4u8XgmU,4372 +torch/utils/data/_utils/signal_handling.py,sha256=-ZW2LiTfOn6P2Mqi6pQBPoqFi2ZTGxXwC1v6BFvDQk0,3171 
+torch/utils/data/_utils/worker.py,sha256=X5wPaN4KwDIWxtA8lbH9uegrbocxEUK9i7mLskQaJ1Y,13836 +torch/utils/data/backward_compatibility.py,sha256=iVwgc7nHC98OaKE5yYMXPiMoWjAqxSVNewbENz9WU00,309 +torch/utils/data/dataloader.py,sha256=S1pKRj6q4DWvnKTZpqROJ77aHvQznb3cg0dgAsckMXg,76363 +torch/utils/data/datapipes/__init__.py,sha256=OIYy5fRjiWdLc8SRrug0pOjrXouG0I-f2G8QQB0ynto,88 +torch/utils/data/datapipes/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/datapipes/__pycache__/_decorator.cpython-310.pyc,, +torch/utils/data/datapipes/__pycache__/_hook_iterator.cpython-310.pyc,, +torch/utils/data/datapipes/__pycache__/_typing.cpython-310.pyc,, +torch/utils/data/datapipes/__pycache__/datapipe.cpython-310.pyc,, +torch/utils/data/datapipes/__pycache__/gen_pyi.cpython-310.pyc,, +torch/utils/data/datapipes/_decorator.py,sha256=LjdH38VNb9XhjMWKnBNX9tgAHkfX58Wn8ncpiYCqCTE,7838 +torch/utils/data/datapipes/_hook_iterator.py,sha256=dmccJeTkxdDOD52pr87Al6AJ2YUuT2H8uWKPieW0kUc,11954 +torch/utils/data/datapipes/_typing.py,sha256=XOk2oQ_MLF1BnNZDh5yDAL6I5yN4sGCftoTWfg1IyNw,16283 +torch/utils/data/datapipes/dataframe/__init__.py,sha256=zg3Y8ywGmOOMnssugIi2Hd2h95uG2Kx3VmMjRVOaNOU,331 +torch/utils/data/datapipes/dataframe/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/datapipes/dataframe/__pycache__/dataframe_wrapper.cpython-310.pyc,, +torch/utils/data/datapipes/dataframe/__pycache__/dataframes.cpython-310.pyc,, +torch/utils/data/datapipes/dataframe/__pycache__/datapipes.cpython-310.pyc,, +torch/utils/data/datapipes/dataframe/__pycache__/structures.cpython-310.pyc,, +torch/utils/data/datapipes/dataframe/dataframe_wrapper.py,sha256=cJ7u_aPzzg3_OHhvfWkh3RKfd5Uq6_v-4JTlYRzDqVY,3293 +torch/utils/data/datapipes/dataframe/dataframes.py,sha256=Wb3ku-aej8Vk2qaLki6UiVHAKzWXIigziVjJ1Qqybbs,13480 +torch/utils/data/datapipes/dataframe/datapipes.py,sha256=K_27evwElGuGMmfs6Goj2vy2ESiTRda7hFv0B9q53GQ,4490 
+torch/utils/data/datapipes/dataframe/structures.py,sha256=E7T0isNlNJ6deF-KO3q3HgmCRP1pWljFlO8-aMRaGP4,604 +torch/utils/data/datapipes/datapipe.py,sha256=9Vc5GH9lznS9PtWbT4iQs3Xl_NvalvizfcK3-5CaW1s,16781 +torch/utils/data/datapipes/datapipe.pyi,sha256=PQS6t8vx3NsQlTMTr4ftz5edYKe4X3tw5dWNg-IbCB8,32297 +torch/utils/data/datapipes/gen_pyi.py,sha256=74onJrBskpOIdtjceYLREcHNoK7A9w4ohMxiDr3mGQs,10796 +torch/utils/data/datapipes/iter/__init__.py,sha256=Hr1OY3BP9JmWuHzG2_6Lw0rsfrMZLfe3S7CpshN5tYc,1815 +torch/utils/data/datapipes/iter/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/callable.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/combinatorics.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/combining.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/filelister.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/fileopener.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/grouping.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/routeddecoder.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/selecting.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/sharding.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/streamreader.cpython-310.pyc,, +torch/utils/data/datapipes/iter/__pycache__/utils.cpython-310.pyc,, +torch/utils/data/datapipes/iter/callable.py,sha256=9NfXT1DqU91R8WugP46A0mvNhUT5Bhq-_lvmmzlm-9s,9050 +torch/utils/data/datapipes/iter/combinatorics.py,sha256=ctyDJ10odbocoRrMeTCc5Hi1GaXo47FJuwjiuwhhZ2I,6458 +torch/utils/data/datapipes/iter/combining.py,sha256=9e4jMCp4nUd-m2jDaUTm8v3dV-g0y3SqYIg1DCEaBIw,27359 +torch/utils/data/datapipes/iter/filelister.py,sha256=ZCQZ1eZ9318pElv0n8LGpznv5joEA8By85ZGBJ-58RY,2595 +torch/utils/data/datapipes/iter/fileopener.py,sha256=rU0HOPKavFZN7uRLQdLL_I6i--wfd_d8DcWJdNC9MIc,2803 +torch/utils/data/datapipes/iter/grouping.py,sha256=l05y4HA2W2x40hgAUPptbktDZL36SwOojPr-LTE61Ac,12393 
+torch/utils/data/datapipes/iter/routeddecoder.py,sha256=Dgv1rrS1QRYBy0d-EYlBWZixVQat7crUldVO01WKI44,2711 +torch/utils/data/datapipes/iter/selecting.py,sha256=qejbZsqzGW1VtLd8iH4XEtN45jz0HkxIzaIXx9hGxCE,3293 +torch/utils/data/datapipes/iter/sharding.py,sha256=mWRMEO0jnKwyQgeeEXpv7_Na7YOArj0rsu4X4gH7wjc,3538 +torch/utils/data/datapipes/iter/streamreader.py,sha256=ECZ5Fk3UtkKnA4hFj87UGSIxOSXTBcp8eWenuIvojyM,1429 +torch/utils/data/datapipes/iter/utils.py,sha256=Wpasg7MfXcCiCB7QnlqPBjNeyMih674liCFoXf0EEcI,1809 +torch/utils/data/datapipes/map/__init__.py,sha256=gLZThKg4SBTjZlzOWyt3zFVZkY04WG6Ia731L9LyRho,667 +torch/utils/data/datapipes/map/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/datapipes/map/__pycache__/callable.cpython-310.pyc,, +torch/utils/data/datapipes/map/__pycache__/combinatorics.cpython-310.pyc,, +torch/utils/data/datapipes/map/__pycache__/combining.cpython-310.pyc,, +torch/utils/data/datapipes/map/__pycache__/grouping.cpython-310.pyc,, +torch/utils/data/datapipes/map/__pycache__/utils.cpython-310.pyc,, +torch/utils/data/datapipes/map/callable.py,sha256=o4GBT6ullXxvbHFHLNqiabkbuHLXOVMMTA1kNHB4BLM,1860 +torch/utils/data/datapipes/map/combinatorics.py,sha256=FB5vFwbwRq2yYABjhI_sgasWWucnKgsEY6x_CLMLVjQ,4170 +torch/utils/data/datapipes/map/combining.py,sha256=idPtoCv8KDulYSIfahxnjCA5DMlOiKae7Ta_gJO1IZM,3679 +torch/utils/data/datapipes/map/grouping.py,sha256=WAyYNx_2ttATMaeGmlU3BqbtsiKdR11CB-dgc12VwS0,2459 +torch/utils/data/datapipes/map/utils.py,sha256=dDeCKb2Kc7jK8i1PBZgtJQU63coB_BZtUi0HLTmRxJw,1575 +torch/utils/data/datapipes/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/data/datapipes/utils/__pycache__/__init__.cpython-310.pyc,, +torch/utils/data/datapipes/utils/__pycache__/common.cpython-310.pyc,, +torch/utils/data/datapipes/utils/__pycache__/decoder.cpython-310.pyc,, +torch/utils/data/datapipes/utils/__pycache__/snapshot.cpython-310.pyc,, 
+torch/utils/data/datapipes/utils/common.py,sha256=sz1fdAUoQuGK6oIjUSlWMVRG-o3llw08gVNwMHAe7no,13692 +torch/utils/data/datapipes/utils/decoder.py,sha256=_qYa45p6VFc7VRvYYge9PeIzfCLTsXo3w7J6m2CMHHs,11995 +torch/utils/data/datapipes/utils/snapshot.py,sha256=A1pbz3hTPOWq_AbsyKBwBA6-bqleeXLnIbWuqapUcJc,3103 +torch/utils/data/dataset.py,sha256=QBRPcopUYmYmRXCRx80xaR3FuONSRRAGNNcfV8JgJk8,19190 +torch/utils/data/distributed.py,sha256=eLS9U0Io0Mav8-DDOT8woViAapbiGU5nfdFzo-w7xKo,6098 +torch/utils/data/graph.py,sha256=Q2GmTPg1-ABCZjHCUMizt2ar0Kgptqjkb_7MdqAr5BY,5909 +torch/utils/data/graph_settings.py,sha256=gi8p4cIr3yX7-im67riRFhOAezDUc04SEhCrJaNLVJw,5578 +torch/utils/data/sampler.py,sha256=LyiFdvy5qeTH2nYtJ6WURD-b-ljGllAcoxYD91H5CTA,13088 +torch/utils/deterministic.py,sha256=aDwP89FjYTIT-3g2RtWBoagN59_JbqE2BZbgV4HttMU,611 +torch/utils/dlpack.py,sha256=eb7fXKDRQcr0oc0P49qpxE1_hsklavXoRSEUI8wPD00,4438 +torch/utils/file_baton.py,sha256=c1fCu9u9OjgEwYXau4MCEJoEx55s5Uz_VwcILE__Cro,1419 +torch/utils/flop_counter.py,sha256=kNSv-iCMGzVeBNTN0rpuGbL0TB51HfKBzu5Nbcjn1J0,26571 +torch/utils/hipify/__init__.py,sha256=Jzb_RfgvXCrm_SQ4AfeGVi1N36YybxnM5mpyxrnihgI,33 +torch/utils/hipify/__pycache__/__init__.cpython-310.pyc,, +torch/utils/hipify/__pycache__/constants.cpython-310.pyc,, +torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc,, +torch/utils/hipify/__pycache__/hipify_python.cpython-310.pyc,, +torch/utils/hipify/__pycache__/version.cpython-310.pyc,, +torch/utils/hipify/constants.py,sha256=sogTIVpPGdJWQA7OvnjfeAgNwbp8BpbWtO12xV-KBFE,1174 +torch/utils/hipify/cuda_to_hip_mappings.py,sha256=adRP3OlN6EOUQ4E4WgvitS9XH2Ovd1W8mKM3Odqq9dE,350124 +torch/utils/hipify/hipify_python.py,sha256=fwKHBcpJdPt8YvF9m6QPuFj-skKybLd67UKv8GODYJQ,46348 +torch/utils/hipify/version.py,sha256=RsZjRjMprNcDm97wqRRSk6rTLgTX8N0GyicZyZ8OsBQ,22 +torch/utils/hooks.py,sha256=QtW2wjVZroyInSbACBGfUbD-psu0mzNazxo7GEKi-JI,9558 
+torch/utils/jit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/jit/__pycache__/__init__.cpython-310.pyc,, +torch/utils/jit/__pycache__/log_extract.cpython-310.pyc,, +torch/utils/jit/log_extract.py,sha256=mkmsch7WoCVKf2TvpOOcVtgve2xPAQg-7B3n3pY9bo4,3781 +torch/utils/mkldnn.py,sha256=Tmjrw5aTght4xAnX__P6LK9_WTE2s54-2MNb40CBUuE,7908 +torch/utils/mobile_optimizer.py,sha256=liU6eDm-aybOgfcR2wt1o279zFuawbXyUxtDll7bzGI,6494 +torch/utils/model_dump/__init__.py,sha256=4MNim9VrVugODXNpaYzWERFWjVT1YJll7ggl0VK3lPU,16837 +torch/utils/model_dump/__main__.py,sha256=jYGPuoI11jzWgGKtK1-E550XLfPkrbHq_BRUbLusK-A,79 +torch/utils/model_dump/__pycache__/__init__.cpython-310.pyc,, +torch/utils/model_dump/__pycache__/__main__.cpython-310.pyc,, +torch/utils/model_dump/code.js,sha256=70w_JAT7N8dkWHrpQsA1enZCJK7VJOIy23ukbbbXQAg,19251 +torch/utils/model_dump/htm.mjs,sha256=m-psDFjVL3_BzcZnYkiRT8AIYhFhCY5ERrh_LP4sEH4,1230 +torch/utils/model_dump/preact.mjs,sha256=005yDhrtmGbeMNCyf7SMhNktZT4VisnDde1R-TMD5gk,10078 +torch/utils/model_dump/skeleton.html,sha256=vq4r1yFKZEchXwQmky1zpj3q65MCPDBD7wDt6SqzJGg,384 +torch/utils/model_zoo.py,sha256=o2NC-XaU8fqDXbUBQDv7thQBld_LOK-Ko63GS4x8Iyg,117 +torch/utils/module_tracker.py,sha256=oUnTXkDLj10CLebC6eYakAl89x2LUF235b_FxpSU2sU,5070 +torch/utils/show_pickle.py,sha256=0WKgqTteE5wEJcfdQLmRlThOhBDYzVreEpRQbtiRHJQ,5425 +torch/utils/tensorboard/__init__.py,sha256=9NIYMYOGpyaLBWDL3a1sp44BdncOM2aYXYuZkNsnNJM,434 +torch/utils/tensorboard/__pycache__/__init__.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_convert_np.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_embedding.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_onnx_graph.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_proto_graph.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_pytorch_graph.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/_utils.cpython-310.pyc,, +torch/utils/tensorboard/__pycache__/summary.cpython-310.pyc,, 
+torch/utils/tensorboard/__pycache__/writer.cpython-310.pyc,, +torch/utils/tensorboard/_convert_np.py,sha256=lcVRd0CBoPHW20nSk3bTyJQzzR9UuNKPlEUsJISwp1s,705 +torch/utils/tensorboard/_embedding.py,sha256=6ExXXIQiSnA2cIAS9RLxGdKFmuBT01PqdB739x3yT6E,3224 +torch/utils/tensorboard/_onnx_graph.py,sha256=RcIUnGjdzDS9yxI6rios1hafL3jRhRq3yHoFo-TATE4,1923 +torch/utils/tensorboard/_proto_graph.py,sha256=9GOFVhEEovaFOScEzeeXAkCouCZ4B4Blai4M9KCRREA,1758 +torch/utils/tensorboard/_pytorch_graph.py,sha256=ayU_7mWz9g7-3OzKGa2ZVSkdtJgsNzLswjD8AkJjs_g,13879 +torch/utils/tensorboard/_utils.py,sha256=_t_4ZCyhUU4oCpw9aQ8wrmKZK9cy-gCHq9GXY-WecjY,4159 +torch/utils/tensorboard/summary.py,sha256=_SYc4gO5jj4evhY6wXKdZYocgI15_CswJaQWsFN4hTM,34471 +torch/utils/tensorboard/writer.py,sha256=Qp8LuYYq6JYh3_oQhqQdw39s0TosPPHRtoIt31fVWxU,46669 +torch/utils/throughput_benchmark.py,sha256=f50CNVwm_mvpevFUkpz7dgDdWab519jmZIReB8qhVB0,6502 +torch/utils/viz/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torch/utils/viz/__pycache__/__init__.cpython-310.pyc,, +torch/utils/viz/__pycache__/_cycles.cpython-310.pyc,, +torch/utils/viz/_cycles.py,sha256=mFdGmG5QsHMWB4MsC2HcX5un82HLcW-nySRye7r3pRs,14758 +torch/utils/weak.py,sha256=WYMGXPlryZPak6xtA31__d949fUYUO2pon0S8BM3l_k,11070 +torch/version.py,sha256=gTRZZNFChK-Jp93qQQQ0-VH9tMMrfmSUkzKuDmAubMU,248 +torch/xpu/__init__.py,sha256=la1OHSg8nu_zxr_MrGsUObMDirW6zunqV1W006I56fs,15697 +torch/xpu/__pycache__/__init__.cpython-310.pyc,, +torch/xpu/__pycache__/_gpu_trace.cpython-310.pyc,, +torch/xpu/__pycache__/_utils.cpython-310.pyc,, +torch/xpu/__pycache__/memory.cpython-310.pyc,, +torch/xpu/__pycache__/random.cpython-310.pyc,, +torch/xpu/__pycache__/streams.cpython-310.pyc,, +torch/xpu/_gpu_trace.py,sha256=Bdv8ITvAHj19qhb84z85zT-IxRJuBFHlv2wlsf7lkxM,2373 +torch/xpu/_utils.py,sha256=JTETOkI3J7tp4SFKV3ukRlvQDO-6Qg850KkJ8wO6gh0,1591 +torch/xpu/memory.py,sha256=M7eUL462HhEGfaPmFNyV5HLjqmlxxZazQuqBUXo7_68,7391 
+torch/xpu/random.py,sha256=9XpTJ-ObDn2o_8_zN73mb3G6C7tucX6f-aDjH75kmQ0,5248 +torch/xpu/streams.py,sha256=95sEEdmQJUq4ZxCa0S7KGWwf6vkfDYA9fq9aH-uZsbk,5562 +torchgen/__init__.py,sha256=iirTpG38WcCsNMhEbi1dg7_jad6ptk_uzZ-BzaGBFyU,348 +torchgen/__pycache__/__init__.cpython-310.pyc,, +torchgen/__pycache__/code_template.cpython-310.pyc,, +torchgen/__pycache__/context.cpython-310.pyc,, +torchgen/__pycache__/gen.cpython-310.pyc,, +torchgen/__pycache__/gen_aoti_c_shim.cpython-310.pyc,, +torchgen/__pycache__/gen_backend_stubs.cpython-310.pyc,, +torchgen/__pycache__/gen_executorch.cpython-310.pyc,, +torchgen/__pycache__/gen_functionalization_type.cpython-310.pyc,, +torchgen/__pycache__/gen_lazy_tensor.cpython-310.pyc,, +torchgen/__pycache__/gen_schema_utils.cpython-310.pyc,, +torchgen/__pycache__/gen_vmap_plumbing.cpython-310.pyc,, +torchgen/__pycache__/local.cpython-310.pyc,, +torchgen/__pycache__/model.cpython-310.pyc,, +torchgen/__pycache__/native_function_generation.cpython-310.pyc,, +torchgen/__pycache__/utils.cpython-310.pyc,, +torchgen/__pycache__/yaml_utils.cpython-310.pyc,, +torchgen/aoti/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/aoti/__pycache__/__init__.cpython-310.pyc,, +torchgen/aoti/__pycache__/fallback_ops.cpython-310.pyc,, +torchgen/aoti/fallback_ops.py,sha256=yCgvAQmN1-kuxmFiCOE0fSiphIb0R7x_r_fzubaEGMY,5729 +torchgen/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/api/__pycache__/__init__.cpython-310.pyc,, +torchgen/api/__pycache__/autograd.cpython-310.pyc,, +torchgen/api/__pycache__/cpp.cpython-310.pyc,, +torchgen/api/__pycache__/dispatcher.cpython-310.pyc,, +torchgen/api/__pycache__/functionalization.cpython-310.pyc,, +torchgen/api/__pycache__/lazy.cpython-310.pyc,, +torchgen/api/__pycache__/meta.cpython-310.pyc,, +torchgen/api/__pycache__/native.cpython-310.pyc,, +torchgen/api/__pycache__/python.cpython-310.pyc,, +torchgen/api/__pycache__/structured.cpython-310.pyc,, 
+torchgen/api/__pycache__/translate.cpython-310.pyc,, +torchgen/api/__pycache__/ufunc.cpython-310.pyc,, +torchgen/api/__pycache__/unboxing.cpython-310.pyc,, +torchgen/api/autograd.py,sha256=QSc8CwsEu9XhY-7f6hZA0l7g2UfpDHkqT2WkZYeTHmU,38893 +torchgen/api/cpp.py,sha256=vjrMEQUNz6rbleo_3AU1tRwThh92DX2yjRwrZtbyYAQ,16539 +torchgen/api/dispatcher.py,sha256=KxY43Q-nVtgMd9DWSxMyPEOJzCmoQs5whAPpURyStEs,3384 +torchgen/api/functionalization.py,sha256=HZg1Rgl6Yn22oZ-4-yh2bWye3Ry0xp-LbJ6FmT9iWwI,7566 +torchgen/api/lazy.py,sha256=5PryHH51-R22w9kDPWj7t2OZADWlMlLLzDurqTkowvg,17024 +torchgen/api/meta.py,sha256=zJYzviYI2gY9V9yPUyiZ_fShcdm_5LbigqGUH_WfaWw,483 +torchgen/api/native.py,sha256=rX5iO7aI2Ny06s0PcR9FTuEwVtjxmWLSj4a975g5Uh8,5136 +torchgen/api/python.py,sha256=wXijrnYssSQhAxZnGma10Njqpk1NlZ06zGBvwSi9YRU,58784 +torchgen/api/structured.py,sha256=4LXogU4OTBVgk8TLY0qVB7ub64me2wCjcz4s9TtXbdQ,6111 +torchgen/api/translate.py,sha256=vqlcMsiBlboSLysOybqiAmEpPN3TmXDConSVqteCqTg,19217 +torchgen/api/types/__init__.py,sha256=bQ29sz_GNJGgqoDUsE6i_AcYZq81pOs1ATXQJQhUhLY,144 +torchgen/api/types/__pycache__/__init__.cpython-310.pyc,, +torchgen/api/types/__pycache__/signatures.cpython-310.pyc,, +torchgen/api/types/__pycache__/types.cpython-310.pyc,, +torchgen/api/types/__pycache__/types_base.cpython-310.pyc,, +torchgen/api/types/signatures.py,sha256=sdCUgWsKGiS1WilhXSSL2hUT375bB3U1Yd7LNVyfYoo,15690 +torchgen/api/types/types.py,sha256=NlQz-vBOBr0MCQkfycWOsOyRT9ck52rr2bM0JgzvObM,6542 +torchgen/api/types/types_base.py,sha256=UwlK_2LXfuS1CI-zOwPnkzr1lwu5zpcxTagJNRi1Lx4,8964 +torchgen/api/ufunc.py,sha256=BukIDKwJTKZMoasfvzKLTiP-kGOIoc7ptAiLtrM3egk,6693 +torchgen/api/unboxing.py,sha256=cUeqAb_QQ8S4NQ1Zy2RIvl878euvJhTQueV9Mq8VOH8,9493 +torchgen/code_template.py,sha256=BLwLk8YnFkfONvonQ70kLOhfxbrSSr8iWlSYAYKrv_0,2921 +torchgen/context.py,sha256=wuELWcdr8LrLb9lYw9uOXCOAK2Q40m-CMDDKii8uIhc,4000 +torchgen/dest/__init__.py,sha256=qECRwrljRjK-kMdBqfc9X8JPVVPU6XlUuWNEdFD9u0w,805 
+torchgen/dest/__pycache__/__init__.cpython-310.pyc,, +torchgen/dest/__pycache__/lazy_ir.cpython-310.pyc,, +torchgen/dest/__pycache__/lazy_ts_lowering.cpython-310.pyc,, +torchgen/dest/__pycache__/native_functions.cpython-310.pyc,, +torchgen/dest/__pycache__/register_dispatch_key.cpython-310.pyc,, +torchgen/dest/__pycache__/ufunc.cpython-310.pyc,, +torchgen/dest/lazy_ir.py,sha256=lhkuZ80QKWjDzM_QyV-16ckqeWZBW-niCAXk6P0TjaI,28990 +torchgen/dest/lazy_ts_lowering.py,sha256=9QUvL_Z-mGqRfhtK_X2RSEHlGmFOqDr_J8cYJrALUG4,1831 +torchgen/dest/native_functions.py,sha256=fd3kE7GqY6_GnjVhcF5oCbk4Cd8au3hjS9N27ZvH7_E,2312 +torchgen/dest/register_dispatch_key.py,sha256=jOxS-Dmvuo_YTfBAVrwzDm5BzxmUk_jYeVOGR5AHd0Y,40903 +torchgen/dest/ufunc.py,sha256=gAc7iucR5lWiIm4tS1k9ViGit1TsJwBNXTL0FPK3g1Q,17811 +torchgen/executorch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/executorch/__pycache__/__init__.cpython-310.pyc,, +torchgen/executorch/__pycache__/model.cpython-310.pyc,, +torchgen/executorch/__pycache__/parse.cpython-310.pyc,, +torchgen/executorch/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/executorch/api/__pycache__/__init__.cpython-310.pyc,, +torchgen/executorch/api/__pycache__/custom_ops.cpython-310.pyc,, +torchgen/executorch/api/__pycache__/et_cpp.cpython-310.pyc,, +torchgen/executorch/api/__pycache__/unboxing.cpython-310.pyc,, +torchgen/executorch/api/custom_ops.py,sha256=SCsy1XkVN_cJCCLKx5fB4oTZqBpzRffBFjA2fb930kQ,5477 +torchgen/executorch/api/et_cpp.py,sha256=n88pEjqLzW_AkkwKjHx-3vzms5nSTkOIiGKx-dO4lR0,12952 +torchgen/executorch/api/types/__init__.py,sha256=ws3m0NagLBCxZAKJDPhGHrmWlAtdgJI6L25lpXj0s3E,122 +torchgen/executorch/api/types/__pycache__/__init__.cpython-310.pyc,, +torchgen/executorch/api/types/__pycache__/signatures.cpython-310.pyc,, +torchgen/executorch/api/types/__pycache__/types.cpython-310.pyc,, +torchgen/executorch/api/types/signatures.py,sha256=hdcQBvHZAZ2dQmmPeXwZ8xp8rah-AwdsIzfec15Gn8A,2567 
+torchgen/executorch/api/types/types.py,sha256=upEoRkGfzJ74iSJoyGQsH3vmA41T684t0nrvo-SZ34A,2437 +torchgen/executorch/api/unboxing.py,sha256=WqmeY7KgjfMmQNBPKOcNGG69xHEn-h76kJgVZodWeGY,7862 +torchgen/executorch/model.py,sha256=vCSk4syh0xs0O2sxPCYnO_pgdDpJgQepCjZYhoE4RgU,7665 +torchgen/executorch/parse.py,sha256=qy2wCct6heZ6xePVe9aYWDMI3yfp_XzSUnAZVdyLg8c,5422 +torchgen/gen.py,sha256=rZ12oS-hWpUUYqMVtvhV96Uw09K2X29s6PQ9lH7xJJw,113643 +torchgen/gen_aoti_c_shim.py,sha256=COHLTKiiqLq69NNrpL5KrJkuiDTRExgpEcJLVU9x9EI,16583 +torchgen/gen_backend_stubs.py,sha256=drKViyHZsCSpLsnPFuMIkX737aSLYlabQ2pTlhfDwDI,22323 +torchgen/gen_executorch.py,sha256=Jxsd6f_0qUT9PX5A7s6x6y-crZOfxW0fqm4_eTEQmGA,36229 +torchgen/gen_functionalization_type.py,sha256=U9jQ4tonciKe4r8GTpQLnO699eFb6A7Qv1uUlcAtAX0,38070 +torchgen/gen_lazy_tensor.py,sha256=nl5LZgRMt9ngmAPgjzgkoh5Q3DZ42wVMxu__1J8CJZg,22730 +torchgen/gen_schema_utils.py,sha256=CU_Jjn1KyM07KA6HIXiEVwSmA6ZHiqJ1EZlJfWOx-R0,3324 +torchgen/gen_vmap_plumbing.py,sha256=qELstuia9TzdJ4MM3SqJdypbB6zZKiUc9MQndIb19ic,9327 +torchgen/local.py,sha256=O-6-1uuuzbm0P12gGQ7cWM0oz6MljwSHFgEMgQW5ero,2112 +torchgen/model.py,sha256=epsG0HQm_LS2S2UuFThvKQ1Z__JI-5tfufApOAGlmB8,112857 +torchgen/native_function_generation.py,sha256=lwWeM5qSG0OYeXkXbWknXOLUpof4gNIvlbVL9mUAnUM,29564 +torchgen/operator_versions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/operator_versions/__pycache__/__init__.cpython-310.pyc,, +torchgen/operator_versions/__pycache__/gen_mobile_upgraders.cpython-310.pyc,, +torchgen/operator_versions/__pycache__/gen_mobile_upgraders_constant.cpython-310.pyc,, +torchgen/operator_versions/gen_mobile_upgraders.py,sha256=m1r6erVOU1NE35gOcfZi6_I0V8zlmtj98m3keh9TMjA,12709 +torchgen/operator_versions/gen_mobile_upgraders_constant.py,sha256=C-U6rHQybm_FTcxsz27RMgJDj464NOLhlOzVjSjEn0w,243 +torchgen/packaged/ATen/native/native_functions.yaml,sha256=PbBnzwKzoIobq04QmkMOJmi9loVpiRTrFXqOUdLzR_A,596675 
+torchgen/packaged/ATen/native/tags.yaml,sha256=ThsHWvO3t4r4G3YvdWU1-u9Kq5fUH6PhCqTkGv3xlX8,3893 +torchgen/packaged/ATen/templates/ATenOpList.cpp,sha256=YobnhIm91ECCc6uYD2uDOrvFM4WqutKQbQ5x_Fh_5IE,1059 +torchgen/packaged/ATen/templates/CompositeViewCopyKernels.cpp,sha256=H64AHoCBB7MJIECAHNzki8NiTVPND0hy2vJ-KqiSF2c,2077 +torchgen/packaged/ATen/templates/DispatchKeyFunction.h,sha256=npUU8WpU76sZv8oqUQqBpcV_QHT6RW9j42EVTSA6pvA,702 +torchgen/packaged/ATen/templates/DispatchKeyFunctions.h,sha256=KlLfLZSYEG_7miq0fD8yuFtgVtm2mO4CQ9WiT6xoEpY,1937 +torchgen/packaged/ATen/templates/DispatchKeyFunctions_inl.h,sha256=nlAU0xWHQqRZn7JNV163YgAv5wwlykwsikMyjzZeZkI,824 +torchgen/packaged/ATen/templates/DispatchKeyNativeFunctions.cpp,sha256=DYjxJmYQ5Yegq1XW4hakqjvZo5skNWi2f5ZNLPUyQ4c,184 +torchgen/packaged/ATen/templates/DispatchKeyNativeFunctions.h,sha256=e8lUNJZ4jt0uMHjio6HOupVpMv83DtAaFBudAG6pEDw,384 +torchgen/packaged/ATen/templates/Function.h,sha256=qbBOA1FemCV0AAeFPgjCkMAKmIjeWowI4gaveroQB9U,496 +torchgen/packaged/ATen/templates/FunctionalInverses.h,sha256=azlMYM1eJqDFCBeDRBEgpKgbeDIsWLIPEWBVyLVoocY,1231 +torchgen/packaged/ATen/templates/Functions.cpp,sha256=RvU0_zzKVPasa8w9Cg-zD9MBkWj_zdazTxsma0PBS8g,3028 +torchgen/packaged/ATen/templates/Functions.h,sha256=3WE9DmsTPozVLTXJSuqYNO8Ap-p_uub6U4T9dUT1fa4,4677 +torchgen/packaged/ATen/templates/LazyIr.h,sha256=-aVOIdjB719-6SLBsUURho6x0xdKyXsN6h5QP76HTd8,585 +torchgen/packaged/ATen/templates/LazyNonNativeIr.h,sha256=KQbjyZ0Q8qK8JcgqAaF-M-ZhvRE5UhTj8JIHyceNK9Y,178 +torchgen/packaged/ATen/templates/MethodOperators.h,sha256=ifirYleNPll8bjo_OYaC8jEuLWIJlP0Asy0xjtdGfQo,830 +torchgen/packaged/ATen/templates/NativeFunction.h,sha256=C8rQosVZF3aESwI4IvZitd60sdGov6kvClkDAtnx3Oc,366 +torchgen/packaged/ATen/templates/NativeFunctions.h,sha256=N351coNgM9F_jpdma5OK3DvlNUw2oY51iBXKe5dK81Y,1149 +torchgen/packaged/ATen/templates/NativeMetaFunction.h,sha256=C0alnIY6J-5Mlca9_0ocI7bnFKo5jQ_QTuLzo8iCUDQ,452 
+torchgen/packaged/ATen/templates/NativeMetaFunctions.h,sha256=mIOwmpkQY9zYolxUXK39c4nRDyG32vBsqEeSDo76p_k,306 +torchgen/packaged/ATen/templates/Operator.h,sha256=ymuBaaHDKS1QZ-7yiHOUmjX-eyCxsjD1v6_ZUGKvz8o,425 +torchgen/packaged/ATen/templates/Operators.cpp,sha256=cjfjkIMtfc8n1w0TDy_JJDJq0DK2cT9DfhkZw3YnTWM,347 +torchgen/packaged/ATen/templates/Operators.h,sha256=oU939CI59Drfg2QlKfkCdU6yVQBVi0y9Ia_kQ0rFC5k,3200 +torchgen/packaged/ATen/templates/RedispatchFunctions.cpp,sha256=pNhfp3gMBw4km2c_4EfeF6ge3DGZi8xtGCnkmjzfpi0,307 +torchgen/packaged/ATen/templates/RedispatchFunctions.h,sha256=HFJ8SLBmg2LNRbsp9MdhHBGGztTGhcjzdWCgYPx5f7c,882 +torchgen/packaged/ATen/templates/RegisterBackendSelect.cpp,sha256=Op-RV7_8UnF_dbxO-hZ8X-7DBnFHsP9s11cM0JkJOWY,752 +torchgen/packaged/ATen/templates/RegisterCodegenUnboxedKernels.cpp,sha256=tux2wSt9RalK0o5AfliXJB3N4diTi-88PwNHOobDbmM,1119 +torchgen/packaged/ATen/templates/RegisterDispatchDefinitions.ini,sha256=E8PNfTGFdvYUkk6YN0K-cfXRf61KazOov2ogDWmDFDU,476 +torchgen/packaged/ATen/templates/RegisterDispatchKey.cpp,sha256=qDT1B5ycCr9lHwcC9JvU6k-0chMBd8YjnOkYyWEA3NI,1634 +torchgen/packaged/ATen/templates/RegisterFunctionalization.cpp,sha256=eLvCt4pTbOkb_PG1bHlKPO6tK0X3KGlDpiUIph_pow8,3372 +torchgen/packaged/ATen/templates/RegisterSchema.cpp,sha256=HoLTDNwRhe8xJJucgd6qCkqTglPtxLYCRGCD_4-S0j0,383 +torchgen/packaged/ATen/templates/RegistrationDeclarations.h,sha256=KImic_ILyhxavbGxVna-Ascf--okibalZJlK44a5dic,160 +torchgen/packaged/ATen/templates/TensorBody.h,sha256=66WFR8BcLwUyENUH2bnlp867YKu1cBffjVLjluML_cU,29135 +torchgen/packaged/ATen/templates/TensorMethods.cpp,sha256=x541hSdseYz2jdqFdKNHW2cEWAHxzV313FYfHg2s1OQ,2624 +torchgen/packaged/ATen/templates/UfuncCPU.cpp,sha256=LrnISndBkXtdugvOWeRk9ZGYQlztFI5yqytoaZiKOQk,445 +torchgen/packaged/ATen/templates/UfuncCPUKernel.cpp,sha256=paz66F7U6E9e2X-rpbxlVDGcxevcXIOialaEqAaoArc,350 +torchgen/packaged/ATen/templates/UfuncCUDA.cu,sha256=HOBz8yO4QFxxmX_6gCF7L8MJvrGPwGQjoE-qDf8kF9Y,494 
+torchgen/packaged/ATen/templates/UnboxingFunctions.cpp,sha256=wwdlYUaaCjXwhlaoqieeO-3fOqoQSBj62j6Is4n-UKY,709 +torchgen/packaged/ATen/templates/UnboxingFunctions.h,sha256=bcs4ET0LLtzs7nSSWhKA8jJzongib5CGlNA5yaExfKw,1026 +torchgen/packaged/ATen/templates/aten_interned_strings.h,sha256=_FM2jXAhATj9GZ66dXXUPx72q2uYnzT8iN4hkTz0rmI,805 +torchgen/packaged/ATen/templates/enum_tag.h,sha256=w3hCov4CToJ5qyHrnadei9907AIZkDLALSOlOZ1gP2Q,179 +torchgen/packaged/autograd/BUILD.bazel,sha256=Jd76gG6LQlmmEKK9IYlDbkVSmc8L5bTvGjG159L3rJA,104 +torchgen/packaged/autograd/README.md,sha256=hGiUzBaCs0wzBhEXB3vkWUXU4Lima6y5_wPKjAoKQ-Q,147 +torchgen/packaged/autograd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/packaged/autograd/__pycache__/__init__.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/context.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_annotated_fn_args.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_autograd.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_autograd_functions.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_inplace_or_view_type.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_python_functions.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_trace_type.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_variable_factories.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_variable_type.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/gen_view_funcs.cpython-310.pyc,, +torchgen/packaged/autograd/__pycache__/load_derivatives.cpython-310.pyc,, +torchgen/packaged/autograd/build.bzl,sha256=P4Ox76V35gWtKl8p08d-av4YEb10BC5Lcr5qHiqK7uE,348 +torchgen/packaged/autograd/context.py,sha256=zGoFTCXNnWbHWdkLRKYxIv2Xg41lEv7evO8WIvg1LKI,943 +torchgen/packaged/autograd/deprecated.yaml,sha256=UbtajzWo89jf1Q5sw8JNJIZT5s-iDBagnrGJthtDS2Q,6250 
+torchgen/packaged/autograd/derivatives.yaml,sha256=r5VVBWmFmtXa6VrX8eolzUJSGlH2_9Q1p-yHD-WiZpA,179477 +torchgen/packaged/autograd/gen_annotated_fn_args.py,sha256=AFz0DtRJDKTuRuhgD0fhrbtUOPSamaDr6Nov0RMzWWY,4444 +torchgen/packaged/autograd/gen_autograd.py,sha256=2WWaThbNZMfSYo_2flzDQlfxgUVUqoAzrHoopnNWGpM,4615 +torchgen/packaged/autograd/gen_autograd_functions.py,sha256=_SPAWy6t9DGmGIDGq1RQJESIB9Kq1zdb5Q1IOwY8L0c,32484 +torchgen/packaged/autograd/gen_inplace_or_view_type.py,sha256=5dkequnqj3KSZEPXetkjHjS2J8F92I38z0YmsnDPCj0,22749 +torchgen/packaged/autograd/gen_python_functions.py,sha256=ifA8NNkIql7nAtdakokneDfjtnPh3FngW_DWaf4K0TU,46349 +torchgen/packaged/autograd/gen_trace_type.py,sha256=OSo4JjTJru6g0Eg7S5E9FJgdqYn5091KjsqpnAHFQso,18938 +torchgen/packaged/autograd/gen_variable_factories.py,sha256=AYl1c5EisjOcqrmh7mHpUeBb1d0YD-Q8rkmASj_XM6I,4479 +torchgen/packaged/autograd/gen_variable_type.py,sha256=0BH8ZY9ZDhWqJcimS76O_Ef6eZjQXuc6qDAexbvmJj4,83783 +torchgen/packaged/autograd/gen_view_funcs.py,sha256=qyEhYewOhn-OWVtqGTD04L4ws0YtWYpFl4arDJ5k1YA,11600 +torchgen/packaged/autograd/load_derivatives.py,sha256=rFUSFwQQ79n7Ti5wwueHMIBbNQIMzkIRoxkBi1W9VgM,40278 +torchgen/packaged/autograd/templates/ADInplaceOrViewType.cpp,sha256=6juXEdMJaxhS1nv6bqUmx-cJLWPyc4Yb3teAeTyEBG4,790 +torchgen/packaged/autograd/templates/Functions.cpp,sha256=T2Mf85iPLHVAs3-Qen3YQ5f6-rSLQPEa9fvxn5wWujs,623 +torchgen/packaged/autograd/templates/Functions.h,sha256=9vjETYlNoQKCJVRNBmIvBoj6T2GA9-c16T5FPVmWpa0,1577 +torchgen/packaged/autograd/templates/TraceType.cpp,sha256=aqACTyrT05ElIiBLJYkfSgxNjwhum6_QUPDAtXwoKqo,695 +torchgen/packaged/autograd/templates/VariableType.cpp,sha256=40JaiDOkY_86ndvGzap_UWqNg-b_XQQxPqXnsKSMhzc,1859 +torchgen/packaged/autograd/templates/VariableType.h,sha256=jf5Q5UKN9dOahf54IEryjU-8t2NkyB18r3DPbWMJgFo,1692 +torchgen/packaged/autograd/templates/ViewFuncs.cpp,sha256=oas1Pw6wWyAfRy2uh5K7fGD3qR60LD2Oiv-B5OW6Tvc,269 
+torchgen/packaged/autograd/templates/ViewFuncs.h,sha256=7RoIEE9NQ6jWdg3SfdbRsQxACKdscsrnZN45JTJER5I,498 +torchgen/packaged/autograd/templates/annotated_fn_args.py.in,sha256=gRgF9BZmylhyfXrVSAVjg9Y4TUYpq1FgYVd3_hk_9no,199 +torchgen/packaged/autograd/templates/python_enum_tag.cpp,sha256=2cTLq6vaU-qjAXLrChZAnCmpVqA4oDvZLOsBY3GWmDA,495 +torchgen/packaged/autograd/templates/python_fft_functions.cpp,sha256=g8Ub5jh_YhgeourCXjB2zlH15Lm_oGwLa37IjibMoEc,1951 +torchgen/packaged/autograd/templates/python_functions.cpp,sha256=b20LJcBbbQHQ4hTt-2h__DIuk8uHksD4ZVD5csfM43Q,1121 +torchgen/packaged/autograd/templates/python_functions.h,sha256=WX52FzntisoprdObQvGXrzMhehgWZ2rIfnHzgNpX-5U,345 +torchgen/packaged/autograd/templates/python_linalg_functions.cpp,sha256=3NyK1tMOYwAsnxyb1k04RPiN9MQkriblY3GnPVOrfSY,1614 +torchgen/packaged/autograd/templates/python_nested_functions.cpp,sha256=2wivRfIBY8pkU9mqHVQfqnsIff6K1Al-BTJyRFuUeAw,2029 +torchgen/packaged/autograd/templates/python_nn_functions.cpp,sha256=s3bTYskH2tKfU79HmQicznSdrCPSQI0YagbitVy51xU,3492 +torchgen/packaged/autograd/templates/python_return_types.cpp,sha256=GB75OiT-5X3rmgXZlc30MB67qMlvqkzhrDIfIj1mZp4,1219 +torchgen/packaged/autograd/templates/python_return_types.h,sha256=ZDLPH-bxSjCpeyWLhU6kZsuAxa2pnCgTE57YpxXmDmQ,198 +torchgen/packaged/autograd/templates/python_sparse_functions.cpp,sha256=NW_L2mF7ZrR2FvXtDE7iNfw0BM-wTyUzBa_C1B5RZ7A,1551 +torchgen/packaged/autograd/templates/python_special_functions.cpp,sha256=JqcAExUPCnbzUujY2PIDL5Gap-ZKlltKiZmDTleO_8s,1972 +torchgen/packaged/autograd/templates/python_torch_functions.cpp,sha256=QEgUPJbi5TunkkQog-o6zoIjpLFZ6VFHQYxKe7BUAiw,2601 +torchgen/packaged/autograd/templates/python_variable_methods.cpp,sha256=3DubUV-F8e_-_CAJE6CaXBCcRPA45KnWFmuGFhVSO6Q,53389 +torchgen/packaged/autograd/templates/variable_factories.h,sha256=g-WOn2XuHBsLNGUk-emw0-KcrNsosDjqAu4NjPJCFA4,5637 +torchgen/selective_build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+torchgen/selective_build/__pycache__/__init__.cpython-310.pyc,, +torchgen/selective_build/__pycache__/operator.cpython-310.pyc,, +torchgen/selective_build/__pycache__/selector.cpython-310.pyc,, +torchgen/selective_build/operator.py,sha256=21lWnZeGQ9LiBusCMMFGr8rUosEiDxRwhI05taXViio,6509 +torchgen/selective_build/selector.py,sha256=fZckUgNq4stv6nGT74vG483RlQ8hQGv5cdHq5XejyZU,12666 +torchgen/static_runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +torchgen/static_runtime/__pycache__/__init__.cpython-310.pyc,, +torchgen/static_runtime/__pycache__/config.cpython-310.pyc,, +torchgen/static_runtime/__pycache__/gen_static_runtime_ops.cpython-310.pyc,, +torchgen/static_runtime/__pycache__/generator.cpython-310.pyc,, +torchgen/static_runtime/config.py,sha256=uLZmNneGTXweyLclv-CwBDlbBHNqU_84zS_4qWVGiWI,14485 +torchgen/static_runtime/gen_static_runtime_ops.py,sha256=FtrK2KFNj73YfKADMMRVnGs8YKmVOQEfW6VjyTN0i-w,7360 +torchgen/static_runtime/generator.py,sha256=adx7Fj21OKxofodxGQa7hB67HniOR0yX8P2r_SmL8fo,26994 +torchgen/utils.py,sha256=A4QT6RIpv8E_iCrtfv0wykYifkeyiXtzLmFVBbELc0M,16516 +torchgen/yaml_utils.py,sha256=Xhwu0FkP6tsRKYQi34JG9BfgHclejg_FhI8mAzzD5DM,1080 diff --git a/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/top_level.txt b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..90d81bec1d35eb3996334268974885a5398b6c6c --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/torch-2.5.1.dist-info/top_level.txt @@ -0,0 +1,3 @@ +functorch +torch +torchgen diff --git a/deepseek/lib/python3.10/site-packages/typing_extensions.py b/deepseek/lib/python3.10/site-packages/typing_extensions.py new file mode 100644 index 0000000000000000000000000000000000000000..dec429ca8723950097217d2140dd8daccb064e2d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/typing_extensions.py @@ -0,0 +1,3641 @@ +import abc +import collections +import 
collections.abc +import contextlib +import functools +import inspect +import operator +import sys +import types as _types +import typing +import warnings + +__all__ = [ + # Super-special typing primitives. + 'Any', + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVar', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'Buffer', + 'ChainMap', + + # Concrete collection types. + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', + + # One-off things. + 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'deprecated', + 'Doc', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_original_bases', + 'get_protocol_members', + 'get_type_hints', + 'IntVar', + 'is_protocol', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'override', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeAliasType', + 'TypeGuard', + 'TypeIs', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'ReadOnly', + 'Required', + 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'NoDefault', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 
'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type +_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta") + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. 
At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + +Final = typing.Final + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. 
+ pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): + Literal = typing.Literal +else: + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + 
to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. 
+ pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +DefaultDict = typing.DefaultDict +OrderedDict = typing.OrderedDict +Counter = typing.Counter +ChainMap = typing.ChainMap +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +if sys.version_info >= (3, 13, 0, "beta"): + from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator +else: + def _is_dunder(attr): + return attr.startswith('__') and attr.endswith('__') + + # Python <3.9 doesn't have typing._SpecialGenericAlias + _special_generic_alias_base = getattr( + typing, "_SpecialGenericAlias", typing._GenericAlias + ) + + class _SpecialGenericAlias(_special_generic_alias_base, _root=True): + def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): + if _special_generic_alias_base is typing._GenericAlias: + # Python <3.9 + self.__origin__ = origin + self._nparams = nparams + super().__init__(origin, nparams, special=True, inst=inst, name=name) + else: + # Python >= 3.9 + super().__init__(origin, nparams, inst=inst, name=name) + self._defaults = defaults + + def __setattr__(self, attr, val): + allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'} + if 
_special_generic_alias_base is typing._GenericAlias: + # Python <3.9 + allowed_attrs.add("__origin__") + if _is_dunder(attr) or attr in allowed_attrs: + object.__setattr__(self, attr, val) + else: + setattr(self.__origin__, attr, val) + + @typing._tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) + if ( + self._defaults + and len(params) < self._nparams + and len(params) + len(self._defaults) >= self._nparams + ): + params = (*params, *self._defaults[len(params) - self._nparams:]) + actual_len = len(params) + + if actual_len != self._nparams: + if self._defaults: + expected = f"at least {self._nparams - len(self._defaults)}" + else: + expected = str(self._nparams) + if not self._nparams: + raise TypeError(f"{self} is not a generic class") + raise TypeError( + f"Too {'many' if actual_len > self._nparams else 'few'}" + f" arguments for {self};" + f" actual {actual_len}, expected {expected}" + ) + return self.copy_with(params) + + _NoneType = type(None) + Generator = _SpecialGenericAlias( + collections.abc.Generator, 3, defaults=(_NoneType, _NoneType) + ) + AsyncGenerator = _SpecialGenericAlias( + collections.abc.AsyncGenerator, 2, defaults=(_NoneType,) + ) + ContextManager = _SpecialGenericAlias( + contextlib.AbstractContextManager, + 2, + name="ContextManager", + defaults=(typing.Optional[bool],) + ) + AsyncContextManager = _SpecialGenericAlias( + contextlib.AbstractAsyncContextManager, + 2, + name="AsyncContextManager", + defaults=(typing.Optional[bool],) + ) + + +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], +} + + +_EXCLUDED_ATTRS = 
frozenset(typing.EXCLUDED_ATTRIBUTES) | { + "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__", + "__final__", +} + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in {'Protocol', 'Generic'}: + continue + annotations = getattr(base, '__annotations__', {}) + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): + attrs.add(attr) + return attrs + + +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# `__match_args__` attribute was removed from protocol members in 3.13, +# we want to backport this change to older Python versions. +if sys.version_info >= (3, 13): + Protocol = typing.Protocol +else: + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {'abc', 'functools', None} + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + def _type_check_issubclass_arg_1(arg): + """Raise TypeError if `arg` is not an instance of `type` + in `issubclass(arg, )`. + + In most cases, this is verified by type.__subclasscheck__. + Checking it again unnecessarily would slow down issubclass() checks, + so, we don't perform this check unless we absolutely have to. + + For various error paths, however, + we want to ensure that *this* error message is shown to the user + where relevant, rather than a typing.py-specific error message. + """ + if not isinstance(arg, type): + # Same error message as for issubclass(1, int). 
+ raise TypeError('issubclass() arg 1 must be a class') + + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + class _ProtocolMeta(type(typing.Protocol)): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... + # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, typing.Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, typing.Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not getattr(cls, '_is_runtime_protocol', False): + _type_check_issubclass_arg_1(other) + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + if ( + # this attribute is set by @runtime_checkable: + cls.__non_callable_proto_members__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + _type_check_issubclass_arg_1(other) + non_method_attrs = sorted(cls.__non_callable_proto_members__) + raise TypeError( + "Protocols with non-method members don't support issubclass()." 
+ f" Non-method members: {str(non_method_attrs)[1:-1]}." + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + # this attribute is set by @runtime_checkable: + if val is None and attr not in cls.__non_callable_proto_members__: + break + else: + return True + + return False + + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return cls is Protocol and other is typing.Protocol + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. 
+ annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True + + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +if sys.version_info >= (3, 13): + runtime_checkable = typing.runtime_checkable +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol. + + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. + + For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + + Warning: this will check only the presence of the required methods, + not their type signatures! 
+ """ + if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): + raise TypeError(f'@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + + # typing.Protocol classes on <=3.11 break if we execute this block, + # because typing.Protocol classes on <=3.11 don't have a + # `__protocol_attrs__` attribute, and this block relies on the + # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ + # break if we *don't* execute this block, because *they* assume that all + # protocol classes have a `__non_callable_proto_members__` attribute + # (which this block sets) + if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + # See gh-113320 for why we compute this attribute here, + # rather than in `_ProtocolMeta.__init__` + cls.__non_callable_proto_members__ = set() + for attr in cls.__protocol_attrs__: + try: + is_callable = callable(getattr(cls, attr, None)) + except Exception as e: + raise TypeError( + f"Failed to determine whether protocol member {attr!r} " + "is a method member" + ) from e + else: + if not is_callable: + cls.__non_callable_proto_members__.add(attr) + + return cls + + +# The "runtime" alias exists for backwards compatibility. 
+runtime = runtime_checkable + + +# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound +else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. 
+ """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +# Update this to something like >=3.13.0b1 if and when +# PEP 728 is implemented in CPython +_PEP_728_IMPLEMENTED = False + +if _PEP_728_IMPLEMENTED: + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. + # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. + # PEP 728 (still pending) makes more changes. 
+ TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + + def _get_typeddict_qualifiers(annotation_type): + while True: + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + else: + break + elif annotation_origin is Required: + yield Required + annotation_type, = get_args(annotation_type) + elif annotation_origin is NotRequired: + yield NotRequired + annotation_type, = get_args(annotation_type) + elif annotation_origin is ReadOnly: + yield ReadOnly + annotation_type, = get_args(annotation_type) + else: + break + + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, *, total=True, closed=False): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. 
+ """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') + + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () + + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" + tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == "Protocol": + tp_dict.__qualname__ = name + + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases + + annotations = {} + if "__annotations__" in ns: + own_annotations = ns["__annotations__"] + elif "__annotate__" in ns: + # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated + own_annotations = ns["__annotate__"](1) + else: + own_annotations = {} + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + readonly_keys = set() + mutable_keys = set() + extra_items_type = None + + for base in bases: + base_dict = base.__dict__ + + annotations.update(base_dict.get('__annotations__', {})) + required_keys.update(base_dict.get('__required_keys__', ())) + optional_keys.update(base_dict.get('__optional_keys__', ())) + readonly_keys.update(base_dict.get('__readonly_keys__', ())) + mutable_keys.update(base_dict.get('__mutable_keys__', ())) + base_extra_items_type = base_dict.get('__extra_items__', None) + if base_extra_items_type is not None: + extra_items_type = base_extra_items_type + + if closed and extra_items_type is None: + extra_items_type = Never 
+ if closed and "__extra_items__" in own_annotations: + annotation_type = own_annotations.pop("__extra_items__") + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + if Required in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "Required" + ) + if NotRequired in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "NotRequired" + ) + extra_items_type = annotation_type + + annotations.update(own_annotations) + for annotation_key, annotation_type in own_annotations.items(): + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + + if Required in qualifiers: + required_keys.add(annotation_key) + elif NotRequired in qualifiers: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + if ReadOnly in qualifiers: + mutable_keys.discard(annotation_key) + readonly_keys.add(annotation_key) + else: + mutable_keys.add(annotation_key) + readonly_keys.discard(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + tp_dict.__readonly_keys__ = frozenset(readonly_keys) + tp_dict.__mutable_keys__ = frozenset(mutable_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + tp_dict.__closed__ = closed + tp_dict.__extra_items__ = extra_items_type + return tp_dict + + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. 
+ + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is + associated with a value of a consistent type. This expectation + is not checked at runtime. + + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports an additional equivalent form:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. + """ + if fields is _marker or fields is None: + if fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{typename} = TypedDict({typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. 
To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + if closed is not False and closed is not True: + kwargs["closed"] = closed + closed = False + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + if sys.version_info >= (3, 13): + raise TypeError("TypedDict takes no keyword arguments") + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(typename, (), ns, total=total, closed=closed) + td.__orig_bases__ = (TypedDict,) + return td + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(val, typ, /): + """Assert (to the type checker) that the value is of the given type. 
+ + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return val + + +if hasattr(typing, "ReadOnly"): # 3.13+ + get_type_hints = typing.get_type_hints +else: # <=3.13 + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. 
For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): # 3.9+ + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: # 3.8 + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. 
+ """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__, *self.__metadata__) + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. 
+ - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.8-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. 
+ + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__, *tp.__metadata__) + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. 
+ """ + raise TypeError(f"{self} is not subscriptable") +# 3.8 +else: + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""" + ) + + +if hasattr(typing, "NoDefault"): + NoDefault = typing.NoDefault +else: + class NoDefaultTypeMeta(type): + def __setattr__(cls, attr, value): + # TypeError is consistent with the behavior of NoneType + raise TypeError( + f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" + ) + + class NoDefaultType(metaclass=NoDefaultTypeMeta): + """The type of the NoDefault singleton.""" + + __slots__ = () + + def __new__(cls): + return globals().get("NoDefault") or object.__new__(cls) + + def __repr__(self): + return "typing_extensions.NoDefault" + + def __reduce__(self): + return "NoDefault" + + NoDefault = NoDefaultType() + del NoDefaultType, NoDefaultTypeMeta + + +def _set_default(type_param, default): + type_param.has_default = lambda: default is not NoDefault + type_param.__default__ = default + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + __init__ = _set_default + + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVar +else: + # Add default and infer_variance parameters from PEP 696 and 695 + class TypeVar(metaclass=_TypeVarLikeMeta): + """Type variable.""" + + _backported_typevarlike = typing.TypeVar + + def __new__(cls, name, *constraints, 
bound=None, + covariant=False, contravariant=False, + default=NoDefault, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + + _set_default(typevar, default) + _set_module(typevar) + + def _tvar_prepare_subst(alias, args): + if ( + typevar.has_default() + and alias.__parameters__.index(typevar) == len(args) + ): + args += (typevar.__default__,) + return args + + typevar.__typing_prepare_subst__ = _tvar_prepare_subst + return typevar + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.8-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. 
+ """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + + +if _PEP_696_IMPLEMENTED: + from typing import ParamSpec + +# 3.10+ +elif hasattr(typing, 'ParamSpec'): + + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=NoDefault): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance + + _set_default(paramspec, default) + _set_module(paramspec) + + def _paramspec_prepare_subst(alias, args): + params = alias.__parameters__ + i = params.index(paramspec) + if i == len(args) and paramspec.has_default(): + args = [*args, paramspec.__default__] + if i >= len(args): + raise 
TypeError(f"Too few arguments for {alias}") + # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. + if len(params) == 1 and not typing._is_param_expr(args[0]): + assert i == 0 + args = (args,) + # Convert lists to tuples to help other libraries cache the results. + elif isinstance(args[i], list): + args = (*args[:i], tuple(args[i]), *args[i + 1:]) + return args + + paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst + return paramspec + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") + +# 3.8-3.9 +else: + + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class ParamSpec(list, _DefaultMixin): + """Parameter specification variable. + + Usage:: + + P = ParamSpec('P') + + Parameter specification variables exist primarily for the benefit of static + type checkers. They are used to forward the parameter types of one + callable to another callable, a pattern commonly found in higher order + functions and decorators. They are only valid when used in ``Concatenate``, + or s the first argument to ``Callable``. In Python 3.10 and higher, + they are also supported in user-defined Generics at runtime. + See class Generic for more information on generic types. An + example for annotating a decorator:: + + T = TypeVar('T') + P = ParamSpec('P') + + def add_logging(f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + + Parameter specification variables defined with covariant=True or + contravariant=True can be used to declare covariant or contravariant + generic types. These keyword arguments are valid, but their actual semantics + are yet to be decided. 
See PEP 612 for details. + + Parameter specification variables can be introspected. e.g.: + + P.__name__ == 'T' + P.__bound__ == None + P.__covariant__ == False + P.__contravariant__ == False + + Note that only parameter specification variables defined in global scope can + be pickled. + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + infer_variance=False, default=NoDefault): + list.__init__(self, [self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) + if bound: + self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + else: + self.__bound__ = None + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __repr__(self): + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + # Hack to get typing._type_check to pass. + def __call__(self, *args, **kwargs): + pass + + +# 3.8-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + # Flag in 3.8. 
+ _special = False + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + +# 3.8-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. 
+ """ + return _concatenate_getitem(self, parameters) +# 3.8 +else: + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... 
+ + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.8 +else: + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. 
+ + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) + +# 3.13+ +if hasattr(typing, 'TypeIs'): + TypeIs = typing.TypeIs +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeIs(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. 
If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.8 +else: + class _TypeIsForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeIs = _TypeIsForm( + 'TypeIs', + doc="""Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. 
If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): # 3.11+ + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. + + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... 
+ + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. + + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): # 3.11+ + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): # 3.11+ + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. + + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): # 3.11+ + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. 
+ """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + class _RequiredForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +if hasattr(typing, 'ReadOnly'): + ReadOnly = typing.ReadOnly +elif sys.version_info[:2] >= (3, 9): # 3.9-3.12 + @_ExtensionsSpecialForm + def ReadOnly(self, parameters): + """A special typing construct to mark an item of a TypedDict as read-only. 
+ + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this property. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + ReadOnly = _ReadOnlyForm( + 'ReadOnly', + doc="""A special typing construct to mark a key of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this propery. + """) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. 
For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] + Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +elif sys.version_info[:2] >= (3, 9): # 3.9+ + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @property + def __typing_unpacked_tuple_args__(self): + assert self.__origin__ is Unpack + assert len(self.__args__) == 1 + arg, = self.__args__ + if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): + if arg.__origin__ is not tuple: + raise TypeError("Unpack[...] 
must be used with a tuple type") + return arg.__args__ + return None + + @_UnpackSpecialForm + def Unpack(self, parameters): + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: # 3.8 + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVarTuple + +elif hasattr(typing, "TypeVarTuple"): # 3.11+ + + def _unpack_args(*args): + newargs = [] + for arg in args: + subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) + if subargs is not None and not (subargs and subargs[-1] is ...): + newargs.extend(subargs) + else: + newargs.append(arg) + return newargs + + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): + """Type variable tuple.""" + + _backported_typevarlike = typing.TypeVarTuple + + def __new__(cls, name, *, default=NoDefault): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + + def _typevartuple_prepare_subst(alias, args): + params = alias.__parameters__ + typevartuple_index = params.index(tvt) + for param in params[typevartuple_index + 1:]: + if isinstance(param, TypeVarTuple): + raise TypeError( + f"More than one TypeVarTuple parameter in {alias}" + ) + + alen = len(args) + plen = len(params) + left = typevartuple_index + right = plen - typevartuple_index - 1 + var_tuple_index = None + fillarg = None + for k, arg in enumerate(args): + if not isinstance(arg, type): + subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) + if 
subargs and len(subargs) == 2 and subargs[-1] is ...: + if var_tuple_index is not None: + raise TypeError( + "More than one unpacked " + "arbitrary-length tuple argument" + ) + var_tuple_index = k + fillarg = subargs[0] + if var_tuple_index is not None: + left = min(left, var_tuple_index) + right = min(right, alen - var_tuple_index - 1) + elif left + right > alen: + raise TypeError(f"Too few arguments for {alias};" + f" actual {alen}, expected at least {plen - 1}") + if left == alen - right and tvt.has_default(): + replacement = _unpack_args(tvt.__default__) + else: + replacement = args[left: alen - right] + + return ( + *args[:left], + *([fillarg] * (typevartuple_index - left)), + replacement, + *([fillarg] * (plen - right - left - typevartuple_index - 1)), + *args[alen - right:], + ) + + tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: # <=3.10 + class TypeVarTuple(_DefaultMixin): + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. 
+ + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=NoDefault): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): # 3.11+ + reveal_type = typing.reveal_type +else: # <=3.10 + def reveal_type(obj: T, /) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. 
+ + """ + print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) + return obj + + +if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ + _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH +else: # <=3.10 + _ASSERT_NEVER_REPR_MAX_LENGTH = 100 + + +if hasattr(typing, "assert_never"): # 3.11+ + assert_never = typing.assert_never +else: # <=3.10 + def assert_never(arg: Never, /) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + value = repr(arg) + if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: + value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...' + raise AssertionError(f"Expected code to be unreachable, but got: {value}") + + +if sys.version_info >= (3, 12): # 3.12+ + # dataclass_transform exists in 3.11 but lacks the frozen_default parameter + dataclass_transform = typing.dataclass_transform +else: # <=3.11 + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... 
+ return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. 
+ + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "frozen_default": frozen_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +if hasattr(typing, "override"): # 3.12+ + override = typing.override +else: # <=3.11 + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(arg: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + try: + arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return arg + + +if hasattr(warnings, "deprecated"): + deprecated = warnings.deprecated +else: + _T = typing.TypeVar("_T") + + class deprecated: + """Indicate that a class, function or overload is deprecated. + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... 
+ @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ + def __init__( + self, + message: str, + /, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> None: + if not isinstance(message, str): + raise TypeError( + "Expected an object of type str for 'message', not " + f"{type(message).__name__!r}" + ) + self.message = message + self.category = category + self.stacklevel = stacklevel + + def __call__(self, arg: _T, /) -> _T: + # Make sure the inner functions created below don't + # retain a reference to self. 
+ msg = self.message + category = self.category + stacklevel = self.stacklevel + if category is None: + arg.__deprecated__ = msg + return arg + elif isinstance(arg, type): + import functools + from types import MethodType + + original_new = arg.__new__ + + @functools.wraps(original_new) + def __new__(cls, *args, **kwargs): + if cls is arg: + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. + elif cls.__init__ is object.__init__ and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + else: + return original_new(cls) + + arg.__new__ = staticmethod(__new__) + + original_init_subclass = arg.__init_subclass__ + # We need slightly different behavior if __init_subclass__ + # is a bound method (likely if it was implemented in Python) + if isinstance(original_init_subclass, MethodType): + original_init_subclass = original_init_subclass.__func__ + + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = classmethod(__init_subclass__) + # Or otherwise, which likely means it's a builtin such as + # object's implementation of __init_subclass__. 
+ else: + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = __init_subclass__ + + arg.__deprecated__ = __new__.__deprecated__ = msg + __init_subclass__.__deprecated__ = msg + return arg + elif callable(arg): + import functools + + @functools.wraps(arg) + def wrapper(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return arg(*args, **kwargs) + + arg.__deprecated__ = wrapper.__deprecated__ = msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {arg!r}" + ) + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. 
+ """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. + if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], '__default__', NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum(getattr(p, '__default__', NoDefault) + is not NoDefault for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + things = "arguments" if sys.version_info >= (3, 10) else "parameters" + raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" + f" for {cls}; actual {alen}, expected {expect_val}") +else: + # Python 3.11+ + + def _check_generic(cls, parameters, elen): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. 
+ if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], '__default__', NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum(getattr(p, '__default__', NoDefault) + is not NoDefault for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" + f" for {cls}; actual {alen}, expected {expect_val}") + +if not _PEP_696_IMPLEMENTED: + typing._check_generic = _check_generic + + +def _has_generic_or_protocol_as_origin() -> bool: + try: + frame = sys._getframe(2) + # - Catch AttributeError: not all Python implementations have sys._getframe() + # - Catch ValueError: maybe we're called from an unexpected module + # and the call stack isn't deep enough + except (AttributeError, ValueError): + return False # err on the side of leniency + else: + # If we somehow get invoked from outside typing.py, + # also err on the side of leniency + if frame.f_globals.get("__name__") != "typing": + return False + origin = frame.f_locals.get("origin") + # Cannot use "in" because origin may be an object with a buggy __eq__ that + # throws an error. + return origin is typing.Generic or origin is Protocol or origin is typing.Protocol + + +_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} + + +def _is_unpacked_typevartuple(x) -> bool: + if get_origin(x) is not Unpack: + return False + args = get_args(x) + return ( + bool(args) + and len(args) == 1 + and type(args[0]) in _TYPEVARTUPLE_TYPES + ) + + +# Python 3.11+ _collect_type_vars was renamed to _collect_parameters +if hasattr(typing, '_collect_type_vars'): + def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). 
For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with a default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in types: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + elif isinstance(t, typevar_types) and t not in tvars: + if enforce_default_ordering: + has_default = getattr(t, '__default__', NoDefault) is not NoDefault + if has_default: + if type_var_tuple_encountered: + raise TypeError('Type parameter with a default' + ' follows TypeVarTuple') + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + typing._collect_type_vars = _collect_type_vars +else: + def _collect_parameters(args): + """Collect all type variables and parameter specifications in args + in order of first appearance (lexicographic order). + + For example:: + + assert _collect_parameters((T, Callable[P, T])) == (T, P) + """ + parameters = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in args: + if isinstance(t, type): + # We don't want __parameters__ descriptor of a bare Python class. 
+ pass + elif isinstance(t, tuple): + # `t` might be a tuple, when `ParamSpec` is substituted with + # `[T, int]`, or `[int, *Ts]`, etc. + for x in t: + for collected in _collect_parameters([x]): + if collected not in parameters: + parameters.append(collected) + elif hasattr(t, '__typing_subst__'): + if t not in parameters: + if enforce_default_ordering: + has_default = ( + getattr(t, '__default__', NoDefault) is not NoDefault + ) + + if type_var_tuple_encountered and has_default: + raise TypeError('Type parameter with a default' + ' follows TypeVarTuple') + + if has_default: + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + parameters.append(t) + else: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + for x in getattr(t, '__parameters__', ()): + if x not in parameters: + parameters.append(x) + + return tuple(parameters) + + if not _PEP_696_IMPLEMENTED: + typing._collect_parameters = _collect_parameters + +# Backport typing.NamedTuple as it exists in Python 3.13. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
+# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): + NamedTuple = typing.NamedTuple +else: + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + if "__annotations__" in ns: + types = ns["__annotations__"] + elif "__annotate__" in ns: + # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated + types = ns["__annotate__"](1) + else: + types = {} + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + 
nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key, val in ns.items(): + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields: + if key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + try: + set_name = type(val).__set_name__ + except AttributeError: + pass + else: + try: + set_name(val, nm_tpl, key) + except BaseException as e: + msg = ( + f"Error calling __set_name__ on {type(val).__name__!r} " + f"instance {key!r} in {typename!r}" + ) + # BaseException.add_note() existed on py311, + # but the __set_name__ machinery didn't start + # using add_note() until py312. + # Making sure exceptions are raised in the same way + # as in "normal" classes seems most important here. + if sys.version_info >= (3, 12): + e.add_note(msg) + raise + else: + raise RuntimeError(msg) from e + + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(typename, fields=_marker, /, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) 
+ An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + if fields is _marker or fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + fields = kwargs.items() + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt + + +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): # noqa: B024 + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. 
Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... 
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__dict__.get("__orig_bases__", cls.__bases__) + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj, /): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. 
" + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 
+ + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, name: str, value: object, /) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(name) + super().__setattr__(name, value) + + def __delattr__(self, name: str, /) -> Never: + self._raise_attribute_error(name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' + ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. 
+ def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(tp, type) + and getattr(tp, '_is_protocol', False) + and tp is not Protocol + and tp is not typing.Protocol + ) + + def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ + if not is_protocol(tp): + raise TypeError(f'{tp!r} is not a Protocol') + if hasattr(tp, '__protocol_attrs__'): + return frozenset(tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(tp)) + + +if hasattr(typing, "Doc"): + Doc = typing.Doc +else: + class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. 
+ + This complements docstrings. + + The string value passed is available in the attribute ``documentation``. + + Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... + """ + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation + + +_CapsuleType = getattr(_types, "CapsuleType", None) + +if _CapsuleType is None: + try: + import _socket + except ImportError: + pass + else: + _CAPI = getattr(_socket, "CAPI", None) + if _CAPI is not None: + _CapsuleType = type(_CAPI) + +if _CapsuleType is not None: + CapsuleType = _CapsuleType + __all__.append("CapsuleType") + + +# Aliases for items that have always been in typing. 
+# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_basinhopping.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_basinhopping.py new file mode 100644 index 0000000000000000000000000000000000000000..333a7af410de7ed84ecd46b1cbc8d2b0c4362f4b --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_basinhopping.py @@ -0,0 +1,753 @@ +""" +basinhopping: The basinhopping global optimization algorithm +""" +import numpy as np +import math +import inspect +import scipy.optimize +from scipy._lib._util import check_random_state + +__all__ = ['basinhopping'] + + +_params = (inspect.Parameter('res_new', kind=inspect.Parameter.KEYWORD_ONLY), + inspect.Parameter('res_old', kind=inspect.Parameter.KEYWORD_ONLY)) 
+_new_accept_test_signature = inspect.Signature(parameters=_params) + + +class Storage: + """ + Class used to store the lowest energy structure + """ + def __init__(self, minres): + self._add(minres) + + def _add(self, minres): + self.minres = minres + self.minres.x = np.copy(minres.x) + + def update(self, minres): + if minres.success and (minres.fun < self.minres.fun + or not self.minres.success): + self._add(minres) + return True + else: + return False + + def get_lowest(self): + return self.minres + + +class BasinHoppingRunner: + """This class implements the core of the basinhopping algorithm. + + x0 : ndarray + The starting coordinates. + minimizer : callable + The local minimizer, with signature ``result = minimizer(x)``. + The return value is an `optimize.OptimizeResult` object. + step_taking : callable + This function displaces the coordinates randomly. Signature should + be ``x_new = step_taking(x)``. Note that `x` may be modified in-place. + accept_tests : list of callables + Each test is passed the kwargs `f_new`, `x_new`, `f_old` and + `x_old`. These tests will be used to judge whether or not to accept + the step. The acceptable return values are True, False, or ``"force + accept"``. If any of the tests return False then the step is rejected. + If ``"force accept"``, then this will override any other tests in + order to accept the step. This can be used, for example, to forcefully + escape from a local minimum that ``basinhopping`` is trapped in. + disp : bool, optional + Display status messages. 
+ + """ + def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False): + self.x = np.copy(x0) + self.minimizer = minimizer + self.step_taking = step_taking + self.accept_tests = accept_tests + self.disp = disp + + self.nstep = 0 + + # initialize return object + self.res = scipy.optimize.OptimizeResult() + self.res.minimization_failures = 0 + + # do initial minimization + minres = minimizer(self.x) + if not minres.success: + self.res.minimization_failures += 1 + if self.disp: + print("warning: basinhopping: local minimization failure") + self.x = np.copy(minres.x) + self.energy = minres.fun + self.incumbent_minres = minres # best minimize result found so far + if self.disp: + print("basinhopping step %d: f %g" % (self.nstep, self.energy)) + + # initialize storage class + self.storage = Storage(minres) + + if hasattr(minres, "nfev"): + self.res.nfev = minres.nfev + if hasattr(minres, "njev"): + self.res.njev = minres.njev + if hasattr(minres, "nhev"): + self.res.nhev = minres.nhev + + def _monte_carlo_step(self): + """Do one Monte Carlo iteration + + Randomly displace the coordinates, minimize, and decide whether + or not to accept the new coordinates. + """ + # Take a random step. Make a copy of x because the step_taking + # algorithm might change x in place + x_after_step = np.copy(self.x) + x_after_step = self.step_taking(x_after_step) + + # do a local minimization + minres = self.minimizer(x_after_step) + x_after_quench = minres.x + energy_after_quench = minres.fun + if not minres.success: + self.res.minimization_failures += 1 + if self.disp: + print("warning: basinhopping: local minimization failure") + if hasattr(minres, "nfev"): + self.res.nfev += minres.nfev + if hasattr(minres, "njev"): + self.res.njev += minres.njev + if hasattr(minres, "nhev"): + self.res.nhev += minres.nhev + + # accept the move based on self.accept_tests. If any test is False, + # then reject the step. 
If any test returns the special string + # 'force accept', then accept the step regardless. This can be used + # to forcefully escape from a local minimum if normal basin hopping + # steps are not sufficient. + accept = True + for test in self.accept_tests: + if inspect.signature(test) == _new_accept_test_signature: + testres = test(res_new=minres, res_old=self.incumbent_minres) + else: + testres = test(f_new=energy_after_quench, x_new=x_after_quench, + f_old=self.energy, x_old=self.x) + + if testres == 'force accept': + accept = True + break + elif testres is None: + raise ValueError("accept_tests must return True, False, or " + "'force accept'") + elif not testres: + accept = False + + # Report the result of the acceptance test to the take step class. + # This is for adaptive step taking + if hasattr(self.step_taking, "report"): + self.step_taking.report(accept, f_new=energy_after_quench, + x_new=x_after_quench, f_old=self.energy, + x_old=self.x) + + return accept, minres + + def one_cycle(self): + """Do one cycle of the basinhopping algorithm + """ + self.nstep += 1 + new_global_min = False + + accept, minres = self._monte_carlo_step() + + if accept: + self.energy = minres.fun + self.x = np.copy(minres.x) + self.incumbent_minres = minres # best minimize result found so far + new_global_min = self.storage.update(minres) + + # print some information + if self.disp: + self.print_report(minres.fun, accept) + if new_global_min: + print("found new global minimum on step %d with function" + " value %g" % (self.nstep, self.energy)) + + # save some variables as BasinHoppingRunner attributes + self.xtrial = minres.x + self.energy_trial = minres.fun + self.accept = accept + + return new_global_min + + def print_report(self, energy_trial, accept): + """print a status update""" + minres = self.storage.get_lowest() + print("basinhopping step %d: f %g trial_f %g accepted %d " + " lowest_f %g" % (self.nstep, self.energy, energy_trial, + accept, minres.fun)) + + +class 
AdaptiveStepsize: + """ + Class to implement adaptive stepsize. + + This class wraps the step taking class and modifies the stepsize to + ensure the true acceptance rate is as close as possible to the target. + + Parameters + ---------- + takestep : callable + The step taking routine. Must contain modifiable attribute + takestep.stepsize + accept_rate : float, optional + The target step acceptance rate + interval : int, optional + Interval for how often to update the stepsize + factor : float, optional + The step size is multiplied or divided by this factor upon each + update. + verbose : bool, optional + Print information about each update + + """ + def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9, + verbose=True): + self.takestep = takestep + self.target_accept_rate = accept_rate + self.interval = interval + self.factor = factor + self.verbose = verbose + + self.nstep = 0 + self.nstep_tot = 0 + self.naccept = 0 + + def __call__(self, x): + return self.take_step(x) + + def _adjust_step_size(self): + old_stepsize = self.takestep.stepsize + accept_rate = float(self.naccept) / self.nstep + if accept_rate > self.target_accept_rate: + # We're accepting too many steps. This generally means we're + # trapped in a basin. Take bigger steps. + self.takestep.stepsize /= self.factor + else: + # We're not accepting enough steps. Take smaller steps. 
+ self.takestep.stepsize *= self.factor + if self.verbose: + print(f"adaptive stepsize: acceptance rate {accept_rate:f} target " + f"{self.target_accept_rate:f} new stepsize " + f"{self.takestep.stepsize:g} old stepsize {old_stepsize:g}") + + def take_step(self, x): + self.nstep += 1 + self.nstep_tot += 1 + if self.nstep % self.interval == 0: + self._adjust_step_size() + return self.takestep(x) + + def report(self, accept, **kwargs): + "called by basinhopping to report the result of the step" + if accept: + self.naccept += 1 + + +class RandomDisplacement: + """Add a random displacement of maximum size `stepsize` to each coordinate. + + Calling this updates `x` in-place. + + Parameters + ---------- + stepsize : float, optional + Maximum stepsize in any dimension + random_gen : {None, int, `numpy.random.Generator`, + `numpy.random.RandomState`}, optional + + If `seed` is None (or `np.random`), the `numpy.random.RandomState` + singleton is used. + If `seed` is an int, a new ``RandomState`` instance is used, + seeded with `seed`. + If `seed` is already a ``Generator`` or ``RandomState`` instance then + that instance is used. + + """ + + def __init__(self, stepsize=0.5, random_gen=None): + self.stepsize = stepsize + self.random_gen = check_random_state(random_gen) + + def __call__(self, x): + x += self.random_gen.uniform(-self.stepsize, self.stepsize, + np.shape(x)) + return x + + +class MinimizerWrapper: + """ + wrap a minimizer function as a minimizer class + """ + def __init__(self, minimizer, func=None, **kwargs): + self.minimizer = minimizer + self.func = func + self.kwargs = kwargs + + def __call__(self, x0): + if self.func is None: + return self.minimizer(x0, **self.kwargs) + else: + return self.minimizer(self.func, x0, **self.kwargs) + + +class Metropolis: + """Metropolis acceptance criterion. + + Parameters + ---------- + T : float + The "temperature" parameter for the accept or reject criterion. 
+ random_gen : {None, int, `numpy.random.Generator`, + `numpy.random.RandomState`}, optional + + If `seed` is None (or `np.random`), the `numpy.random.RandomState` + singleton is used. + If `seed` is an int, a new ``RandomState`` instance is used, + seeded with `seed`. + If `seed` is already a ``Generator`` or ``RandomState`` instance then + that instance is used. + Random number generator used for acceptance test. + + """ + + def __init__(self, T, random_gen=None): + # Avoid ZeroDivisionError since "MBH can be regarded as a special case + # of the BH framework with the Metropolis criterion, where temperature + # T = 0." (Reject all steps that increase energy.) + self.beta = 1.0 / T if T != 0 else float('inf') + self.random_gen = check_random_state(random_gen) + + def accept_reject(self, res_new, res_old): + """ + Assuming the local search underlying res_new was successful: + If new energy is lower than old, it will always be accepted. + If new is higher than old, there is a chance it will be accepted, + less likely for larger differences. + """ + with np.errstate(invalid='ignore'): + # The energy values being fed to Metropolis are 1-length arrays, and if + # they are equal, their difference is 0, which gets multiplied by beta, + # which is inf, and array([0]) * float('inf') causes + # + # RuntimeWarning: invalid value encountered in multiply + # + # Ignore this warning so when the algorithm is on a flat plane, it always + # accepts the step, to try to move off the plane. 
+ prod = -(res_new.fun - res_old.fun) * self.beta + w = math.exp(min(0, prod)) + + rand = self.random_gen.uniform() + return w >= rand and (res_new.success or not res_old.success) + + def __call__(self, *, res_new, res_old): + """ + f_new and f_old are mandatory in kwargs + """ + return bool(self.accept_reject(res_new, res_old)) + + +def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5, + minimizer_kwargs=None, take_step=None, accept_test=None, + callback=None, interval=50, disp=False, niter_success=None, + seed=None, *, target_accept_rate=0.5, stepwise_factor=0.9): + """Find the global minimum of a function using the basin-hopping algorithm. + + Basin-hopping is a two-phase method that combines a global stepping + algorithm with local minimization at each step. Designed to mimic + the natural process of energy minimization of clusters of atoms, it works + well for similar problems with "funnel-like, but rugged" energy landscapes + [5]_. + + As the step-taking, step acceptance, and minimization methods are all + customizable, this function can also be used to implement other two-phase + methods. + + Parameters + ---------- + func : callable ``f(x, *args)`` + Function to be optimized. ``args`` can be passed as an optional item + in the dict `minimizer_kwargs` + x0 : array_like + Initial guess. + niter : integer, optional + The number of basin-hopping iterations. There will be a total of + ``niter + 1`` runs of the local minimizer. + T : float, optional + The "temperature" parameter for the acceptance or rejection criterion. + Higher "temperatures" mean that larger jumps in function value will be + accepted. For best results `T` should be comparable to the + separation (in function value) between local minima. + stepsize : float, optional + Maximum step size for use in the random displacement. 
+ minimizer_kwargs : dict, optional + Extra keyword arguments to be passed to the local minimizer + `scipy.optimize.minimize` Some important options could be: + + method : str + The minimization method (e.g. ``"L-BFGS-B"``) + args : tuple + Extra arguments passed to the objective function (`func`) and + its derivatives (Jacobian, Hessian). + + take_step : callable ``take_step(x)``, optional + Replace the default step-taking routine with this routine. The default + step-taking routine is a random displacement of the coordinates, but + other step-taking algorithms may be better for some systems. + `take_step` can optionally have the attribute ``take_step.stepsize``. + If this attribute exists, then `basinhopping` will adjust + ``take_step.stepsize`` in order to try to optimize the global minimum + search. + accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=fold, x_old=x_old)``, optional + Define a test which will be used to judge whether to accept the + step. This will be used in addition to the Metropolis test based on + "temperature" `T`. The acceptable return values are True, + False, or ``"force accept"``. If any of the tests return False + then the step is rejected. If the latter, then this will override any + other tests in order to accept the step. This can be used, for example, + to forcefully escape from a local minimum that `basinhopping` is + trapped in. + callback : callable, ``callback(x, f, accept)``, optional + A callback function which will be called for all minima found. ``x`` + and ``f`` are the coordinates and function value of the trial minimum, + and ``accept`` is whether that minimum was accepted. This can + be used, for example, to save the lowest N minima found. Also, + `callback` can be used to specify a user defined stop criterion by + optionally returning True to stop the `basinhopping` routine. 
+ interval : integer, optional + interval for how often to update the `stepsize` + disp : bool, optional + Set to True to print status messages + niter_success : integer, optional + Stop the run if the global minimum candidate remains the same for this + number of iterations. + seed : {None, int, `numpy.random.Generator`, `numpy.random.RandomState`}, optional + + If `seed` is None (or `np.random`), the `numpy.random.RandomState` + singleton is used. + If `seed` is an int, a new ``RandomState`` instance is used, + seeded with `seed`. + If `seed` is already a ``Generator`` or ``RandomState`` instance then + that instance is used. + Specify `seed` for repeatable minimizations. The random numbers + generated with this seed only affect the default Metropolis + `accept_test` and the default `take_step`. If you supply your own + `take_step` and `accept_test`, and these functions use random + number generation, then those functions are responsible for the state + of their random number generator. + target_accept_rate : float, optional + The target acceptance rate that is used to adjust the `stepsize`. + If the current acceptance rate is greater than the target, + then the `stepsize` is increased. Otherwise, it is decreased. + Range is (0, 1). Default is 0.5. + + .. versionadded:: 1.8.0 + + stepwise_factor : float, optional + The `stepsize` is multiplied or divided by this stepwise factor upon + each update. Range is (0, 1). Default is 0.9. + + .. versionadded:: 1.8.0 + + Returns + ------- + res : OptimizeResult + The optimization result represented as a `OptimizeResult` object. + Important attributes are: ``x`` the solution array, ``fun`` the value + of the function at the solution, and ``message`` which describes the + cause of the termination. The ``OptimizeResult`` object returned by the + selected minimizer at the lowest minimum is also contained within this + object and can be accessed through the ``lowest_optimization_result`` + attribute. 
See `OptimizeResult` for a description of other attributes. + + See Also + -------- + minimize : + The local minimization function called once for each basinhopping step. + `minimizer_kwargs` is passed to this routine. + + Notes + ----- + Basin-hopping is a stochastic algorithm which attempts to find the global + minimum of a smooth scalar function of one or more variables [1]_ [2]_ [3]_ + [4]_. The algorithm in its current form was described by David Wales and + Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/. + + The algorithm is iterative with each cycle composed of the following + features + + 1) random perturbation of the coordinates + + 2) local minimization + + 3) accept or reject the new coordinates based on the minimized function + value + + The acceptance test used here is the Metropolis criterion of standard Monte + Carlo algorithms, although there are many other possibilities [3]_. + + This global minimization method has been shown to be extremely efficient + for a wide variety of problems in physics and chemistry. It is + particularly useful when the function has many minima separated by large + barriers. See the `Cambridge Cluster Database + `_ for databases of molecular + systems that have been optimized primarily using basin-hopping. This + database includes minimization problems exceeding 300 degrees of freedom. + + See the free software program `GMIN `_ + for a Fortran implementation of basin-hopping. This implementation has many + variations of the procedure described above, including more + advanced step taking algorithms and alternate acceptance criterion. + + For stochastic global optimization there is no way to determine if the true + global minimum has actually been found. Instead, as a consistency check, + the algorithm can be run from a number of different random starting points + to ensure the lowest minimum found in each example has converged to the + global minimum. 
For this reason, `basinhopping` will by default simply + run for the number of iterations `niter` and return the lowest minimum + found. It is left to the user to ensure that this is in fact the global + minimum. + + Choosing `stepsize`: This is a crucial parameter in `basinhopping` and + depends on the problem being solved. The step is chosen uniformly in the + region from x0-stepsize to x0+stepsize, in each dimension. Ideally, it + should be comparable to the typical separation (in argument values) between + local minima of the function being optimized. `basinhopping` will, by + default, adjust `stepsize` to find an optimal value, but this may take + many iterations. You will get quicker results if you set a sensible + initial value for ``stepsize``. + + Choosing `T`: The parameter `T` is the "temperature" used in the + Metropolis criterion. Basinhopping steps are always accepted if + ``func(xnew) < func(xold)``. Otherwise, they are accepted with + probability:: + + exp( -(func(xnew) - func(xold)) / T ) + + So, for best results, `T` should to be comparable to the typical + difference (in function values) between local minima. (The height of + "walls" between local minima is irrelevant.) + + If `T` is 0, the algorithm becomes Monotonic Basin-Hopping, in which all + steps that increase energy are rejected. + + .. versionadded:: 0.12.0 + + References + ---------- + .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University Press, + Cambridge, UK. + .. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping and + the Lowest Energy Structures of Lennard-Jones Clusters Containing up to + 110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111. + .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to the + multiple-minima problem in protein folding, Proc. Natl. Acad. Sci. USA, + 1987, 84, 6611. + .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters, + crystals, and biomolecules, Science, 1999, 285, 1368. 
+ .. [5] Olson, B., Hashmi, I., Molloy, K., and Shehu1, A., Basin Hopping as + a General and Versatile Optimization Framework for the Characterization + of Biological Macromolecules, Advances in Artificial Intelligence, + Volume 2012 (2012), Article ID 674832, :doi:`10.1155/2012/674832` + + Examples + -------- + The following example is a 1-D minimization problem, with many + local minima superimposed on a parabola. + + >>> import numpy as np + >>> from scipy.optimize import basinhopping + >>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x + >>> x0 = [1.] + + Basinhopping, internally, uses a local minimization algorithm. We will use + the parameter `minimizer_kwargs` to tell basinhopping which algorithm to + use and how to set up that minimizer. This parameter will be passed to + `scipy.optimize.minimize`. + + >>> minimizer_kwargs = {"method": "BFGS"} + >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs, + ... niter=200) + >>> # the global minimum is: + >>> ret.x, ret.fun + -0.1951, -1.0009 + + Next consider a 2-D minimization problem. Also, this time, we + will use gradient information to significantly speed up the search. + + >>> def func2d(x): + ... f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + + ... 0.2) * x[0] + ... df = np.zeros(2) + ... df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + ... df[1] = 2. * x[1] + 0.2 + ... return f, df + + We'll also use a different local minimization algorithm. Also, we must tell + the minimizer that our function returns both energy and gradient (Jacobian). + + >>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True} + >>> x0 = [1.0, 1.0] + >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, + ... niter=200) + >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0], + ... ret.x[1], + ... ret.fun)) + global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109 + + Here is an example using a custom step-taking routine. 
Imagine you want + the first coordinate to take larger steps than the rest of the coordinates. + This can be implemented like so: + + >>> class MyTakeStep: + ... def __init__(self, stepsize=0.5): + ... self.stepsize = stepsize + ... self.rng = np.random.default_rng() + ... def __call__(self, x): + ... s = self.stepsize + ... x[0] += self.rng.uniform(-2.*s, 2.*s) + ... x[1:] += self.rng.uniform(-s, s, x[1:].shape) + ... return x + + Since ``MyTakeStep.stepsize`` exists basinhopping will adjust the magnitude + of `stepsize` to optimize the search. We'll use the same 2-D function as + before + + >>> mytakestep = MyTakeStep() + >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, + ... niter=200, take_step=mytakestep) + >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0], + ... ret.x[1], + ... ret.fun)) + global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109 + + Now, let's do an example using a custom callback function which prints the + value of every minimum found + + >>> def print_fun(x, f, accepted): + ... print("at minimum %.4f accepted %d" % (f, int(accepted))) + + We'll run it for only 10 basinhopping steps this time. + + >>> rng = np.random.default_rng() + >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, + ... niter=10, callback=print_fun, seed=rng) + at minimum 0.4159 accepted 1 + at minimum -0.4317 accepted 1 + at minimum -1.0109 accepted 1 + at minimum -0.9073 accepted 1 + at minimum -0.4317 accepted 0 + at minimum -0.1021 accepted 1 + at minimum -0.7425 accepted 1 + at minimum -0.9073 accepted 1 + at minimum -0.4317 accepted 0 + at minimum -0.7425 accepted 1 + at minimum -0.9073 accepted 1 + + The minimum at -1.0109 is actually the global minimum, found already on the + 8th iteration. + + """ # numpy/numpydoc#87 # noqa: E501 + if target_accept_rate <= 0. or target_accept_rate >= 1.: + raise ValueError('target_accept_rate has to be in range (0, 1)') + if stepwise_factor <= 0. 
or stepwise_factor >= 1.: + raise ValueError('stepwise_factor has to be in range (0, 1)') + + x0 = np.array(x0) + + # set up the np.random generator + rng = check_random_state(seed) + + # set up minimizer + if minimizer_kwargs is None: + minimizer_kwargs = dict() + wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func, + **minimizer_kwargs) + + # set up step-taking algorithm + if take_step is not None: + if not callable(take_step): + raise TypeError("take_step must be callable") + # if take_step.stepsize exists then use AdaptiveStepsize to control + # take_step.stepsize + if hasattr(take_step, "stepsize"): + take_step_wrapped = AdaptiveStepsize( + take_step, interval=interval, + accept_rate=target_accept_rate, + factor=stepwise_factor, + verbose=disp) + else: + take_step_wrapped = take_step + else: + # use default + displace = RandomDisplacement(stepsize=stepsize, random_gen=rng) + take_step_wrapped = AdaptiveStepsize(displace, interval=interval, + accept_rate=target_accept_rate, + factor=stepwise_factor, + verbose=disp) + + # set up accept tests + accept_tests = [] + if accept_test is not None: + if not callable(accept_test): + raise TypeError("accept_test must be callable") + accept_tests = [accept_test] + + # use default + metropolis = Metropolis(T, random_gen=rng) + accept_tests.append(metropolis) + + if niter_success is None: + niter_success = niter + 2 + + bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped, + accept_tests, disp=disp) + + # The wrapped minimizer is called once during construction of + # BasinHoppingRunner, so run the callback + if callable(callback): + callback(bh.storage.minres.x, bh.storage.minres.fun, True) + + # start main iteration loop + count, i = 0, 0 + message = ["requested number of basinhopping iterations completed" + " successfully"] + for i in range(niter): + new_global_min = bh.one_cycle() + + if callable(callback): + # should we pass a copy of x? 
+ val = callback(bh.xtrial, bh.energy_trial, bh.accept) + if val is not None: + if val: + message = ["callback function requested stop early by" + "returning True"] + break + + count += 1 + if new_global_min: + count = 0 + elif count > niter_success: + message = ["success condition satisfied"] + break + + # prepare return object + res = bh.res + res.lowest_optimization_result = bh.storage.get_lowest() + res.x = np.copy(res.lowest_optimization_result.x) + res.fun = res.lowest_optimization_result.fun + res.message = message + res.nit = i + 1 + res.success = res.lowest_optimization_result.success + return res diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_cobyqa_py.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_cobyqa_py.py new file mode 100644 index 0000000000000000000000000000000000000000..4928fca9c162fe1117451e3325e6a870b5933a6f --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_cobyqa_py.py @@ -0,0 +1,62 @@ +import numpy as np + +from ._optimize import _check_unknown_options + + +def _minimize_cobyqa(fun, x0, args=(), bounds=None, constraints=(), + callback=None, disp=False, maxfev=None, maxiter=None, + f_target=-np.inf, feasibility_tol=1e-8, + initial_tr_radius=1.0, final_tr_radius=1e-6, scale=False, + **unknown_options): + """ + Minimize a scalar function of one or more variables using the + Constrained Optimization BY Quadratic Approximations (COBYQA) algorithm [1]_. + + .. versionadded:: 1.14.0 + + Options + ------- + disp : bool + Set to True to print information about the optimization procedure. + maxfev : int + Maximum number of function evaluations. + maxiter : int + Maximum number of iterations. + f_target : float + Target value for the objective function. The optimization procedure is + terminated when the objective function value of a feasible point (see + `feasibility_tol` below) is less than or equal to this target. 
+ feasibility_tol : float + Absolute tolerance for the constraint violation. + initial_tr_radius : float + Initial trust-region radius. Typically, this value should be in the + order of one tenth of the greatest expected change to the variables. + final_tr_radius : float + Final trust-region radius. It should indicate the accuracy required in + the final values of the variables. If provided, this option overrides + the value of `tol` in the `minimize` function. + scale : bool + Set to True to scale the variables according to the bounds. If True and + if all the lower and upper bounds are finite, the variables are scaled + to be within the range :math:`[-1, 1]`. If any of the lower or upper + bounds is infinite, the variables are not scaled. + + References + ---------- + .. [1] COBYQA + https://www.cobyqa.com/stable/ + """ + from .._lib.cobyqa import minimize # import here to avoid circular imports + + _check_unknown_options(unknown_options) + options = { + 'disp': bool(disp), + 'maxfev': int(maxfev) if maxfev is not None else 500 * len(x0), + 'maxiter': int(maxiter) if maxiter is not None else 1000 * len(x0), + 'target': float(f_target), + 'feasibility_tol': float(feasibility_tol), + 'radius_init': float(initial_tr_radius), + 'radius_final': float(final_tr_radius), + 'scale': bool(scale), + } + return minimize(fun, x0, args, bounds, constraints, callback, options) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_dcsrch.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_dcsrch.py new file mode 100644 index 0000000000000000000000000000000000000000..f8b4df4763ba4f699869431a0b6528383c2f0328 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_dcsrch.py @@ -0,0 +1,728 @@ +import numpy as np + +""" +# 2023 - ported from minpack2.dcsrch, dcstep (Fortran) to Python +c MINPACK-1 Project. June 1983. +c Argonne National Laboratory. +c Jorge J. More' and David J. Thuente. +c +c MINPACK-2 Project. November 1993. 
+c Argonne National Laboratory and University of Minnesota. +c Brett M. Averick, Richard G. Carter, and Jorge J. More'. +""" + +# NOTE this file was linted by black on first commit, and can be kept that way. + + +class DCSRCH: + """ + Parameters + ---------- + phi : callable phi(alpha) + Function at point `alpha` + derphi : callable phi'(alpha) + Objective function derivative. Returns a scalar. + ftol : float + A nonnegative tolerance for the sufficient decrease condition. + gtol : float + A nonnegative tolerance for the curvature condition. + xtol : float + A nonnegative relative tolerance for an acceptable step. The + subroutine exits with a warning if the relative difference between + sty and stx is less than xtol. + stpmin : float + A nonnegative lower bound for the step. + stpmax : + A nonnegative upper bound for the step. + + Notes + ----- + + This subroutine finds a step that satisfies a sufficient + decrease condition and a curvature condition. + + Each call of the subroutine updates an interval with + endpoints stx and sty. The interval is initially chosen + so that it contains a minimizer of the modified function + + psi(stp) = f(stp) - f(0) - ftol*stp*f'(0). + + If psi(stp) <= 0 and f'(stp) >= 0 for some step, then the + interval is chosen so that it contains a minimizer of f. + + The algorithm is designed to find a step that satisfies + the sufficient decrease condition + + f(stp) <= f(0) + ftol*stp*f'(0), + + and the curvature condition + + abs(f'(stp)) <= gtol*abs(f'(0)). + + If ftol is less than gtol and if, for example, the function + is bounded below, then there is always a step which satisfies + both conditions. + + If no step can be found that satisfies both conditions, then + the algorithm stops with a warning. In this case stp only + satisfies the sufficient decrease condition. + + A typical invocation of dcsrch has the following outline: + + Evaluate the function at stp = 0.0d0; store in f. + Evaluate the gradient at stp = 0.0d0; store in g. 
+ Choose a starting step stp. + + task = 'START' + 10 continue + call dcsrch(stp,f,g,ftol,gtol,xtol,task,stpmin,stpmax, + isave,dsave) + if (task .eq. 'FG') then + Evaluate the function and the gradient at stp + go to 10 + end if + + NOTE: The user must not alter work arrays between calls. + + The subroutine statement is + + subroutine dcsrch(f,g,stp,ftol,gtol,xtol,stpmin,stpmax, + task,isave,dsave) + where + + stp is a double precision variable. + On entry stp is the current estimate of a satisfactory + step. On initial entry, a positive initial estimate + must be provided. + On exit stp is the current estimate of a satisfactory step + if task = 'FG'. If task = 'CONV' then stp satisfies + the sufficient decrease and curvature condition. + + f is a double precision variable. + On initial entry f is the value of the function at 0. + On subsequent entries f is the value of the + function at stp. + On exit f is the value of the function at stp. + + g is a double precision variable. + On initial entry g is the derivative of the function at 0. + On subsequent entries g is the derivative of the + function at stp. + On exit g is the derivative of the function at stp. + + ftol is a double precision variable. + On entry ftol specifies a nonnegative tolerance for the + sufficient decrease condition. + On exit ftol is unchanged. + + gtol is a double precision variable. + On entry gtol specifies a nonnegative tolerance for the + curvature condition. + On exit gtol is unchanged. + + xtol is a double precision variable. + On entry xtol specifies a nonnegative relative tolerance + for an acceptable step. The subroutine exits with a + warning if the relative difference between sty and stx + is less than xtol. + + On exit xtol is unchanged. + + task is a character variable of length at least 60. + On initial entry task must be set to 'START'. 
+ On exit task indicates the required action: + + If task(1:2) = 'FG' then evaluate the function and + derivative at stp and call dcsrch again. + + If task(1:4) = 'CONV' then the search is successful. + + If task(1:4) = 'WARN' then the subroutine is not able + to satisfy the convergence conditions. The exit value of + stp contains the best point found during the search. + + If task(1:5) = 'ERROR' then there is an error in the + input arguments. + + On exit with convergence, a warning or an error, the + variable task contains additional information. + + stpmin is a double precision variable. + On entry stpmin is a nonnegative lower bound for the step. + On exit stpmin is unchanged. + + stpmax is a double precision variable. + On entry stpmax is a nonnegative upper bound for the step. + On exit stpmax is unchanged. + + isave is an integer work array of dimension 2. + + dsave is a double precision work array of dimension 13. + + Subprograms called + + MINPACK-2 ... dcstep + MINPACK-1 Project. June 1983. + Argonne National Laboratory. + Jorge J. More' and David J. Thuente. + + MINPACK-2 Project. November 1993. + Argonne National Laboratory and University of Minnesota. + Brett M. Averick, Richard G. Carter, and Jorge J. More'. + """ + + def __init__(self, phi, derphi, ftol, gtol, xtol, stpmin, stpmax): + self.stage = None + self.ginit = None + self.gtest = None + self.gx = None + self.gy = None + self.finit = None + self.fx = None + self.fy = None + self.stx = None + self.sty = None + self.stmin = None + self.stmax = None + self.width = None + self.width1 = None + + # leave all assessment of tolerances/limits to the first call of + # this object + self.ftol = ftol + self.gtol = gtol + self.xtol = xtol + self.stpmin = stpmin + self.stpmax = stpmax + + self.phi = phi + self.derphi = derphi + + def __call__(self, alpha1, phi0=None, derphi0=None, maxiter=100): + """ + Parameters + ---------- + alpha1 : float + alpha1 is the current estimate of a satisfactory + step. 
A positive initial estimate must be provided. + phi0 : float + the value of `phi` at 0 (if known). + derphi0 : float + the derivative of `derphi` at 0 (if known). + maxiter : int + + Returns + ------- + alpha : float + Step size, or None if no suitable step was found. + phi : float + Value of `phi` at the new point `alpha`. + phi0 : float + Value of `phi` at `alpha=0`. + task : bytes + On exit task indicates status information. + + If task[:4] == b'CONV' then the search is successful. + + If task[:4] == b'WARN' then the subroutine is not able + to satisfy the convergence conditions. The exit value of + stp contains the best point found during the search. + + If task[:5] == b'ERROR' then there is an error in the + input arguments. + """ + if phi0 is None: + phi0 = self.phi(0.0) + if derphi0 is None: + derphi0 = self.derphi(0.0) + + phi1 = phi0 + derphi1 = derphi0 + + task = b"START" + for i in range(maxiter): + stp, phi1, derphi1, task = self._iterate( + alpha1, phi1, derphi1, task + ) + + if not np.isfinite(stp): + task = b"WARN" + stp = None + break + + if task[:2] == b"FG": + alpha1 = stp + phi1 = self.phi(stp) + derphi1 = self.derphi(stp) + else: + break + else: + # maxiter reached, the line search did not converge + stp = None + task = b"WARNING: dcsrch did not converge within max iterations" + + if task[:5] == b"ERROR" or task[:4] == b"WARN": + stp = None # failed + + return stp, phi1, phi0, task + + def _iterate(self, stp, f, g, task): + """ + Parameters + ---------- + stp : float + The current estimate of a satisfactory step. On initial entry, a + positive initial estimate must be provided. + f : float + On first call f is the value of the function at 0. On subsequent + entries f should be the value of the function at stp. + g : float + On initial entry g is the derivative of the function at 0. On + subsequent entries g is the derivative of the function at stp. + task : bytes + On initial entry task must be set to 'START'. 
+ + On exit with convergence, a warning or an error, the + variable task contains additional information. + + + Returns + ------- + stp, f, g, task: tuple + + stp : float + the current estimate of a satisfactory step if task = 'FG'. If + task = 'CONV' then stp satisfies the sufficient decrease and + curvature condition. + f : float + the value of the function at stp. + g : float + the derivative of the function at stp. + task : bytes + On exit task indicates the required action: + + If task(1:2) == b'FG' then evaluate the function and + derivative at stp and call dcsrch again. + + If task(1:4) == b'CONV' then the search is successful. + + If task(1:4) == b'WARN' then the subroutine is not able + to satisfy the convergence conditions. The exit value of + stp contains the best point found during the search. + + If task(1:5) == b'ERROR' then there is an error in the + input arguments. + """ + p5 = 0.5 + p66 = 0.66 + xtrapl = 1.1 + xtrapu = 4.0 + + if task[:5] == b"START": + if stp < self.stpmin: + task = b"ERROR: STP .LT. STPMIN" + if stp > self.stpmax: + task = b"ERROR: STP .GT. STPMAX" + if g >= 0: + task = b"ERROR: INITIAL G .GE. ZERO" + if self.ftol < 0: + task = b"ERROR: FTOL .LT. ZERO" + if self.gtol < 0: + task = b"ERROR: GTOL .LT. ZERO" + if self.xtol < 0: + task = b"ERROR: XTOL .LT. ZERO" + if self.stpmin < 0: + task = b"ERROR: STPMIN .LT. ZERO" + if self.stpmax < self.stpmin: + task = b"ERROR: STPMAX .LT. STPMIN" + + if task[:5] == b"ERROR": + return stp, f, g, task + + # Initialize local variables. + + self.brackt = False + self.stage = 1 + self.finit = f + self.ginit = g + self.gtest = self.ftol * self.ginit + self.width = self.stpmax - self.stpmin + self.width1 = self.width / p5 + + # The variables stx, fx, gx contain the values of the step, + # function, and derivative at the best step. + # The variables sty, fy, gy contain the value of the step, + # function, and derivative at sty. 
+ # The variables stp, f, g contain the values of the step, + # function, and derivative at stp. + + self.stx = 0.0 + self.fx = self.finit + self.gx = self.ginit + self.sty = 0.0 + self.fy = self.finit + self.gy = self.ginit + self.stmin = 0 + self.stmax = stp + xtrapu * stp + task = b"FG" + return stp, f, g, task + + # in the original Fortran this was a location to restore variables + # we don't need to do that because they're attributes. + + # If psi(stp) <= 0 and f'(stp) >= 0 for some step, then the + # algorithm enters the second stage. + ftest = self.finit + stp * self.gtest + + if self.stage == 1 and f <= ftest and g >= 0: + self.stage = 2 + + # test for warnings + if self.brackt and (stp <= self.stmin or stp >= self.stmax): + task = b"WARNING: ROUNDING ERRORS PREVENT PROGRESS" + if self.brackt and self.stmax - self.stmin <= self.xtol * self.stmax: + task = b"WARNING: XTOL TEST SATISFIED" + if stp == self.stpmax and f <= ftest and g <= self.gtest: + task = b"WARNING: STP = STPMAX" + if stp == self.stpmin and (f > ftest or g >= self.gtest): + task = b"WARNING: STP = STPMIN" + + # test for convergence + if f <= ftest and abs(g) <= self.gtol * -self.ginit: + task = b"CONVERGENCE" + + # test for termination + if task[:4] == b"WARN" or task[:4] == b"CONV": + return stp, f, g, task + + # A modified function is used to predict the step during the + # first stage if a lower function value has been obtained but + # the decrease is not sufficient. + if self.stage == 1 and f <= self.fx and f > ftest: + # Define the modified function and derivative values. + fm = f - stp * self.gtest + fxm = self.fx - self.stx * self.gtest + fym = self.fy - self.sty * self.gtest + gm = g - self.gtest + gxm = self.gx - self.gtest + gym = self.gy - self.gtest + + # Call dcstep to update stx, sty, and to compute the new step. + # dcstep can have several operations which can produce NaN + # e.g. inf/inf. Filter these out. 
+ with np.errstate(invalid="ignore", over="ignore"): + tup = dcstep( + self.stx, + fxm, + gxm, + self.sty, + fym, + gym, + stp, + fm, + gm, + self.brackt, + self.stmin, + self.stmax, + ) + self.stx, fxm, gxm, self.sty, fym, gym, stp, self.brackt = tup + + # Reset the function and derivative values for f + self.fx = fxm + self.stx * self.gtest + self.fy = fym + self.sty * self.gtest + self.gx = gxm + self.gtest + self.gy = gym + self.gtest + + else: + # Call dcstep to update stx, sty, and to compute the new step. + # dcstep can have several operations which can produce NaN + # e.g. inf/inf. Filter these out. + + with np.errstate(invalid="ignore", over="ignore"): + tup = dcstep( + self.stx, + self.fx, + self.gx, + self.sty, + self.fy, + self.gy, + stp, + f, + g, + self.brackt, + self.stmin, + self.stmax, + ) + ( + self.stx, + self.fx, + self.gx, + self.sty, + self.fy, + self.gy, + stp, + self.brackt, + ) = tup + + # Decide if a bisection step is needed + if self.brackt: + if abs(self.sty - self.stx) >= p66 * self.width1: + stp = self.stx + p5 * (self.sty - self.stx) + self.width1 = self.width + self.width = abs(self.sty - self.stx) + + # Set the minimum and maximum steps allowed for stp. + if self.brackt: + self.stmin = min(self.stx, self.sty) + self.stmax = max(self.stx, self.sty) + else: + self.stmin = stp + xtrapl * (stp - self.stx) + self.stmax = stp + xtrapu * (stp - self.stx) + + # Force the step to be within the bounds stpmax and stpmin. + stp = np.clip(stp, self.stpmin, self.stpmax) + + # If further progress is not possible, let stp be the best + # point obtained during the search. 
+ if ( + self.brackt + and (stp <= self.stmin or stp >= self.stmax) + or ( + self.brackt + and self.stmax - self.stmin <= self.xtol * self.stmax + ) + ): + stp = self.stx + + # Obtain another function and derivative + task = b"FG" + return stp, f, g, task + + +def dcstep(stx, fx, dx, sty, fy, dy, stp, fp, dp, brackt, stpmin, stpmax): + """ + Subroutine dcstep + + This subroutine computes a safeguarded step for a search + procedure and updates an interval that contains a step that + satisfies a sufficient decrease and a curvature condition. + + The parameter stx contains the step with the least function + value. If brackt is set to .true. then a minimizer has + been bracketed in an interval with endpoints stx and sty. + The parameter stp contains the current step. + The subroutine assumes that if brackt is set to .true. then + + min(stx,sty) < stp < max(stx,sty), + + and that the derivative at stx is negative in the direction + of the step. + + The subroutine statement is + + subroutine dcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,brackt, + stpmin,stpmax) + + where + + stx is a double precision variable. + On entry stx is the best step obtained so far and is an + endpoint of the interval that contains the minimizer. + On exit stx is the updated best step. + + fx is a double precision variable. + On entry fx is the function at stx. + On exit fx is the function at stx. + + dx is a double precision variable. + On entry dx is the derivative of the function at + stx. The derivative must be negative in the direction of + the step, that is, dx and stp - stx must have opposite + signs. + On exit dx is the derivative of the function at stx. + + sty is a double precision variable. + On entry sty is the second endpoint of the interval that + contains the minimizer. + On exit sty is the updated endpoint of the interval that + contains the minimizer. + + fy is a double precision variable. + On entry fy is the function at sty. + On exit fy is the function at sty. 
+ + dy is a double precision variable. + On entry dy is the derivative of the function at sty. + On exit dy is the derivative of the function at the exit sty. + + stp is a double precision variable. + On entry stp is the current step. If brackt is set to .true. + then on input stp must be between stx and sty. + On exit stp is a new trial step. + + fp is a double precision variable. + On entry fp is the function at stp + On exit fp is unchanged. + + dp is a double precision variable. + On entry dp is the derivative of the function at stp. + On exit dp is unchanged. + + brackt is an logical variable. + On entry brackt specifies if a minimizer has been bracketed. + Initially brackt must be set to .false. + On exit brackt specifies if a minimizer has been bracketed. + When a minimizer is bracketed brackt is set to .true. + + stpmin is a double precision variable. + On entry stpmin is a lower bound for the step. + On exit stpmin is unchanged. + + stpmax is a double precision variable. + On entry stpmax is an upper bound for the step. + On exit stpmax is unchanged. + + MINPACK-1 Project. June 1983 + Argonne National Laboratory. + Jorge J. More' and David J. Thuente. + + MINPACK-2 Project. November 1993. + Argonne National Laboratory and University of Minnesota. + Brett M. Averick and Jorge J. More'. + + """ + sgn_dp = np.sign(dp) + sgn_dx = np.sign(dx) + + # sgnd = dp * (dx / abs(dx)) + sgnd = sgn_dp * sgn_dx + + # First case: A higher function value. The minimum is bracketed. + # If the cubic step is closer to stx than the quadratic step, the + # cubic step is taken, otherwise the average of the cubic and + # quadratic steps is taken. 
+ if fp > fx: + theta = 3.0 * (fx - fp) / (stp - stx) + dx + dp + s = max(abs(theta), abs(dx), abs(dp)) + gamma = s * np.sqrt((theta / s) ** 2 - (dx / s) * (dp / s)) + if stp < stx: + gamma *= -1 + p = (gamma - dx) + theta + q = ((gamma - dx) + gamma) + dp + r = p / q + stpc = stx + r * (stp - stx) + stpq = stx + ((dx / ((fx - fp) / (stp - stx) + dx)) / 2.0) * (stp - stx) + if abs(stpc - stx) <= abs(stpq - stx): + stpf = stpc + else: + stpf = stpc + (stpq - stpc) / 2.0 + brackt = True + elif sgnd < 0.0: + # Second case: A lower function value and derivatives of opposite + # sign. The minimum is bracketed. If the cubic step is farther from + # stp than the secant step, the cubic step is taken, otherwise the + # secant step is taken. + theta = 3 * (fx - fp) / (stp - stx) + dx + dp + s = max(abs(theta), abs(dx), abs(dp)) + gamma = s * np.sqrt((theta / s) ** 2 - (dx / s) * (dp / s)) + if stp > stx: + gamma *= -1 + p = (gamma - dp) + theta + q = ((gamma - dp) + gamma) + dx + r = p / q + stpc = stp + r * (stx - stp) + stpq = stp + (dp / (dp - dx)) * (stx - stp) + if abs(stpc - stp) > abs(stpq - stp): + stpf = stpc + else: + stpf = stpq + brackt = True + elif abs(dp) < abs(dx): + # Third case: A lower function value, derivatives of the same sign, + # and the magnitude of the derivative decreases. + + # The cubic step is computed only if the cubic tends to infinity + # in the direction of the step or if the minimum of the cubic + # is beyond stp. Otherwise the cubic step is defined to be the + # secant step. + theta = 3 * (fx - fp) / (stp - stx) + dx + dp + s = max(abs(theta), abs(dx), abs(dp)) + + # The case gamma = 0 only arises if the cubic does not tend + # to infinity in the direction of the step. 
+ gamma = s * np.sqrt(max(0, (theta / s) ** 2 - (dx / s) * (dp / s))) + if stp > stx: + gamma = -gamma + p = (gamma - dp) + theta + q = (gamma + (dx - dp)) + gamma + r = p / q + if r < 0 and gamma != 0: + stpc = stp + r * (stx - stp) + elif stp > stx: + stpc = stpmax + else: + stpc = stpmin + stpq = stp + (dp / (dp - dx)) * (stx - stp) + + if brackt: + # A minimizer has been bracketed. If the cubic step is + # closer to stp than the secant step, the cubic step is + # taken, otherwise the secant step is taken. + if abs(stpc - stp) < abs(stpq - stp): + stpf = stpc + else: + stpf = stpq + + if stp > stx: + stpf = min(stp + 0.66 * (sty - stp), stpf) + else: + stpf = max(stp + 0.66 * (sty - stp), stpf) + else: + # A minimizer has not been bracketed. If the cubic step is + # farther from stp than the secant step, the cubic step is + # taken, otherwise the secant step is taken. + if abs(stpc - stp) > abs(stpq - stp): + stpf = stpc + else: + stpf = stpq + stpf = np.clip(stpf, stpmin, stpmax) + + else: + # Fourth case: A lower function value, derivatives of the same sign, + # and the magnitude of the derivative does not decrease. If the + # minimum is not bracketed, the step is either stpmin or stpmax, + # otherwise the cubic step is taken. + if brackt: + theta = 3.0 * (fp - fy) / (sty - stp) + dy + dp + s = max(abs(theta), abs(dy), abs(dp)) + gamma = s * np.sqrt((theta / s) ** 2 - (dy / s) * (dp / s)) + if stp > sty: + gamma = -gamma + p = (gamma - dp) + theta + q = ((gamma - dp) + gamma) + dy + r = p / q + stpc = stp + r * (sty - stp) + stpf = stpc + elif stp > stx: + stpf = stpmax + else: + stpf = stpmin + + # Update the interval which contains a minimizer. + if fp > fx: + sty = stp + fy = fp + dy = dp + else: + if sgnd < 0: + sty = stx + fy = fx + dy = dx + stx = stp + fx = fp + dx = dp + + # Compute the new step. 
+ stp = stpf + + return stx, fx, dx, sty, fy, dy, stp, brackt diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_differentiate.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_differentiate.py new file mode 100644 index 0000000000000000000000000000000000000000..959c17e3ffaedf960ebffe7984aa25aa32d9eb6c --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_differentiate.py @@ -0,0 +1,856 @@ +# mypy: disable-error-code="attr-defined" +import numpy as np +import scipy._lib._elementwise_iterative_method as eim +from scipy._lib._util import _RichResult + +_EERRORINCREASE = -1 # used in _differentiate + +def _differentiate_iv(func, x, args, atol, rtol, maxiter, order, initial_step, + step_factor, step_direction, preserve_shape, callback): + # Input validation for `_differentiate` + + if not callable(func): + raise ValueError('`func` must be callable.') + + # x has more complex IV that is taken care of during initialization + x = np.asarray(x) + dtype = x.dtype if np.issubdtype(x.dtype, np.inexact) else np.float64 + + if not np.iterable(args): + args = (args,) + + if atol is None: + atol = np.finfo(dtype).tiny + + if rtol is None: + rtol = np.sqrt(np.finfo(dtype).eps) + + message = 'Tolerances and step parameters must be non-negative scalars.' 
+ tols = np.asarray([atol, rtol, initial_step, step_factor]) + if (not np.issubdtype(tols.dtype, np.number) + or np.any(tols < 0) + or tols.shape != (4,)): + raise ValueError(message) + initial_step, step_factor = tols[2:].astype(dtype) + + maxiter_int = int(maxiter) + if maxiter != maxiter_int or maxiter <= 0: + raise ValueError('`maxiter` must be a positive integer.') + + order_int = int(order) + if order_int != order or order <= 0: + raise ValueError('`order` must be a positive integer.') + + step_direction = np.sign(step_direction).astype(dtype) + x, step_direction = np.broadcast_arrays(x, step_direction) + x, step_direction = x[()], step_direction[()] + + message = '`preserve_shape` must be True or False.' + if preserve_shape not in {True, False}: + raise ValueError(message) + + if callback is not None and not callable(callback): + raise ValueError('`callback` must be callable.') + + return (func, x, args, atol, rtol, maxiter_int, order_int, initial_step, + step_factor, step_direction, preserve_shape, callback) + + +def _differentiate(func, x, *, args=(), atol=None, rtol=None, maxiter=10, + order=8, initial_step=0.5, step_factor=2.0, + step_direction=0, preserve_shape=False, callback=None): + """Evaluate the derivative of an elementwise scalar function numerically. + + Parameters + ---------- + func : callable + The function whose derivative is desired. The signature must be:: + + func(x: ndarray, *fargs) -> ndarray + + where each element of ``x`` is a finite real number and ``fargs`` is a tuple, + which may contain an arbitrary number of arrays that are broadcastable + with `x`. ``func`` must be an elementwise function: each element + ``func(x)[i]`` must equal ``func(x[i])`` for all indices ``i``. + x : array_like + Abscissae at which to evaluate the derivative. + args : tuple, optional + Additional positional arguments to be passed to `func`. Must be arrays + broadcastable with `x`. 
If the callable to be differentiated requires + arguments that are not broadcastable with `x`, wrap that callable with + `func`. See Examples. + atol, rtol : float, optional + Absolute and relative tolerances for the stopping condition: iteration + will stop when ``res.error < atol + rtol * abs(res.df)``. The default + `atol` is the smallest normal number of the appropriate dtype, and + the default `rtol` is the square root of the precision of the + appropriate dtype. + order : int, default: 8 + The (positive integer) order of the finite difference formula to be + used. Odd integers will be rounded up to the next even integer. + initial_step : float, default: 0.5 + The (absolute) initial step size for the finite difference derivative + approximation. + step_factor : float, default: 2.0 + The factor by which the step size is *reduced* in each iteration; i.e. + the step size in iteration 1 is ``initial_step/step_factor``. If + ``step_factor < 1``, subsequent steps will be greater than the initial + step; this may be useful if steps smaller than some threshold are + undesirable (e.g. due to subtractive cancellation error). + maxiter : int, default: 10 + The maximum number of iterations of the algorithm to perform. See + notes. + step_direction : array_like + An array representing the direction of the finite difference steps (for + use when `x` lies near to the boundary of the domain of the function.) + Must be broadcastable with `x` and all `args`. + Where 0 (default), central differences are used; where negative (e.g. + -1), steps are non-positive; and where positive (e.g. 1), all steps are + non-negative. + preserve_shape : bool, default: False + In the following, "arguments of `func`" refers to the array ``x`` and + any arrays within ``fargs``. Let ``shape`` be the broadcasted shape + of `x` and all elements of `args` (which is conceptually + distinct from ``fargs`` passed into `f`). 
+ + - When ``preserve_shape=False`` (default), `f` must accept arguments + of *any* broadcastable shapes. + + - When ``preserve_shape=True``, `f` must accept arguments of shape + ``shape`` *or* ``shape + (n,)``, where ``(n,)`` is the number of + abscissae at which the function is being evaluated. + + In either case, for each scalar element ``xi`` within `x`, the array + returned by `f` must include the scalar ``f(xi)`` at the same index. + Consequently, the shape of the output is always the shape of the input + ``x``. + + See Examples. + callback : callable, optional + An optional user-supplied function to be called before the first + iteration and after each iteration. + Called as ``callback(res)``, where ``res`` is a ``_RichResult`` + similar to that returned by `_differentiate` (but containing the + current iterate's values of all variables). If `callback` raises a + ``StopIteration``, the algorithm will terminate immediately and + `_differentiate` will return a result. + + Returns + ------- + res : _RichResult + An instance of `scipy._lib._util._RichResult` with the following + attributes. (The descriptions are written as though the values will be + scalars; however, if `func` returns an array, the outputs will be + arrays of the same shape.) + + success : bool + ``True`` when the algorithm terminated successfully (status ``0``). + status : int + An integer representing the exit status of the algorithm. + ``0`` : The algorithm converged to the specified tolerances. + ``-1`` : The error estimate increased, so iteration was terminated. + ``-2`` : The maximum number of iterations was reached. + ``-3`` : A non-finite value was encountered. + ``-4`` : Iteration was terminated by `callback`. + ``1`` : The algorithm is proceeding normally (in `callback` only). + df : float + The derivative of `func` at `x`, if the algorithm terminated + successfully. 
+ error : float + An estimate of the error: the magnitude of the difference between + the current estimate of the derivative and the estimate in the + previous iteration. + nit : int + The number of iterations performed. + nfev : int + The number of points at which `func` was evaluated. + x : float + The value at which the derivative of `func` was evaluated + (after broadcasting with `args` and `step_direction`). + + Notes + ----- + The implementation was inspired by jacobi [1]_, numdifftools [2]_, and + DERIVEST [3]_, but the implementation follows the theory of Taylor series + more straightforwardly (and arguably naively so). + In the first iteration, the derivative is estimated using a finite + difference formula of order `order` with maximum step size `initial_step`. + Each subsequent iteration, the maximum step size is reduced by + `step_factor`, and the derivative is estimated again until a termination + condition is reached. The error estimate is the magnitude of the difference + between the current derivative approximation and that of the previous + iteration. + + The stencils of the finite difference formulae are designed such that + abscissae are "nested": after `func` is evaluated at ``order + 1`` + points in the first iteration, `func` is evaluated at only two new points + in each subsequent iteration; ``order - 1`` previously evaluated function + values required by the finite difference formula are reused, and two + function values (evaluations at the points furthest from `x`) are unused. + + Step sizes are absolute. When the step size is small relative to the + magnitude of `x`, precision is lost; for example, if `x` is ``1e20``, the + default initial step size of ``0.5`` cannot be resolved. Accordingly, + consider using larger initial step sizes for large magnitudes of `x`. + + The default tolerances are challenging to satisfy at points where the + true derivative is exactly zero. 
If the derivative may be exactly zero,
+    consider specifying an absolute tolerance (e.g. ``atol=1e-16``) to
+    improve convergence.
+
+    References
+    ----------
+    [1]_ Hans Dembinski (@HDembinski). jacobi.
+         https://github.com/HDembinski/jacobi
+    [2]_ Per A. Brodtkorb and John D'Errico. numdifftools.
+         https://numdifftools.readthedocs.io/en/latest/
+    [3]_ John D'Errico. DERIVEST: Adaptive Robust Numerical Differentiation.
+         https://www.mathworks.com/matlabcentral/fileexchange/13490-adaptive-robust-numerical-differentiation
+    [4]_ Numerical Differentiation. Wikipedia.
+         https://en.wikipedia.org/wiki/Numerical_differentiation
+
+    Examples
+    --------
+    Evaluate the derivative of ``np.exp`` at several points ``x``.
+
+    >>> import numpy as np
+    >>> from scipy.optimize._differentiate import _differentiate
+    >>> f = np.exp
+    >>> df = np.exp  # true derivative
+    >>> x = np.linspace(1, 2, 5)
+    >>> res = _differentiate(f, x)
+    >>> res.df  # approximation of the derivative
+    array([2.71828183, 3.49034296, 4.48168907, 5.75460268, 7.3890561 ])
+    >>> res.error  # estimate of the error
+    array(
+        [7.12940817e-12, 9.16688947e-12, 1.17594823e-11, 1.50972568e-11, 1.93942640e-11]
+    )
+    >>> abs(res.df - df(x))  # true error
+    array(
+        [3.06421555e-14, 3.01980663e-14, 5.06261699e-14, 6.30606678e-14, 8.34887715e-14]
+    )
+
+    Show the convergence of the approximation as the step size is reduced.
+    Each iteration, the step size is reduced by `step_factor`, so for
+    sufficiently small initial step, each iteration reduces the error by a
+    factor of ``1/step_factor**order`` until finite precision arithmetic
+    inhibits further improvement.
+
+    >>> iter = list(range(1, 12))  # maximum iterations
+    >>> hfac = 2  # step size reduction per iteration
+    >>> hdir = [-1, 0, 1]  # compare left-, central-, and right- steps
+    >>> order = 4  # order of differentiation formula
+    >>> x = 1
+    >>> ref = df(x)
+    >>> errors = []  # true error
+    >>> for i in iter:
+    ...
res = _differentiate(f, x, maxiter=i, step_factor=hfac, + ... step_direction=hdir, order=order, + ... atol=0, rtol=0) # prevent early termination + ... errors.append(abs(res.df - ref)) + >>> errors = np.array(errors) + >>> plt.semilogy(iter, errors[:, 0], label='left differences') + >>> plt.semilogy(iter, errors[:, 1], label='central differences') + >>> plt.semilogy(iter, errors[:, 2], label='right differences') + >>> plt.xlabel('iteration') + >>> plt.ylabel('error') + >>> plt.legend() + >>> plt.show() + >>> (errors[1, 1] / errors[0, 1], 1 / hfac**order) + (0.06215223140159822, 0.0625) + + The implementation is vectorized over `x`, `step_direction`, and `args`. + The function is evaluated once before the first iteration to perform input + validation and standardization, and once per iteration thereafter. + + >>> def f(x, p): + ... print('here') + ... f.nit += 1 + ... return x**p + >>> f.nit = 0 + >>> def df(x, p): + ... return p*x**(p-1) + >>> x = np.arange(1, 5) + >>> p = np.arange(1, 6).reshape((-1, 1)) + >>> hdir = np.arange(-1, 2).reshape((-1, 1, 1)) + >>> res = _differentiate(f, x, args=(p,), step_direction=hdir, maxiter=1) + >>> np.allclose(res.df, df(x, p)) + True + >>> res.df.shape + (3, 5, 4) + >>> f.nit + 2 + + By default, `preserve_shape` is False, and therefore the callable + `f` may be called with arrays of any broadcastable shapes. + For example: + + >>> shapes = [] + >>> def f(x, c): + ... shape = np.broadcast_shapes(x.shape, c.shape) + ... shapes.append(shape) + ... return np.sin(c*x) + >>> + >>> c = [1, 5, 10, 20] + >>> res = _differentiate(f, 0, args=(c,)) + >>> shapes + [(4,), (4, 8), (4, 2), (3, 2), (2, 2), (1, 2)] + + To understand where these shapes are coming from - and to better + understand how `_differentiate` computes accurate results - note that + higher values of ``c`` correspond with higher frequency sinusoids. 
+ The higher frequency sinusoids make the function's derivative change + faster, so more function evaluations are required to achieve the target + accuracy: + + >>> res.nfev + array([11, 13, 15, 17]) + + The initial ``shape``, ``(4,)``, corresponds with evaluating the + function at a single abscissa and all four frequencies; this is used + for input validation and to determine the size and dtype of the arrays + that store results. The next shape corresponds with evaluating the + function at an initial grid of abscissae and all four frequencies. + Successive calls to the function evaluate the function at two more + abscissae, increasing the effective order of the approximation by two. + However, in later function evaluations, the function is evaluated at + fewer frequencies because the corresponding derivative has already + converged to the required tolerance. This saves function evaluations to + improve performance, but it requires the function to accept arguments of + any shape. + + "Vector-valued" functions are unlikely to satisfy this requirement. + For example, consider + + >>> def f(x): + ... return [x, np.sin(3*x), x+np.sin(10*x), np.sin(20*x)*(x-1)**2] + + This integrand is not compatible with `_differentiate` as written; for instance, + the shape of the output will not be the same as the shape of ``x``. Such a + function *could* be converted to a compatible form with the introduction of + additional parameters, but this would be inconvenient. In such cases, + a simpler solution would be to use `preserve_shape`. + + >>> shapes = [] + >>> def f(x): + ... shapes.append(x.shape) + ... x0, x1, x2, x3 = x + ... return [x0, np.sin(3*x1), x2+np.sin(10*x2), np.sin(20*x3)*(x3-1)**2] + >>> + >>> x = np.zeros(4) + >>> res = _differentiate(f, x, preserve_shape=True) + >>> shapes + [(4,), (4, 8), (4, 2), (4, 2), (4, 2), (4, 2)] + + Here, the shape of ``x`` is ``(4,)``. 
With ``preserve_shape=True``, the + function may be called with argument ``x`` of shape ``(4,)`` or ``(4, n)``, + and this is what we observe. + + """ + # TODO (followup): + # - investigate behavior at saddle points + # - array initial_step / step_factor? + # - multivariate functions? + + res = _differentiate_iv(func, x, args, atol, rtol, maxiter, order, initial_step, + step_factor, step_direction, preserve_shape, callback) + (func, x, args, atol, rtol, maxiter, order, + h0, fac, hdir, preserve_shape, callback) = res + + # Initialization + # Since f(x) (no step) is not needed for central differences, it may be + # possible to eliminate this function evaluation. However, it's useful for + # input validation and standardization, and everything else is designed to + # reduce function calls, so let's keep it simple. + temp = eim._initialize(func, (x,), args, preserve_shape=preserve_shape) + func, xs, fs, args, shape, dtype, xp = temp + x, f = xs[0], fs[0] + df = np.full_like(f, np.nan) + # Ideally we'd broadcast the shape of `hdir` in `_elementwise_algo_init`, but + # it's simpler to do it here than to generalize `_elementwise_algo_init` further. + # `hdir` and `x` are already broadcasted in `_differentiate_iv`, so we know + # that `hdir` can be broadcasted to the final shape. + hdir = np.broadcast_to(hdir, shape).flatten() + + status = np.full_like(x, eim._EINPROGRESS, dtype=int) # in progress + nit, nfev = 0, 1 # one function evaluations performed above + # Boolean indices of left, central, right, and (all) one-sided steps + il = hdir < 0 + ic = hdir == 0 + ir = hdir > 0 + io = il | ir + + # Most of these attributes are reasonably obvious, but: + # - `fs` holds all the function values of all active `x`. The zeroth + # axis corresponds with active points `x`, the first axis corresponds + # with the different steps (in the order described in + # `_differentiate_weights`). 
+ # - `terms` (which could probably use a better name) is half the `order`, + # which is always even. + work = _RichResult(x=x, df=df, fs=f[:, np.newaxis], error=np.nan, h=h0, + df_last=np.nan, error_last=np.nan, h0=h0, fac=fac, + atol=atol, rtol=rtol, nit=nit, nfev=nfev, + status=status, dtype=dtype, terms=(order+1)//2, + hdir=hdir, il=il, ic=ic, ir=ir, io=io) + # This is the correspondence between terms in the `work` object and the + # final result. In this case, the mapping is trivial. Note that `success` + # is prepended automatically. + res_work_pairs = [('status', 'status'), ('df', 'df'), ('error', 'error'), + ('nit', 'nit'), ('nfev', 'nfev'), ('x', 'x')] + + def pre_func_eval(work): + """Determine the abscissae at which the function needs to be evaluated. + + See `_differentiate_weights` for a description of the stencil (pattern + of the abscissae). + + In the first iteration, there is only one stored function value in + `work.fs`, `f(x)`, so we need to evaluate at `order` new points. In + subsequent iterations, we evaluate at two new points. Note that + `work.x` is always flattened into a 1D array after broadcasting with + all `args`, so we add a new axis at the end and evaluate all point + in one call to the function. + + For improvement: + - Consider measuring the step size actually taken, since `(x + h) - x` + is not identically equal to `h` with floating point arithmetic. + - Adjust the step size automatically if `x` is too big to resolve the + step. + - We could probably save some work if there are no central difference + steps or no one-sided steps. 
+ """ + n = work.terms # half the order + h = work.h # step size + c = work.fac # step reduction factor + d = c**0.5 # square root of step reduction factor (one-sided stencil) + # Note - no need to be careful about dtypes until we allocate `x_eval` + + if work.nit == 0: + hc = h / c**np.arange(n) + hc = np.concatenate((-hc[::-1], hc)) + else: + hc = np.asarray([-h, h]) / c**(n-1) + + if work.nit == 0: + hr = h / d**np.arange(2*n) + else: + hr = np.asarray([h, h/d]) / c**(n-1) + + n_new = 2*n if work.nit == 0 else 2 # number of new abscissae + x_eval = np.zeros((len(work.hdir), n_new), dtype=work.dtype) + il, ic, ir = work.il, work.ic, work.ir + x_eval[ir] = work.x[ir, np.newaxis] + hr + x_eval[ic] = work.x[ic, np.newaxis] + hc + x_eval[il] = work.x[il, np.newaxis] - hr + return x_eval + + def post_func_eval(x, f, work): + """ Estimate the derivative and error from the function evaluations + + As in `pre_func_eval`: in the first iteration, there is only one stored + function value in `work.fs`, `f(x)`, so we need to add the `order` new + points. In subsequent iterations, we add two new points. The tricky + part is getting the order to match that of the weights, which is + described in `_differentiate_weights`. + + For improvement: + - Change the order of the weights (and steps in `pre_func_eval`) to + simplify `work_fc` concatenation and eliminate `fc` concatenation. + - It would be simple to do one-step Richardson extrapolation with `df` + and `df_last` to increase the order of the estimate and/or improve + the error estimate. + - Process the function evaluations in a more numerically favorable + way. For instance, combining the pairs of central difference evals + into a second-order approximation and using Richardson extrapolation + to produce a higher order approximation seemed to retain accuracy up + to very high order. + - Alternatively, we could use `polyfit` like Jacobi. 
An advantage of
+          fitting a polynomial to more points than necessary is improved noise
+          tolerance.
+        """
+        n = work.terms
+        n_new = n if work.nit == 0 else 1
+        il, ic, io = work.il, work.ic, work.io
+
+        # Central difference
+        # `work_fc` is *all* the points at which the function has been evaluated
+        # `fc` is the points we're using *this iteration* to produce the estimate
+        work_fc = (f[ic, :n_new], work.fs[ic, :], f[ic, -n_new:])
+        work_fc = np.concatenate(work_fc, axis=-1)
+        if work.nit == 0:
+            fc = work_fc
+        else:
+            fc = (work_fc[:, :n], work_fc[:, n:n+1], work_fc[:, -n:])
+            fc = np.concatenate(fc, axis=-1)
+
+        # One-sided difference
+        work_fo = np.concatenate((work.fs[io, :], f[io, :]), axis=-1)
+        if work.nit == 0:
+            fo = work_fo
+        else:
+            fo = np.concatenate((work_fo[:, 0:1], work_fo[:, -2*n:]), axis=-1)
+
+        work.fs = np.zeros((len(ic), work.fs.shape[-1] + 2*n_new))
+        work.fs[ic] = work_fc
+        work.fs[io] = work_fo
+
+        wc, wo = _differentiate_weights(work, n)
+        work.df_last = work.df.copy()
+        work.df[ic] = fc @ wc / work.h
+        work.df[io] = fo @ wo / work.h
+        work.df[il] *= -1
+
+        work.h /= work.fac
+        work.error_last = work.error
+        # Simple error estimate - the difference in derivative estimates between
+        # this iteration and the last. This is typically conservative because if
+        # convergence has begun, the true error is much closer to the difference
+        # between the current estimate and the *next* error estimate. However,
+        # we could use Richardson extrapolation to produce an error estimate that
+        # is one order higher, and take the difference between that and
+        # `work.df` (which would just be a constant factor that depends on `fac`.)
+ work.error = abs(work.df - work.df_last) + + def check_termination(work): + """Terminate due to convergence, non-finite values, or error increase""" + stop = np.zeros_like(work.df).astype(bool) + + i = work.error < work.atol + work.rtol*abs(work.df) + work.status[i] = eim._ECONVERGED + stop[i] = True + + if work.nit > 0: + i = ~((np.isfinite(work.x) & np.isfinite(work.df)) | stop) + work.df[i], work.status[i] = np.nan, eim._EVALUEERR + stop[i] = True + + # With infinite precision, there is a step size below which + # all smaller step sizes will reduce the error. But in floating point + # arithmetic, catastrophic cancellation will begin to cause the error + # to increase again. This heuristic tries to avoid step sizes that are + # too small. There may be more theoretically sound approaches for + # detecting a step size that minimizes the total error, but this + # heuristic seems simple and effective. + i = (work.error > work.error_last*10) & ~stop + work.status[i] = _EERRORINCREASE + stop[i] = True + + return stop + + def post_termination_check(work): + return + + def customize_result(res, shape): + return shape + + return eim._loop(work, callback, shape, maxiter, func, args, dtype, + pre_func_eval, post_func_eval, check_termination, + post_termination_check, customize_result, res_work_pairs, + xp, preserve_shape) + + +def _differentiate_weights(work, n): + # This produces the weights of the finite difference formula for a given + # stencil. In experiments, use of a second-order central difference formula + # with Richardson extrapolation was more accurate numerically, but it was + # more complicated, and it would have become even more complicated when + # adding support for one-sided differences. However, now that all the + # function evaluation values are stored, they can be processed in whatever + # way is desired to produce the derivative estimate. We leave alternative + # approaches to future work. 
To be more self-contained, here is the theory
+    # for deriving the weights below.
+    #
+    # Recall that the Taylor expansion of a univariate, scalar-valued function
+    # about a point `x` may be expressed as:
+    # f(x + h) = f(x) + f'(x)*h + f''(x)/2!*h**2 + O(h**3)
+    # Suppose we evaluate f(x), f(x+h), and f(x-h). We have:
+    # f(x)     = f(x)
+    # f(x + h) = f(x) + f'(x)*h + f''(x)/2!*h**2 + O(h**3)
+    # f(x - h) = f(x) - f'(x)*h + f''(x)/2!*h**2 + O(h**3)
+    # We can solve for weights `wi` such that:
+    #   w1*f(x)      = w1*(f(x))
+    # + w2*f(x + h) = w2*(f(x) + f'(x)*h + f''(x)/2!*h**2) + O(h**3)
+    # + w3*f(x - h) = w3*(f(x) - f'(x)*h + f''(x)/2!*h**2) + O(h**3)
+    #                = 0 + f'(x)*h + 0 + O(h**3)
+    # Then
+    # f'(x) ~ (w1*f(x) + w2*f(x+h) + w3*f(x-h))/h
+    # is a finite difference derivative approximation with error O(h**2),
+    # and so it is said to be a "second-order" approximation. Under certain
+    # conditions (e.g. well-behaved function, `h` sufficiently small), the
+    # error in the approximation will decrease with h**2; that is, if `h` is
+    # reduced by a factor of 2, the error is reduced by a factor of 4.
+    #
+    # By default, we use eighth-order formulae. Our central-difference formula
+    # uses abscissae:
+    # x-h/c**3, x-h/c**2, x-h/c, x-h, x, x+h, x+h/c, x+h/c**2, x+h/c**3
+    # where `c` is the step factor. (Typically, the step factor is greater than
+    # one, so the outermost points - as written above - are actually closest to
+    # `x`.) This "stencil" is chosen so that each iteration, the step can be
+    # reduced by the factor `c`, and most of the function evaluations can be
+    # reused with the new step size. For example, in the next iteration, we
+    # will have:
+    # x-h/c**4, x-h/c**3, x-h/c**2, x-h/c, x, x+h/c, x+h/c**2, x+h/c**3, x+h/c**4
+    # We do not reuse `x-h` and `x+h` for the new derivative estimate.
+    # While this would increase the order of the formula and thus the
+    # theoretical convergence rate, it is also less stable numerically.
+ # (As noted above, there are other ways of processing the values that are + # more stable. Thus, even now we store `f(x-h)` and `f(x+h)` in `work.fs` + # to simplify future development of this sort of improvement.) + # + # The (right) one-sided formula is produced similarly using abscissae + # x, x+h, x+h/d, x+h/d**2, ..., x+h/d**6, x+h/d**7, x+h/d**7 + # where `d` is the square root of `c`. (The left one-sided formula simply + # uses -h.) When the step size is reduced by factor `c = d**2`, we have + # abscissae: + # x, x+h/d**2, x+h/d**3..., x+h/d**8, x+h/d**9, x+h/d**9 + # `d` is chosen as the square root of `c` so that the rate of the step-size + # reduction is the same per iteration as in the central difference case. + # Note that because the central difference formulas are inherently of even + # order, for simplicity, we use only even-order formulas for one-sided + # differences, too. + + # It's possible for the user to specify `fac` in, say, double precision but + # `x` and `args` in single precision. `fac` gets converted to single + # precision, but we should always use double precision for the intermediate + # calculations here to avoid additional error in the weights. + fac = work.fac.astype(np.float64) + + # Note that if the user switches back to floating point precision with + # `x` and `args`, then `fac` will not necessarily equal the (lower + # precision) cached `_differentiate_weights.fac`, and the weights will + # need to be recalculated. This could be fixed, but it's late, and of + # low consequence. + if fac != _differentiate_weights.fac: + _differentiate_weights.central = [] + _differentiate_weights.right = [] + _differentiate_weights.fac = fac + + if len(_differentiate_weights.central) != 2*n + 1: + # Central difference weights. Consider refactoring this; it could + # probably be more compact. + i = np.arange(-n, n + 1) + p = np.abs(i) - 1. 
# center point has power `p` -1, but sign `s` is 0 + s = np.sign(i) + + h = s / fac ** p + A = np.vander(h, increasing=True).T + b = np.zeros(2*n + 1) + b[1] = 1 + weights = np.linalg.solve(A, b) + + # Enforce identities to improve accuracy + weights[n] = 0 + for i in range(n): + weights[-i-1] = -weights[i] + + # Cache the weights. We only need to calculate them once unless + # the step factor changes. + _differentiate_weights.central = weights + + # One-sided difference weights. The left one-sided weights (with + # negative steps) are simply the negative of the right one-sided + # weights, so no need to compute them separately. + i = np.arange(2*n + 1) + p = i - 1. + s = np.sign(i) + + h = s / np.sqrt(fac) ** p + A = np.vander(h, increasing=True).T + b = np.zeros(2 * n + 1) + b[1] = 1 + weights = np.linalg.solve(A, b) + + _differentiate_weights.right = weights + + return (_differentiate_weights.central.astype(work.dtype, copy=False), + _differentiate_weights.right.astype(work.dtype, copy=False)) +_differentiate_weights.central = [] +_differentiate_weights.right = [] +_differentiate_weights.fac = None + + +def _jacobian(func, x, *, atol=None, rtol=None, maxiter=10, + order=8, initial_step=0.5, step_factor=2.0): + r"""Evaluate the Jacobian of a function numerically. + + Parameters + ---------- + func : callable + The function whose Jacobian is desired. The signature must be:: + + func(x: ndarray) -> ndarray + + where each element of ``x`` is a finite real. If the function to be + differentiated accepts additional, arguments wrap it (e.g. using + `functools.partial` or ``lambda``) and pass the wrapped callable + into `_jacobian`. See Notes regarding vectorization and the dimensionality + of the input and output. + x : array_like + Points at which to evaluate the Jacobian. Must have at least one dimension. + See Notes regarding the dimensionality and vectorization. 
+ atol, rtol : float, optional + Absolute and relative tolerances for the stopping condition: iteration + will stop for each element of the Jacobian when + ``res.error < atol + rtol * abs(res.df)``. The default `atol` is the + smallest normal number of the appropriate dtype, and the default `rtol` + is the square root of the precision of the appropriate dtype. + order : int, default: 8 + The (positive integer) order of the finite difference formula to be + used. Odd integers will be rounded up to the next even integer. + initial_step : float, default: 0.5 + The (absolute) initial step size for the finite difference derivative + approximation. + step_factor : float, default: 2.0 + The factor by which the step size is *reduced* in each iteration; i.e. + the step size in iteration 1 is ``initial_step/step_factor``. If + ``step_factor < 1``, subsequent steps will be greater than the initial + step; this may be useful if steps smaller than some threshold are + undesirable (e.g. due to subtractive cancellation error). + maxiter : int, default: 10 + The maximum number of iterations of the algorithm to perform. + + Returns + ------- + res : _RichResult + An instance of `scipy._lib._util._RichResult` with the following + attributes. + + success : bool array + ``True`` when the algorithm terminated successfully (status ``0``). + status : int array + An integer representing the exit status of the algorithm. + ``0`` : The algorithm converged to the specified tolerances. + ``-1`` : The error estimate increased, so iteration was terminated. + ``-2`` : The maximum number of iterations was reached. + ``-3`` : A non-finite value was encountered. + ``-4`` : Iteration was terminated by `callback`. + ``1`` : The algorithm is proceeding normally (in `callback` only). + df : float array + The Jacobian of `func` at `x`, if the algorithm terminated + successfully. 
+ error : float array + An estimate of the error: the magnitude of the difference between + the current estimate of the derivative and the estimate in the + previous iteration. + nit : int array + The number of iterations performed. + nfev : int array + The number of points at which `func` was evaluated. + x : float array + The value at which the derivative of `func` was evaluated. + + See Also + -------- + _differentiate + + Notes + ----- + Suppose we wish to evaluate the Jacobian of a function + :math:`f: \mathbf{R^m} \rightarrow \mathbf{R^n}`, and assign to variables + ``m`` and ``n`` the positive integer values of :math:`m` and :math:`n`, + respectively. If we wish to evaluate the Jacobian at a single point, + then: + + - argument `x` must be an array of shape ``(m,)`` + - argument `func` must be vectorized to accept an array of shape ``(m, p)``. + The first axis represents the :math:`m` inputs of :math:`f`; the second + is for evaluating the function at multiple points in a single call. + - argument `func` must return an array of shape ``(n, p)``. The first + axis represents the :math:`n` outputs of :math:`f`; the second + is for the result of evaluating the function at multiple points. + - attribute ``df`` of the result object will be an array of shape ``(n, m)``, + the Jacobian. + + This function is also vectorized in the sense that the Jacobian can be + evaluated at ``k`` points in a single call. In this case, `x` would be an + array of shape ``(m, k)``, `func` would accept an array of shape + ``(m, k, p)`` and return an array of shape ``(n, k, p)``, and the ``df`` + attribute of the result would have shape ``(n, m, k)``. + + References + ---------- + .. 
[1] Jacobian matrix and determinant, *Wikipedia*,
+           https://en.wikipedia.org/wiki/Jacobian_matrix_and_determinant
+
+    Examples
+    --------
+    The Rosenbrock function maps from :math:`\mathbf{R}^m \rightarrow \mathbf{R}`;
+    the SciPy implementation `scipy.optimize.rosen` is vectorized to accept an
+    array of shape ``(m, p)`` and return an array of shape ``m``. Suppose we wish
+    to evaluate the Jacobian (AKA the gradient because the function returns a scalar)
+    at ``[0.5, 0.5, 0.5]``.
+
+    >>> import numpy as np
+    >>> from scipy.optimize._differentiate import _jacobian as jacobian
+    >>> from scipy.optimize import rosen, rosen_der
+    >>> m = 3
+    >>> x = np.full(m, 0.5)
+    >>> res = jacobian(rosen, x)
+    >>> ref = rosen_der(x)  # reference value of the gradient
+    >>> res.df, ref
+    (array([-51.,  -1.,  50.]), array([-51.,  -1.,  50.]))
+
+    As an example of a function with multiple outputs, consider Example 4
+    from [1]_.
+
+    >>> def f(x):
+    ...     x1, x2, x3 = x
+    ...     return [x1, 5*x3, 4*x2**2 - 2*x3, x3*np.sin(x1)]
+
+    The true Jacobian is given by:
+
+    >>> def df(x):
+    ...     x1, x2, x3 = x
+    ...     one = np.ones_like(x1)
+    ...     return [[one, 0*one, 0*one],
+    ...             [0*one, 0*one, 5*one],
+    ...             [0*one, 8*x2, -2*one],
+    ...             [x3*np.cos(x1), 0*one, np.sin(x1)]]
+
+    Evaluate the Jacobian at an arbitrary point.
+
+    >>> rng = np.random.default_rng(389252938452)
+    >>> x = rng.random(size=3)
+    >>> res = jacobian(f, x)
+    >>> ref = df(x)
+    >>> res.df.shape == (4, 3)
+    True
+    >>> np.allclose(res.df, ref)
+    True
+
+    Evaluate the Jacobian at 10 arbitrary points in a single call.
+
+    >>> x = rng.random(size=(3, 10))
+    >>> res = jacobian(f, x)
+    >>> ref = df(x)
+    >>> res.df.shape == (4, 3, 10)
+    True
+    >>> np.allclose(res.df, ref)
+    True
+
+    """
+    x = np.asarray(x)
+    int_dtype = np.issubdtype(x.dtype, np.integer)
+    x0 = np.asarray(x, dtype=float) if int_dtype else x
+
+    if x0.ndim < 1:
+        message = "Argument `x` must be at least 1-D."
+ raise ValueError(message) + + m = x0.shape[0] + i = np.arange(m) + + def wrapped(x): + p = () if x.ndim == x0.ndim else (x.shape[-1],) # number of abscissae + new_dims = (1,) if x.ndim == x0.ndim else (1, -1) + new_shape = (m, m) + x0.shape[1:] + p + xph = np.expand_dims(x0, new_dims) + xph = np.broadcast_to(xph, new_shape).copy() + xph[i, i] = x + return func(xph) + + res = _differentiate(wrapped, x, atol=atol, rtol=rtol, + maxiter=maxiter, order=order, initial_step=initial_step, + step_factor=step_factor, preserve_shape=True) + del res.x # the user knows `x`, and the way it gets broadcasted is meaningless here + return res diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_direct_py.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_direct_py.py new file mode 100644 index 0000000000000000000000000000000000000000..440cbb5ae866462b6299b1e12d4a6ba1e407fd62 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_direct_py.py @@ -0,0 +1,278 @@ +from __future__ import annotations +from typing import ( # noqa: UP035 + Any, Callable, Iterable, TYPE_CHECKING +) + +import numpy as np +from scipy.optimize import OptimizeResult +from ._constraints import old_bound_to_new, Bounds +from ._direct import direct as _direct # type: ignore + +if TYPE_CHECKING: + import numpy.typing as npt + +__all__ = ['direct'] + +ERROR_MESSAGES = ( + "Number of function evaluations done is larger than maxfun={}", + "Number of iterations is larger than maxiter={}", + "u[i] < l[i] for some i", + "maxfun is too large", + "Initialization failed", + "There was an error in the creation of the sample points", + "An error occurred while the function was sampled", + "Maximum number of levels has been reached.", + "Forced stop", + "Invalid arguments", + "Out of memory", +) + +SUCCESS_MESSAGES = ( + ("The best function value found is within a relative error={} " + "of the (known) global optimum f_min"), + ("The volume of the hyperrectangle 
containing the lowest function value " + "found is below vol_tol={}"), + ("The side length measure of the hyperrectangle containing the lowest " + "function value found is below len_tol={}"), +) + + +def direct( + func: Callable[[npt.ArrayLike, tuple[Any]], float], + bounds: Iterable | Bounds, + *, + args: tuple = (), + eps: float = 1e-4, + maxfun: int | None = None, + maxiter: int = 1000, + locally_biased: bool = True, + f_min: float = -np.inf, + f_min_rtol: float = 1e-4, + vol_tol: float = 1e-16, + len_tol: float = 1e-6, + callback: Callable[[npt.ArrayLike], None] | None = None +) -> OptimizeResult: + """ + Finds the global minimum of a function using the + DIRECT algorithm. + + Parameters + ---------- + func : callable + The objective function to be minimized. + ``func(x, *args) -> float`` + where ``x`` is an 1-D array with shape (n,) and ``args`` is a tuple of + the fixed parameters needed to completely specify the function. + bounds : sequence or `Bounds` + Bounds for variables. There are two ways to specify the bounds: + + 1. Instance of `Bounds` class. + 2. ``(min, max)`` pairs for each element in ``x``. + + args : tuple, optional + Any additional fixed parameters needed to + completely specify the objective function. + eps : float, optional + Minimal required difference of the objective function values + between the current best hyperrectangle and the next potentially + optimal hyperrectangle to be divided. In consequence, `eps` serves as a + tradeoff between local and global search: the smaller, the more local + the search becomes. Default is 1e-4. + maxfun : int or None, optional + Approximate upper bound on objective function evaluations. + If `None`, will be automatically set to ``1000 * N`` where ``N`` + represents the number of dimensions. Will be capped if necessary to + limit DIRECT's RAM usage to app. 1GiB. This will only occur for very + high dimensional problems and excessive `max_fun`. Default is `None`. 
+ maxiter : int, optional + Maximum number of iterations. Default is 1000. + locally_biased : bool, optional + If `True` (default), use the locally biased variant of the + algorithm known as DIRECT_L. If `False`, use the original unbiased + DIRECT algorithm. For hard problems with many local minima, + `False` is recommended. + f_min : float, optional + Function value of the global optimum. Set this value only if the + global optimum is known. Default is ``-np.inf``, so that this + termination criterion is deactivated. + f_min_rtol : float, optional + Terminate the optimization once the relative error between the + current best minimum `f` and the supplied global minimum `f_min` + is smaller than `f_min_rtol`. This parameter is only used if + `f_min` is also set. Must lie between 0 and 1. Default is 1e-4. + vol_tol : float, optional + Terminate the optimization once the volume of the hyperrectangle + containing the lowest function value is smaller than `vol_tol` + of the complete search space. Must lie between 0 and 1. + Default is 1e-16. + len_tol : float, optional + If `locally_biased=True`, terminate the optimization once half of + the normalized maximal side length of the hyperrectangle containing + the lowest function value is smaller than `len_tol`. + If `locally_biased=False`, terminate the optimization once half of + the normalized diagonal of the hyperrectangle containing the lowest + function value is smaller than `len_tol`. Must lie between 0 and 1. + Default is 1e-6. + callback : callable, optional + A callback function with signature ``callback(xk)`` where ``xk`` + represents the best function value found so far. + + Returns + ------- + res : OptimizeResult + The optimization result represented as a ``OptimizeResult`` object. + Important attributes are: ``x`` the solution array, ``success`` a + Boolean flag indicating if the optimizer exited successfully and + ``message`` which describes the cause of the termination. 
See + `OptimizeResult` for a description of other attributes. + + Notes + ----- + DIviding RECTangles (DIRECT) is a deterministic global + optimization algorithm capable of minimizing a black box function with + its variables subject to lower and upper bound constraints by sampling + potential solutions in the search space [1]_. The algorithm starts by + normalising the search space to an n-dimensional unit hypercube. + It samples the function at the center of this hypercube and at 2n + (n is the number of variables) more points, 2 in each coordinate + direction. Using these function values, DIRECT then divides the + domain into hyperrectangles, each having exactly one of the sampling + points as its center. In each iteration, DIRECT chooses, using the `eps` + parameter which defaults to 1e-4, some of the existing hyperrectangles + to be further divided. This division process continues until either the + maximum number of iterations or maximum function evaluations allowed + are exceeded, or the hyperrectangle containing the minimal value found + so far becomes small enough. If `f_min` is specified, the optimization + will stop once this function value is reached within a relative tolerance. + The locally biased variant of DIRECT (originally called DIRECT_L) [2]_ is + used by default. It makes the search more locally biased and more + efficient for cases with only a few local minima. + + A note about termination criteria: `vol_tol` refers to the volume of the + hyperrectangle containing the lowest function value found so far. This + volume decreases exponentially with increasing dimensionality of the + problem. Therefore `vol_tol` should be decreased to avoid premature + termination of the algorithm for higher dimensions. This does not hold + for `len_tol`: it refers either to half of the maximal side length + (for ``locally_biased=True``) or half of the diagonal of the + hyperrectangle (for ``locally_biased=False``). 
+ + This code is based on the DIRECT 2.0.4 Fortran code by Gablonsky et al. at + https://ctk.math.ncsu.edu/SOFTWARE/DIRECTv204.tar.gz . + This original version was initially converted via f2c and then cleaned up + and reorganized by Steven G. Johnson, August 2007, for the NLopt project. + The `direct` function wraps the C implementation. + + .. versionadded:: 1.9.0 + + References + ---------- + .. [1] Jones, D.R., Perttunen, C.D. & Stuckman, B.E. Lipschitzian + optimization without the Lipschitz constant. J Optim Theory Appl + 79, 157-181 (1993). + .. [2] Gablonsky, J., Kelley, C. A Locally-Biased form of the DIRECT + Algorithm. Journal of Global Optimization 21, 27-37 (2001). + + Examples + -------- + The following example is a 2-D problem with four local minima: minimizing + the Styblinski-Tang function + (https://en.wikipedia.org/wiki/Test_functions_for_optimization). + + >>> from scipy.optimize import direct, Bounds + >>> def styblinski_tang(pos): + ... x, y = pos + ... return 0.5 * (x**4 - 16*x**2 + 5*x + y**4 - 16*y**2 + 5*y) + >>> bounds = Bounds([-4., -4.], [4., 4.]) + >>> result = direct(styblinski_tang, bounds) + >>> result.x, result.fun, result.nfev + array([-2.90321597, -2.90321597]), -78.3323279095383, 2011 + + The correct global minimum was found but with a huge number of function + evaluations (2011). Loosening the termination tolerances `vol_tol` and + `len_tol` can be used to stop DIRECT earlier. 
+ + >>> result = direct(styblinski_tang, bounds, len_tol=1e-3) + >>> result.x, result.fun, result.nfev + array([-2.9044353, -2.9044353]), -78.33230330754142, 207 + + """ + # convert bounds to new Bounds class if necessary + if not isinstance(bounds, Bounds): + if isinstance(bounds, list) or isinstance(bounds, tuple): + lb, ub = old_bound_to_new(bounds) + bounds = Bounds(lb, ub) + else: + message = ("bounds must be a sequence or " + "instance of Bounds class") + raise ValueError(message) + + lb = np.ascontiguousarray(bounds.lb, dtype=np.float64) + ub = np.ascontiguousarray(bounds.ub, dtype=np.float64) + + # validate bounds + # check that lower bounds are smaller than upper bounds + if not np.all(lb < ub): + raise ValueError('Bounds are not consistent min < max') + # check for infs + if (np.any(np.isinf(lb)) or np.any(np.isinf(ub))): + raise ValueError("Bounds must not be inf.") + + # validate tolerances + if (vol_tol < 0 or vol_tol > 1): + raise ValueError("vol_tol must be between 0 and 1.") + if (len_tol < 0 or len_tol > 1): + raise ValueError("len_tol must be between 0 and 1.") + if (f_min_rtol < 0 or f_min_rtol > 1): + raise ValueError("f_min_rtol must be between 0 and 1.") + + # validate maxfun and maxiter + if maxfun is None: + maxfun = 1000 * lb.shape[0] + if not isinstance(maxfun, int): + raise ValueError("maxfun must be of type int.") + if maxfun < 0: + raise ValueError("maxfun must be > 0.") + if not isinstance(maxiter, int): + raise ValueError("maxiter must be of type int.") + if maxiter < 0: + raise ValueError("maxiter must be > 0.") + + # validate boolean parameters + if not isinstance(locally_biased, bool): + raise ValueError("locally_biased must be True or False.") + + def _func_wrap(x, args=None): + x = np.asarray(x) + if args is None: + f = func(x) + else: + f = func(x, *args) + # always return a float + return np.asarray(f).item() + + # TODO: fix disp argument + x, fun, ret_code, nfev, nit = _direct( + _func_wrap, + np.asarray(lb), np.asarray(ub), + 
args, + False, eps, maxfun, maxiter, + locally_biased, + f_min, f_min_rtol, + vol_tol, len_tol, callback + ) + + format_val = (maxfun, maxiter, f_min_rtol, vol_tol, len_tol) + if ret_code > 2: + message = SUCCESS_MESSAGES[ret_code - 3].format( + format_val[ret_code - 1]) + elif 0 < ret_code <= 2: + message = ERROR_MESSAGES[ret_code - 1].format(format_val[ret_code - 1]) + elif 0 > ret_code > -100: + message = ERROR_MESSAGES[abs(ret_code) + 1] + else: + message = ERROR_MESSAGES[ret_code + 99] + + return OptimizeResult(x=np.asarray(x), fun=fun, status=ret_code, + success=ret_code > 2, message=message, + nfev=nfev, nit=nit) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_group_columns.cpython-310-x86_64-linux-gnu.so b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_group_columns.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..6bf365e82423d10a4e2fe926977e1e1167aab4e5 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_group_columns.cpython-310-x86_64-linux-gnu.so differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_hessian_update_strategy.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_hessian_update_strategy.py new file mode 100644 index 0000000000000000000000000000000000000000..c72d1159314e0ea449085df44ef80d1d0dbb1ebb --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_hessian_update_strategy.py @@ -0,0 +1,475 @@ +"""Hessian update strategies for quasi-Newton optimization methods.""" +import numpy as np +from numpy.linalg import norm +from scipy.linalg import get_blas_funcs, issymmetric +from warnings import warn + + +__all__ = ['HessianUpdateStrategy', 'BFGS', 'SR1'] + + +class HessianUpdateStrategy: + """Interface for implementing Hessian update strategies. 
+ + Many optimization methods make use of Hessian (or inverse Hessian) + approximations, such as the quasi-Newton methods BFGS, SR1, L-BFGS. + Some of these approximations, however, do not actually need to store + the entire matrix or can compute the internal matrix product with a + given vector in a very efficiently manner. This class serves as an + abstract interface between the optimization algorithm and the + quasi-Newton update strategies, giving freedom of implementation + to store and update the internal matrix as efficiently as possible. + Different choices of initialization and update procedure will result + in different quasi-Newton strategies. + + Four methods should be implemented in derived classes: ``initialize``, + ``update``, ``dot`` and ``get_matrix``. + + Notes + ----- + Any instance of a class that implements this interface, + can be accepted by the method ``minimize`` and used by + the compatible solvers to approximate the Hessian (or + inverse Hessian) used by the optimization algorithms. + """ + + def initialize(self, n, approx_type): + """Initialize internal matrix. + + Allocate internal memory for storing and updating + the Hessian or its inverse. + + Parameters + ---------- + n : int + Problem dimension. + approx_type : {'hess', 'inv_hess'} + Selects either the Hessian or the inverse Hessian. + When set to 'hess' the Hessian will be stored and updated. + When set to 'inv_hess' its inverse will be used instead. + """ + raise NotImplementedError("The method ``initialize(n, approx_type)``" + " is not implemented.") + + def update(self, delta_x, delta_grad): + """Update internal matrix. + + Update Hessian matrix or its inverse (depending on how 'approx_type' + is defined) using information about the last evaluated points. + + Parameters + ---------- + delta_x : ndarray + The difference between two points the gradient + function have been evaluated at: ``delta_x = x2 - x1``. 
+ delta_grad : ndarray + The difference between the gradients: + ``delta_grad = grad(x2) - grad(x1)``. + """ + raise NotImplementedError("The method ``update(delta_x, delta_grad)``" + " is not implemented.") + + def dot(self, p): + """Compute the product of the internal matrix with the given vector. + + Parameters + ---------- + p : array_like + 1-D array representing a vector. + + Returns + ------- + Hp : array + 1-D represents the result of multiplying the approximation matrix + by vector p. + """ + raise NotImplementedError("The method ``dot(p)``" + " is not implemented.") + + def get_matrix(self): + """Return current internal matrix. + + Returns + ------- + H : ndarray, shape (n, n) + Dense matrix containing either the Hessian + or its inverse (depending on how 'approx_type' + is defined). + """ + raise NotImplementedError("The method ``get_matrix(p)``" + " is not implemented.") + + +class FullHessianUpdateStrategy(HessianUpdateStrategy): + """Hessian update strategy with full dimensional internal representation. + """ + _syr = get_blas_funcs('syr', dtype='d') # Symmetric rank 1 update + _syr2 = get_blas_funcs('syr2', dtype='d') # Symmetric rank 2 update + # Symmetric matrix-vector product + _symv = get_blas_funcs('symv', dtype='d') + + def __init__(self, init_scale='auto'): + self.init_scale = init_scale + # Until initialize is called we can't really use the class, + # so it makes sense to set everything to None. + self.first_iteration = None + self.approx_type = None + self.B = None + self.H = None + + def initialize(self, n, approx_type): + """Initialize internal matrix. + + Allocate internal memory for storing and updating + the Hessian or its inverse. + + Parameters + ---------- + n : int + Problem dimension. + approx_type : {'hess', 'inv_hess'} + Selects either the Hessian or the inverse Hessian. + When set to 'hess' the Hessian will be stored and updated. + When set to 'inv_hess' its inverse will be used instead. 
+ """ + self.first_iteration = True + self.n = n + self.approx_type = approx_type + if approx_type not in ('hess', 'inv_hess'): + raise ValueError("`approx_type` must be 'hess' or 'inv_hess'.") + # Create matrix + if self.approx_type == 'hess': + self.B = np.eye(n, dtype=float) + else: + self.H = np.eye(n, dtype=float) + + def _auto_scale(self, delta_x, delta_grad): + # Heuristic to scale matrix at first iteration. + # Described in Nocedal and Wright "Numerical Optimization" + # p.143 formula (6.20). + s_norm2 = np.dot(delta_x, delta_x) + y_norm2 = np.dot(delta_grad, delta_grad) + ys = np.abs(np.dot(delta_grad, delta_x)) + if ys == 0.0 or y_norm2 == 0 or s_norm2 == 0: + return 1 + if self.approx_type == 'hess': + return y_norm2 / ys + else: + return ys / y_norm2 + + def _update_implementation(self, delta_x, delta_grad): + raise NotImplementedError("The method ``_update_implementation``" + " is not implemented.") + + def update(self, delta_x, delta_grad): + """Update internal matrix. + + Update Hessian matrix or its inverse (depending on how 'approx_type' + is defined) using information about the last evaluated points. + + Parameters + ---------- + delta_x : ndarray + The difference between two points the gradient + function have been evaluated at: ``delta_x = x2 - x1``. + delta_grad : ndarray + The difference between the gradients: + ``delta_grad = grad(x2) - grad(x1)``. + """ + if np.all(delta_x == 0.0): + return + if np.all(delta_grad == 0.0): + warn('delta_grad == 0.0. Check if the approximated ' + 'function is linear. 
If the function is linear ' + 'better results can be obtained by defining the ' + 'Hessian as zero instead of using quasi-Newton ' + 'approximations.', + UserWarning, stacklevel=2) + return + if self.first_iteration: + # Get user specific scale + if isinstance(self.init_scale, str) and self.init_scale == "auto": + scale = self._auto_scale(delta_x, delta_grad) + else: + scale = self.init_scale + + # Check for complex: numpy will silently cast a complex array to + # a real one but not so for scalar as it raises a TypeError. + # Checking here brings a consistent behavior. + replace = False + if np.size(scale) == 1: + # to account for the legacy behavior having the exact same cast + scale = float(scale) + elif np.iscomplexobj(scale): + raise TypeError("init_scale contains complex elements, " + "must be real.") + else: # test explicitly for allowed shapes and values + replace = True + if self.approx_type == 'hess': + shape = np.shape(self.B) + dtype = self.B.dtype + else: + shape = np.shape(self.H) + dtype = self.H.dtype + # copy, will replace the original + scale = np.array(scale, dtype=dtype, copy=True) + + # it has to match the shape of the matrix for the multiplication, + # no implicit broadcasting is allowed + if shape != (init_shape := np.shape(scale)): + raise ValueError("If init_scale is an array, it must have the " + f"dimensions of the hess/inv_hess: {shape}." 
+ f" Got {init_shape}.") + if not issymmetric(scale): + raise ValueError("If init_scale is an array, it must be" + " symmetric (passing scipy.linalg.issymmetric)" + " to be an approximation of a hess/inv_hess.") + + # Scale initial matrix with ``scale * np.eye(n)`` or replace + # This is not ideal, we could assign the scale directly in + # initialize, but we would need to + if self.approx_type == 'hess': + if replace: + self.B = scale + else: + self.B *= scale + else: + if replace: + self.H = scale + else: + self.H *= scale + self.first_iteration = False + self._update_implementation(delta_x, delta_grad) + + def dot(self, p): + """Compute the product of the internal matrix with the given vector. + + Parameters + ---------- + p : array_like + 1-D array representing a vector. + + Returns + ------- + Hp : array + 1-D represents the result of multiplying the approximation matrix + by vector p. + """ + if self.approx_type == 'hess': + return self._symv(1, self.B, p) + else: + return self._symv(1, self.H, p) + + def get_matrix(self): + """Return the current internal matrix. + + Returns + ------- + M : ndarray, shape (n, n) + Dense matrix containing either the Hessian or its inverse + (depending on how `approx_type` was defined). + """ + if self.approx_type == 'hess': + M = np.copy(self.B) + else: + M = np.copy(self.H) + li = np.tril_indices_from(M, k=-1) + M[li] = M.T[li] + return M + + +class BFGS(FullHessianUpdateStrategy): + """Broyden-Fletcher-Goldfarb-Shanno (BFGS) Hessian update strategy. + + Parameters + ---------- + exception_strategy : {'skip_update', 'damp_update'}, optional + Define how to proceed when the curvature condition is violated. + Set it to 'skip_update' to just skip the update. Or, alternatively, + set it to 'damp_update' to interpolate between the actual BFGS + result and the unmodified matrix. Both exceptions strategies + are explained in [1]_, p.536-537. 
+ min_curvature : float + This number, scaled by a normalization factor, defines the + minimum curvature ``dot(delta_grad, delta_x)`` allowed to go + unaffected by the exception strategy. By default is equal to + 1e-8 when ``exception_strategy = 'skip_update'`` and equal + to 0.2 when ``exception_strategy = 'damp_update'``. + init_scale : {float, np.array, 'auto'} + This parameter can be used to initialize the Hessian or its + inverse. When a float is given, the relevant array is initialized + to ``np.eye(n) * init_scale``, where ``n`` is the problem dimension. + Alternatively, if a precisely ``(n, n)`` shaped, symmetric array is given, + this array will be used. Otherwise an error is generated. + Set it to 'auto' in order to use an automatic heuristic for choosing + the initial scale. The heuristic is described in [1]_, p.143. + The default is 'auto'. + + Notes + ----- + The update is based on the description in [1]_, p.140. + + References + ---------- + .. [1] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). + """ + + def __init__(self, exception_strategy='skip_update', min_curvature=None, + init_scale='auto'): + if exception_strategy == 'skip_update': + if min_curvature is not None: + self.min_curvature = min_curvature + else: + self.min_curvature = 1e-8 + elif exception_strategy == 'damp_update': + if min_curvature is not None: + self.min_curvature = min_curvature + else: + self.min_curvature = 0.2 + else: + raise ValueError("`exception_strategy` must be 'skip_update' " + "or 'damp_update'.") + + super().__init__(init_scale) + self.exception_strategy = exception_strategy + + def _update_inverse_hessian(self, ys, Hy, yHy, s): + """Update the inverse Hessian matrix. + + BFGS update using the formula: + + ``H <- H + ((H*y).T*y + s.T*y)/(s.T*y)^2 * (s*s.T) + - 1/(s.T*y) * ((H*y)*s.T + s*(H*y).T)`` + + where ``s = delta_x`` and ``y = delta_grad``. 
This formula is + equivalent to (6.17) in [1]_ written in a more efficient way + for implementation. + + References + ---------- + .. [1] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). + """ + self.H = self._syr2(-1.0 / ys, s, Hy, a=self.H) + self.H = self._syr((ys + yHy) / ys ** 2, s, a=self.H) + + def _update_hessian(self, ys, Bs, sBs, y): + """Update the Hessian matrix. + + BFGS update using the formula: + + ``B <- B - (B*s)*(B*s).T/s.T*(B*s) + y*y^T/s.T*y`` + + where ``s`` is short for ``delta_x`` and ``y`` is short + for ``delta_grad``. Formula (6.19) in [1]_. + + References + ---------- + .. [1] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). + """ + self.B = self._syr(1.0 / ys, y, a=self.B) + self.B = self._syr(-1.0 / sBs, Bs, a=self.B) + + def _update_implementation(self, delta_x, delta_grad): + # Auxiliary variables w and z + if self.approx_type == 'hess': + w = delta_x + z = delta_grad + else: + w = delta_grad + z = delta_x + # Do some common operations + wz = np.dot(w, z) + Mw = self.dot(w) + wMw = Mw.dot(w) + # Guarantee that wMw > 0 by reinitializing matrix. + # While this is always true in exact arithmetic, + # indefinite matrix may appear due to roundoff errors. + if wMw <= 0.0: + scale = self._auto_scale(delta_x, delta_grad) + # Reinitialize matrix + if self.approx_type == 'hess': + self.B = scale * np.eye(self.n, dtype=float) + else: + self.H = scale * np.eye(self.n, dtype=float) + # Do common operations for new matrix + Mw = self.dot(w) + wMw = Mw.dot(w) + # Check if curvature condition is violated + if wz <= self.min_curvature * wMw: + # If the option 'skip_update' is set + # we just skip the update when the condition + # is violated. + if self.exception_strategy == 'skip_update': + return + # If the option 'damp_update' is set we + # interpolate between the actual BFGS + # result and the unmodified matrix. 
+ elif self.exception_strategy == 'damp_update': + update_factor = (1-self.min_curvature) / (1 - wz/wMw) + z = update_factor*z + (1-update_factor)*Mw + wz = np.dot(w, z) + # Update matrix + if self.approx_type == 'hess': + self._update_hessian(wz, Mw, wMw, z) + else: + self._update_inverse_hessian(wz, Mw, wMw, z) + + +class SR1(FullHessianUpdateStrategy): + """Symmetric-rank-1 Hessian update strategy. + + Parameters + ---------- + min_denominator : float + This number, scaled by a normalization factor, + defines the minimum denominator magnitude allowed + in the update. When the condition is violated we skip + the update. By default uses ``1e-8``. + init_scale : {float, np.array, 'auto'}, optional + This parameter can be used to initialize the Hessian or its + inverse. When a float is given, the relevant array is initialized + to ``np.eye(n) * init_scale``, where ``n`` is the problem dimension. + Alternatively, if a precisely ``(n, n)`` shaped, symmetric array is given, + this array will be used. Otherwise an error is generated. + Set it to 'auto' in order to use an automatic heuristic for choosing + the initial scale. The heuristic is described in [1]_, p.143. + The default is 'auto'. + + Notes + ----- + The update is based on the description in [1]_, p.144-146. + + References + ---------- + .. [1] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). + """ + + def __init__(self, min_denominator=1e-8, init_scale='auto'): + self.min_denominator = min_denominator + super().__init__(init_scale) + + def _update_implementation(self, delta_x, delta_grad): + # Auxiliary variables w and z + if self.approx_type == 'hess': + w = delta_x + z = delta_grad + else: + w = delta_grad + z = delta_x + # Do some common operations + Mw = self.dot(w) + z_minus_Mw = z - Mw + denominator = np.dot(w, z_minus_Mw) + # If the denominator is too small + # we just skip the update. 
+ if np.abs(denominator) <= self.min_denominator*norm(w)*norm(z_minus_Mw): + return + # Update matrix + if self.approx_type == 'hess': + self.B = self._syr(1/denominator, z_minus_Mw, a=self.B) + else: + self.H = self._syr(1/denominator, z_minus_Mw, a=self.H) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_lbfgsb.cpython-310-x86_64-linux-gnu.so b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_lbfgsb.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f056a3ab27626c4b67905c7977bc3f2cce57ca56 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_lbfgsb.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0e0cc53dba47fe455ac20e0c5588de5dcd553f4c8df5bc5b11a81d84339d015 +size 524785 diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog.py new file mode 100644 index 0000000000000000000000000000000000000000..1812182171961f16f69fe85f40d07bf0ae790e03 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog.py @@ -0,0 +1,716 @@ +""" +A top-level linear programming interface. + +.. versionadded:: 0.15.0 + +Functions +--------- +.. 
autosummary:: + :toctree: generated/ + + linprog + linprog_verbose_callback + linprog_terse_callback + +""" + +import numpy as np + +from ._optimize import OptimizeResult, OptimizeWarning +from warnings import warn +from ._linprog_highs import _linprog_highs +from ._linprog_ip import _linprog_ip +from ._linprog_simplex import _linprog_simplex +from ._linprog_rs import _linprog_rs +from ._linprog_doc import (_linprog_highs_doc, _linprog_ip_doc, # noqa: F401 + _linprog_rs_doc, _linprog_simplex_doc, + _linprog_highs_ipm_doc, _linprog_highs_ds_doc) +from ._linprog_util import ( + _parse_linprog, _presolve, _get_Abc, _LPProblem, _autoscale, + _postsolve, _check_result, _display_summary) +from copy import deepcopy + +__all__ = ['linprog', 'linprog_verbose_callback', 'linprog_terse_callback'] + +__docformat__ = "restructuredtext en" + +LINPROG_METHODS = [ + 'simplex', 'revised simplex', 'interior-point', 'highs', 'highs-ds', 'highs-ipm' +] + + +def linprog_verbose_callback(res): + """ + A sample callback function demonstrating the linprog callback interface. + This callback produces detailed output to sys.stdout before each iteration + and after the final iteration of the simplex algorithm. + + Parameters + ---------- + res : A `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + The independent variable vector which optimizes the linear + programming problem. + fun : float + Value of the objective function. + success : bool + True if the algorithm succeeded in finding an optimal solution. + slack : 1-D array + The values of the slack variables. Each slack variable corresponds + to an inequality constraint. If the slack is zero, then the + corresponding constraint is active. + con : 1-D array + The (nominally zero) residuals of the equality constraints, that is, + ``b - A_eq @ x`` + phase : int + The phase of the optimization being executed. 
In phase 1 a basic + feasible solution is sought and the T has an additional row + representing an alternate objective function. + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + """ + x = res['x'] + fun = res['fun'] + phase = res['phase'] + status = res['status'] + nit = res['nit'] + message = res['message'] + complete = res['complete'] + + saved_printoptions = np.get_printoptions() + np.set_printoptions(linewidth=500, + formatter={'float': lambda x: f"{x: 12.4f}"}) + if status: + print('--------- Simplex Early Exit -------\n') + print(f'The simplex method exited early with status {status:d}') + print(message) + elif complete: + print('--------- Simplex Complete --------\n') + print(f'Iterations required: {nit}') + else: + print(f'--------- Iteration {nit:d} ---------\n') + + if nit > 0: + if phase == 1: + print('Current Pseudo-Objective Value:') + else: + print('Current Objective Value:') + print('f = ', fun) + print() + print('Current Solution Vector:') + print('x = ', x) + print() + + np.set_printoptions(**saved_printoptions) + + +def linprog_terse_callback(res): + """ + A sample callback function demonstrating the linprog callback interface. + This callback produces brief output to sys.stdout before each iteration + and after the final iteration of the simplex algorithm. + + Parameters + ---------- + res : A `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + The independent variable vector which optimizes the linear + programming problem. + fun : float + Value of the objective function. + success : bool + True if the algorithm succeeded in finding an optimal solution. 
+ slack : 1-D array + The values of the slack variables. Each slack variable corresponds + to an inequality constraint. If the slack is zero, then the + corresponding constraint is active. + con : 1-D array + The (nominally zero) residuals of the equality constraints, that is, + ``b - A_eq @ x``. + phase : int + The phase of the optimization being executed. In phase 1 a basic + feasible solution is sought and the T has an additional row + representing an alternate objective function. + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + """ + nit = res['nit'] + x = res['x'] + + if nit == 0: + print("Iter: X:") + print(f"{nit: <5d} ", end="") + print(x) + + +def linprog(c, A_ub=None, b_ub=None, A_eq=None, b_eq=None, + bounds=(0, None), method='highs', callback=None, + options=None, x0=None, integrality=None): + r""" + Linear programming: minimize a linear objective function subject to linear + equality and inequality constraints. + + Linear programming solves problems of the following form: + + .. math:: + + \min_x \ & c^T x \\ + \mbox{such that} \ & A_{ub} x \leq b_{ub},\\ + & A_{eq} x = b_{eq},\\ + & l \leq x \leq u , + + where :math:`x` is a vector of decision variables; :math:`c`, + :math:`b_{ub}`, :math:`b_{eq}`, :math:`l`, and :math:`u` are vectors; and + :math:`A_{ub}` and :math:`A_{eq}` are matrices. + + Alternatively, that's: + + - minimize :: + + c @ x + + - such that :: + + A_ub @ x <= b_ub + A_eq @ x == b_eq + lb <= x <= ub + + Note that by default ``lb = 0`` and ``ub = None``. Other bounds can be + specified with ``bounds``. 
+ + Parameters + ---------- + c : 1-D array + The coefficients of the linear objective function to be minimized. + A_ub : 2-D array, optional + The inequality constraint matrix. Each row of ``A_ub`` specifies the + coefficients of a linear inequality constraint on ``x``. + b_ub : 1-D array, optional + The inequality constraint vector. Each element represents an + upper bound on the corresponding value of ``A_ub @ x``. + A_eq : 2-D array, optional + The equality constraint matrix. Each row of ``A_eq`` specifies the + coefficients of a linear equality constraint on ``x``. + b_eq : 1-D array, optional + The equality constraint vector. Each element of ``A_eq @ x`` must equal + the corresponding element of ``b_eq``. + bounds : sequence, optional + A sequence of ``(min, max)`` pairs for each element in ``x``, defining + the minimum and maximum values of that decision variable. + If a single tuple ``(min, max)`` is provided, then ``min`` and ``max`` + will serve as bounds for all decision variables. + Use ``None`` to indicate that there is no bound. For instance, the + default bound ``(0, None)`` means that all decision variables are + non-negative, and the pair ``(None, None)`` means no bounds at all, + i.e. all variables are allowed to be any real. + method : str, optional + The algorithm used to solve the standard form problem. + :ref:`'highs' ` (default), + :ref:`'highs-ds' `, + :ref:`'highs-ipm' `, + :ref:`'interior-point' ` (legacy), + :ref:`'revised simplex' ` (legacy), + and + :ref:`'simplex' ` (legacy) are supported. + The legacy methods are deprecated and will be removed in SciPy 1.11.0. + callback : callable, optional + If a callback function is provided, it will be called at least once per + iteration of the algorithm. The callback function must accept a single + `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + The current solution vector. + fun : float + The current value of the objective function ``c @ x``. 
+ success : bool + ``True`` when the algorithm has completed successfully. + slack : 1-D array + The (nominally positive) values of the slack, + ``b_ub - A_ub @ x``. + con : 1-D array + The (nominally zero) residuals of the equality constraints, + ``b_eq - A_eq @ x``. + phase : int + The phase of the algorithm being executed. + status : int + An integer representing the status of the algorithm. + + ``0`` : Optimization proceeding nominally. + + ``1`` : Iteration limit reached. + + ``2`` : Problem appears to be infeasible. + + ``3`` : Problem appears to be unbounded. + + ``4`` : Numerical difficulties encountered. + + nit : int + The current iteration number. + message : str + A string descriptor of the algorithm status. + + Callback functions are not currently supported by the HiGHS methods. + + options : dict, optional + A dictionary of solver options. All methods accept the following + options: + + maxiter : int + Maximum number of iterations to perform. + Default: see method-specific documentation. + disp : bool + Set to ``True`` to print convergence messages. + Default: ``False``. + presolve : bool + Set to ``False`` to disable automatic presolve. + Default: ``True``. + + All methods except the HiGHS solvers also accept: + + tol : float + A tolerance which determines when a residual is "close enough" to + zero to be considered exactly zero. + autoscale : bool + Set to ``True`` to automatically perform equilibration. + Consider using this option if the numerical values in the + constraints are separated by several orders of magnitude. + Default: ``False``. + rr : bool + Set to ``False`` to disable automatic redundancy removal. + Default: ``True``. + rr_method : string + Method used to identify and remove redundant rows from the + equality constraint matrix after presolve. 
For problems with + dense input, the available methods for redundancy removal are: + + "SVD": + Repeatedly performs singular value decomposition on + the matrix, detecting redundant rows based on nonzeros + in the left singular vectors that correspond with + zero singular values. May be fast when the matrix is + nearly full rank. + "pivot": + Uses the algorithm presented in [5]_ to identify + redundant rows. + "ID": + Uses a randomized interpolative decomposition. + Identifies columns of the matrix transpose not used in + a full-rank interpolative decomposition of the matrix. + None: + Uses "svd" if the matrix is nearly full rank, that is, + the difference between the matrix rank and the number + of rows is less than five. If not, uses "pivot". The + behavior of this default is subject to change without + prior notice. + + Default: None. + For problems with sparse input, this option is ignored, and the + pivot-based algorithm presented in [5]_ is used. + + For method-specific options, see + :func:`show_options('linprog') `. + + x0 : 1-D array, optional + Guess values of the decision variables, which will be refined by + the optimization algorithm. This argument is currently used only by the + 'revised simplex' method, and can only be used if `x0` represents a + basic feasible solution. + + integrality : 1-D array or int, optional + Indicates the type of integrality constraint on each decision variable. + + ``0`` : Continuous variable; no integrality constraint. + + ``1`` : Integer variable; decision variable must be an integer + within `bounds`. + + ``2`` : Semi-continuous variable; decision variable must be within + `bounds` or take value ``0``. + + ``3`` : Semi-integer variable; decision variable must be an integer + within `bounds` or take value ``0``. + + By default, all variables are continuous. + + For mixed integrality constraints, supply an array of shape `c.shape`. 
+ To infer a constraint on each decision variable from shorter inputs, + the argument will be broadcasted to `c.shape` using `np.broadcast_to`. + + This argument is currently used only by the ``'highs'`` method and + ignored otherwise. + + Returns + ------- + res : OptimizeResult + A :class:`scipy.optimize.OptimizeResult` consisting of the fields + below. Note that the return types of the fields may depend on whether + the optimization was successful, therefore it is recommended to check + `OptimizeResult.status` before relying on the other fields: + + x : 1-D array + The values of the decision variables that minimizes the + objective function while satisfying the constraints. + fun : float + The optimal value of the objective function ``c @ x``. + slack : 1-D array + The (nominally positive) values of the slack variables, + ``b_ub - A_ub @ x``. + con : 1-D array + The (nominally zero) residuals of the equality constraints, + ``b_eq - A_eq @ x``. + success : bool + ``True`` when the algorithm succeeds in finding an optimal + solution. + status : int + An integer representing the exit status of the algorithm. + + ``0`` : Optimization terminated successfully. + + ``1`` : Iteration limit reached. + + ``2`` : Problem appears to be infeasible. + + ``3`` : Problem appears to be unbounded. + + ``4`` : Numerical difficulties encountered. + + nit : int + The total number of iterations performed in all phases. + message : str + A string descriptor of the exit status of the algorithm. + + See Also + -------- + show_options : Additional options accepted by the solvers. + + Notes + ----- + This section describes the available solvers that can be selected by the + 'method' parameter. + + `'highs-ds'` and + `'highs-ipm'` are interfaces to the + HiGHS simplex and interior-point method solvers [13]_, respectively. + `'highs'` (default) chooses between + the two automatically. 
These are the fastest linear + programming solvers in SciPy, especially for large, sparse problems; + which of these two is faster is problem-dependent. + The other solvers (`'interior-point'`, `'revised simplex'`, and + `'simplex'`) are legacy methods and will be removed in SciPy 1.11.0. + + Method *highs-ds* is a wrapper of the C++ high performance dual + revised simplex implementation (HSOL) [13]_, [14]_. Method *highs-ipm* + is a wrapper of a C++ implementation of an **i**\ nterior-\ **p**\ oint + **m**\ ethod [13]_; it features a crossover routine, so it is as accurate + as a simplex solver. Method *highs* chooses between the two automatically. + For new code involving `linprog`, we recommend explicitly choosing one of + these three method values. + + .. versionadded:: 1.6.0 + + Method *interior-point* uses the primal-dual path following algorithm + as outlined in [4]_. This algorithm supports sparse constraint matrices and + is typically faster than the simplex methods, especially for large, sparse + problems. Note, however, that the solution returned may be slightly less + accurate than those of the simplex methods and will not, in general, + correspond with a vertex of the polytope defined by the constraints. + + .. versionadded:: 1.0.0 + + Method *revised simplex* uses the revised simplex method as described in + [9]_, except that a factorization [11]_ of the basis matrix, rather than + its inverse, is efficiently maintained and used to solve the linear systems + at each iteration of the algorithm. + + .. versionadded:: 1.3.0 + + Method *simplex* uses a traditional, full-tableau implementation of + Dantzig's simplex algorithm [1]_, [2]_ (*not* the + Nelder-Mead simplex). This algorithm is included for backwards + compatibility and educational purposes. + + .. 
versionadded:: 0.15.0 + + Before applying *interior-point*, *revised simplex*, or *simplex*, + a presolve procedure based on [8]_ attempts + to identify trivial infeasibilities, trivial unboundedness, and potential + problem simplifications. Specifically, it checks for: + + - rows of zeros in ``A_eq`` or ``A_ub``, representing trivial constraints; + - columns of zeros in ``A_eq`` `and` ``A_ub``, representing unconstrained + variables; + - column singletons in ``A_eq``, representing fixed variables; and + - column singletons in ``A_ub``, representing simple bounds. + + If presolve reveals that the problem is unbounded (e.g. an unconstrained + and unbounded variable has negative cost) or infeasible (e.g., a row of + zeros in ``A_eq`` corresponds with a nonzero in ``b_eq``), the solver + terminates with the appropriate status code. Note that presolve terminates + as soon as any sign of unboundedness is detected; consequently, a problem + may be reported as unbounded when in reality the problem is infeasible + (but infeasibility has not been detected yet). Therefore, if it is + important to know whether the problem is actually infeasible, solve the + problem again with option ``presolve=False``. + + If neither infeasibility nor unboundedness are detected in a single pass + of the presolve, bounds are tightened where possible and fixed + variables are removed from the problem. Then, linearly dependent rows + of the ``A_eq`` matrix are removed, (unless they represent an + infeasibility) to avoid numerical difficulties in the primary solve + routine. Note that rows that are nearly linearly dependent (within a + prescribed tolerance) may also be removed, which can change the optimal + solution in rare cases. If this is a concern, eliminate redundancy from + your problem formulation and run with option ``rr=False`` or + ``presolve=False``. 
+ + Several potential improvements can be made here: additional presolve + checks outlined in [8]_ should be implemented, the presolve routine should + be run multiple times (until no further simplifications can be made), and + more of the efficiency improvements from [5]_ should be implemented in the + redundancy removal routines. + + After presolve, the problem is transformed to standard form by converting + the (tightened) simple bounds to upper bound constraints, introducing + non-negative slack variables for inequality constraints, and expressing + unbounded variables as the difference between two non-negative variables. + Optionally, the problem is automatically scaled via equilibration [12]_. + The selected algorithm solves the standard form problem, and a + postprocessing routine converts the result to a solution to the original + problem. + + References + ---------- + .. [1] Dantzig, George B., Linear programming and extensions. Rand + Corporation Research Study Princeton Univ. Press, Princeton, NJ, + 1963 + .. [2] Hillier, S.H. and Lieberman, G.J. (1995), "Introduction to + Mathematical Programming", McGraw-Hill, Chapter 4. + .. [3] Bland, Robert G. New finite pivoting rules for the simplex method. + Mathematics of Operations Research (2), 1977: pp. 103-107. + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + .. [5] Andersen, Erling D. "Finding all linearly dependent rows in + large-scale linear programming." Optimization Methods and Software + 6.3 (1995): 219-227. + .. [6] Freund, Robert M. "Primal-Dual Interior-Point Methods for Linear + Programming based on Newton's Method." Unpublished Course Notes, + March 2004. Available 2/25/2017 at + https://ocw.mit.edu/courses/sloan-school-of-management/15-084j-nonlinear-programming-spring-2004/lecture-notes/lec14_int_pt_mthd.pdf + .. 
[7] Fourer, Robert. "Solving Linear Programs by Interior-Point Methods." + Unpublished Course Notes, August 26, 2005. Available 2/25/2017 at + http://www.4er.org/CourseNotes/Book%20B/B-III.pdf + .. [8] Andersen, Erling D., and Knud D. Andersen. "Presolving in linear + programming." Mathematical Programming 71.2 (1995): 221-245. + .. [9] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear + programming." Athena Scientific 1 (1997): 997. + .. [10] Andersen, Erling D., et al. Implementation of interior point + methods for large scale linear programming. HEC/Universite de + Geneve, 1996. + .. [11] Bartels, Richard H. "A stabilization of the simplex method." + Journal in Numerische Mathematik 16.5 (1971): 414-434. + .. [12] Tomlin, J. A. "On scaling linear programming problems." + Mathematical Programming Study 4 (1975): 146-166. + .. [13] Huangfu, Q., Galabova, I., Feldmeier, M., and Hall, J. A. J. + "HiGHS - high performance software for linear optimization." + https://highs.dev/ + .. [14] Huangfu, Q. and Hall, J. A. J. "Parallelizing the dual revised + simplex method." Mathematical Programming Computation, 10 (1), + 119-142, 2018. DOI: 10.1007/s12532-017-0130-5 + + Examples + -------- + Consider the following problem: + + .. math:: + + \min_{x_0, x_1} \ -x_0 + 4x_1 & \\ + \mbox{such that} \ -3x_0 + x_1 & \leq 6,\\ + -x_0 - 2x_1 & \geq -4,\\ + x_1 & \geq -3. + + The problem is not presented in the form accepted by `linprog`. This is + easily remedied by converting the "greater than" inequality + constraint to a "less than" inequality constraint by + multiplying both sides by a factor of :math:`-1`. Note also that the last + constraint is really the simple bound :math:`-3 \leq x_1 \leq \infty`. + Finally, since there are no bounds on :math:`x_0`, we must explicitly + specify the bounds :math:`-\infty \leq x_0 \leq \infty`, as the + default is for variables to be non-negative. 
After collecting coefficients
+ into arrays and tuples, the input for this problem is:
+
+ >>> from scipy.optimize import linprog
+ >>> c = [-1, 4]
+ >>> A = [[-3, 1], [1, 2]]
+ >>> b = [6, 4]
+ >>> x0_bounds = (None, None)
+ >>> x1_bounds = (-3, None)
+ >>> res = linprog(c, A_ub=A, b_ub=b, bounds=[x0_bounds, x1_bounds])
+ >>> res.fun
+ -22.0
+ >>> res.x
+ array([10., -3.])
+ >>> res.message
+ 'Optimization terminated successfully. (HiGHS Status 7: Optimal)'
+
+ The marginals (AKA dual values / shadow prices / Lagrange multipliers)
+ and residuals (slacks) are also available.
+
+ >>> res.ineqlin
+ residual: [ 3.900e+01 0.000e+00]
+ marginals: [-0.000e+00 -1.000e+00]
+
+ For example, because the marginal associated with the second inequality
+ constraint is -1, we expect the optimal value of the objective function
+ to decrease by ``eps`` if we add a small amount ``eps`` to the right hand
+ side of the second inequality constraint:
+
+ >>> eps = 0.05
+ >>> b[1] += eps
+ >>> linprog(c, A_ub=A, b_ub=b, bounds=[x0_bounds, x1_bounds]).fun
+ -22.05
+
+ Also, because the residual on the first inequality constraint is 39, we
+ can decrease the right hand side of the first constraint by 39 without
+ affecting the optimal solution.
+
+ >>> b = [6, 4] # reset to original values
+ >>> b[0] -= 39
+ >>> linprog(c, A_ub=A, b_ub=b, bounds=[x0_bounds, x1_bounds]).fun
+ -22.0
+
+ """
+
+ meth = method.lower()
+ methods = {"highs", "highs-ds", "highs-ipm",
+ "simplex", "revised simplex", "interior-point"}
+
+ if meth not in methods:
+ raise ValueError(f"Unknown solver '{method}'")
+
+ if x0 is not None and meth != "revised simplex":
+ warning_message = "x0 is used only when method is 'revised simplex'. "
+ warn(warning_message, OptimizeWarning, stacklevel=2)
+
+ if np.any(integrality) and not meth == "highs":
+ integrality = None
+ warning_message = ("Only `method='highs'` supports integer "
+ "constraints. 
Ignoring `integrality`.") + warn(warning_message, OptimizeWarning, stacklevel=2) + elif np.any(integrality): + integrality = np.broadcast_to(integrality, np.shape(c)) + else: + integrality = None + + lp = _LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality) + lp, solver_options = _parse_linprog(lp, options, meth) + tol = solver_options.get('tol', 1e-9) + + # Give unmodified problem to HiGHS + if meth.startswith('highs'): + if callback is not None: + raise NotImplementedError("HiGHS solvers do not support the " + "callback interface.") + highs_solvers = {'highs-ipm': 'ipm', 'highs-ds': 'simplex', + 'highs': None} + + sol = _linprog_highs(lp, solver=highs_solvers[meth], + **solver_options) + sol['status'], sol['message'] = ( + _check_result(sol['x'], sol['fun'], sol['status'], sol['slack'], + sol['con'], lp.bounds, tol, sol['message'], + integrality)) + sol['success'] = sol['status'] == 0 + return OptimizeResult(sol) + + warn(f"`method='{meth}'` is deprecated and will be removed in SciPy " + "1.11.0. Please use one of the HiGHS solvers (e.g. " + "`method='highs'`) in new code.", DeprecationWarning, stacklevel=2) + + iteration = 0 + complete = False # will become True if solved in presolve + undo = [] + + # Keep the original arrays to calculate slack/residuals for original + # problem. + lp_o = deepcopy(lp) + + # Solve trivial problem, eliminate variables, tighten bounds, etc. 
+ rr_method = solver_options.pop('rr_method', None) # need to pop these; + rr = solver_options.pop('rr', True) # they're not passed to methods + c0 = 0 # we might get a constant term in the objective + if solver_options.pop('presolve', True): + (lp, c0, x, undo, complete, status, message) = _presolve(lp, rr, + rr_method, + tol) + + C, b_scale = 1, 1 # for trivial unscaling if autoscale is not used + postsolve_args = (lp_o._replace(bounds=lp.bounds), undo, C, b_scale) + + if not complete: + A, b, c, c0, x0 = _get_Abc(lp, c0) + if solver_options.pop('autoscale', False): + A, b, c, x0, C, b_scale = _autoscale(A, b, c, x0) + postsolve_args = postsolve_args[:-2] + (C, b_scale) + + if meth == 'simplex': + x, status, message, iteration = _linprog_simplex( + c, c0=c0, A=A, b=b, callback=callback, + postsolve_args=postsolve_args, **solver_options) + elif meth == 'interior-point': + x, status, message, iteration = _linprog_ip( + c, c0=c0, A=A, b=b, callback=callback, + postsolve_args=postsolve_args, **solver_options) + elif meth == 'revised simplex': + x, status, message, iteration = _linprog_rs( + c, c0=c0, A=A, b=b, x0=x0, callback=callback, + postsolve_args=postsolve_args, **solver_options) + + # Eliminate artificial variables, re-introduce presolved variables, etc. 
+ disp = solver_options.get('disp', False) + + x, fun, slack, con = _postsolve(x, postsolve_args, complete) + + status, message = _check_result(x, fun, status, slack, con, lp_o.bounds, + tol, message, integrality) + + if disp: + _display_summary(message, status, fun, iteration) + + sol = { + 'x': x, + 'fun': fun, + 'slack': slack, + 'con': con, + 'status': status, + 'message': message, + 'nit': iteration, + 'success': status == 0} + + return OptimizeResult(sol) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_highs.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_highs.py new file mode 100644 index 0000000000000000000000000000000000000000..eb07443bb255471e6e0ac487bd6749253bf5d133 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_highs.py @@ -0,0 +1,440 @@ +"""HiGHS Linear Optimization Methods + +Interface to HiGHS linear optimization software. +https://highs.dev/ + +.. versionadded:: 1.5.0 + +References +---------- +.. [1] Q. Huangfu and J.A.J. Hall. "Parallelizing the dual revised simplex + method." Mathematical Programming Computation, 10 (1), 119-142, + 2018. 
DOI: 10.1007/s12532-017-0130-5 + +""" + +import inspect +import numpy as np +from ._optimize import OptimizeWarning, OptimizeResult +from warnings import warn +from ._highs._highs_wrapper import _highs_wrapper +from ._highs._highs_constants import ( + CONST_INF, + MESSAGE_LEVEL_NONE, + HIGHS_OBJECTIVE_SENSE_MINIMIZE, + + MODEL_STATUS_NOTSET, + MODEL_STATUS_LOAD_ERROR, + MODEL_STATUS_MODEL_ERROR, + MODEL_STATUS_PRESOLVE_ERROR, + MODEL_STATUS_SOLVE_ERROR, + MODEL_STATUS_POSTSOLVE_ERROR, + MODEL_STATUS_MODEL_EMPTY, + MODEL_STATUS_OPTIMAL, + MODEL_STATUS_INFEASIBLE, + MODEL_STATUS_UNBOUNDED_OR_INFEASIBLE, + MODEL_STATUS_UNBOUNDED, + MODEL_STATUS_REACHED_DUAL_OBJECTIVE_VALUE_UPPER_BOUND + as MODEL_STATUS_RDOVUB, + MODEL_STATUS_REACHED_OBJECTIVE_TARGET, + MODEL_STATUS_REACHED_TIME_LIMIT, + MODEL_STATUS_REACHED_ITERATION_LIMIT, + + HIGHS_SIMPLEX_STRATEGY_DUAL, + + HIGHS_SIMPLEX_CRASH_STRATEGY_OFF, + + HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_CHOOSE, + HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_DANTZIG, + HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_DEVEX, + HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_STEEPEST_EDGE, +) +from scipy.sparse import csc_matrix, vstack, issparse + + +def _highs_to_scipy_status_message(highs_status, highs_message): + """Converts HiGHS status number/message to SciPy status number/message""" + + scipy_statuses_messages = { + None: (4, "HiGHS did not provide a status code. "), + MODEL_STATUS_NOTSET: (4, ""), + MODEL_STATUS_LOAD_ERROR: (4, ""), + MODEL_STATUS_MODEL_ERROR: (2, ""), + MODEL_STATUS_PRESOLVE_ERROR: (4, ""), + MODEL_STATUS_SOLVE_ERROR: (4, ""), + MODEL_STATUS_POSTSOLVE_ERROR: (4, ""), + MODEL_STATUS_MODEL_EMPTY: (4, ""), + MODEL_STATUS_RDOVUB: (4, ""), + MODEL_STATUS_REACHED_OBJECTIVE_TARGET: (4, ""), + MODEL_STATUS_OPTIMAL: (0, "Optimization terminated successfully. "), + MODEL_STATUS_REACHED_TIME_LIMIT: (1, "Time limit reached. "), + MODEL_STATUS_REACHED_ITERATION_LIMIT: (1, "Iteration limit reached. "), + MODEL_STATUS_INFEASIBLE: (2, "The problem is infeasible. 
"), + MODEL_STATUS_UNBOUNDED: (3, "The problem is unbounded. "), + MODEL_STATUS_UNBOUNDED_OR_INFEASIBLE: (4, "The problem is unbounded " + "or infeasible. ")} + unrecognized = (4, "The HiGHS status code was not recognized. ") + scipy_status, scipy_message = ( + scipy_statuses_messages.get(highs_status, unrecognized)) + scipy_message = (f"{scipy_message}" + f"(HiGHS Status {highs_status}: {highs_message})") + return scipy_status, scipy_message + + +def _replace_inf(x): + # Replace `np.inf` with CONST_INF + infs = np.isinf(x) + with np.errstate(invalid="ignore"): + x[infs] = np.sign(x[infs])*CONST_INF + return x + + +def _convert_to_highs_enum(option, option_str, choices): + # If option is in the choices we can look it up, if not use + # the default value taken from function signature and warn: + try: + return choices[option.lower()] + except AttributeError: + return choices[option] + except KeyError: + sig = inspect.signature(_linprog_highs) + default_str = sig.parameters[option_str].default + warn(f"Option {option_str} is {option}, but only values in " + f"{set(choices.keys())} are allowed. Using default: " + f"{default_str}.", + OptimizeWarning, stacklevel=3) + return choices[default_str] + + +def _linprog_highs(lp, solver, time_limit=None, presolve=True, + disp=False, maxiter=None, + dual_feasibility_tolerance=None, + primal_feasibility_tolerance=None, + ipm_optimality_tolerance=None, + simplex_dual_edge_weight_strategy=None, + mip_rel_gap=None, + mip_max_nodes=None, + **unknown_options): + r""" + Solve the following linear programming problem using one of the HiGHS + solvers: + + User-facing documentation is in _linprog_doc.py. + + Parameters + ---------- + lp : _LPProblem + A ``scipy.optimize._linprog_util._LPProblem`` ``namedtuple``. + solver : "ipm" or "simplex" or None + Which HiGHS solver to use. If ``None``, "simplex" will be used. + + Options + ------- + maxiter : int + The maximum number of iterations to perform in either phase. 
For + ``solver='ipm'``, this does not include the number of crossover + iterations. Default is the largest possible value for an ``int`` + on the platform. + disp : bool + Set to ``True`` if indicators of optimization status are to be printed + to the console each iteration; default ``False``. + time_limit : float + The maximum time in seconds allotted to solve the problem; default is + the largest possible value for a ``double`` on the platform. + presolve : bool + Presolve attempts to identify trivial infeasibilities, + identify trivial unboundedness, and simplify the problem before + sending it to the main solver. It is generally recommended + to keep the default setting ``True``; set to ``False`` if presolve is + to be disabled. + dual_feasibility_tolerance : double + Dual feasibility tolerance. Default is 1e-07. + The minimum of this and ``primal_feasibility_tolerance`` + is used for the feasibility tolerance when ``solver='ipm'``. + primal_feasibility_tolerance : double + Primal feasibility tolerance. Default is 1e-07. + The minimum of this and ``dual_feasibility_tolerance`` + is used for the feasibility tolerance when ``solver='ipm'``. + ipm_optimality_tolerance : double + Optimality tolerance for ``solver='ipm'``. Default is 1e-08. + Minimum possible value is 1e-12 and must be smaller than the largest + possible value for a ``double`` on the platform. + simplex_dual_edge_weight_strategy : str (default: None) + Strategy for simplex dual edge weights. The default, ``None``, + automatically selects one of the following. + + ``'dantzig'`` uses Dantzig's original strategy of choosing the most + negative reduced cost. + + ``'devex'`` uses the strategy described in [15]_. + + ``steepest`` uses the exact steepest edge strategy as described in + [16]_. + + ``'steepest-devex'`` begins with the exact steepest edge strategy + until the computation is too costly or inexact and then switches to + the devex method. 
+ + Currently, using ``None`` always selects ``'steepest-devex'``, but this + may change as new options become available. + + mip_max_nodes : int + The maximum number of nodes allotted to solve the problem; default is + the largest possible value for a ``HighsInt`` on the platform. + Ignored if not using the MIP solver. + unknown_options : dict + Optional arguments not used by this particular solver. If + ``unknown_options`` is non-empty, a warning is issued listing all + unused options. + + Returns + ------- + sol : dict + A dictionary consisting of the fields: + + x : 1D array + The values of the decision variables that minimizes the + objective function while satisfying the constraints. + fun : float + The optimal value of the objective function ``c @ x``. + slack : 1D array + The (nominally positive) values of the slack, + ``b_ub - A_ub @ x``. + con : 1D array + The (nominally zero) residuals of the equality constraints, + ``b_eq - A_eq @ x``. + success : bool + ``True`` when the algorithm succeeds in finding an optimal + solution. + status : int + An integer representing the exit status of the algorithm. + + ``0`` : Optimization terminated successfully. + + ``1`` : Iteration or time limit reached. + + ``2`` : Problem appears to be infeasible. + + ``3`` : Problem appears to be unbounded. + + ``4`` : The HiGHS solver ran into a problem. + + message : str + A string descriptor of the exit status of the algorithm. + nit : int + The total number of iterations performed. + For ``solver='simplex'``, this includes iterations in all + phases. For ``solver='ipm'``, this does not include + crossover iterations. + crossover_nit : int + The number of primal/dual pushes performed during the + crossover routine for ``solver='ipm'``. This is ``0`` + for ``solver='simplex'``. + ineqlin : OptimizeResult + Solution and sensitivity information corresponding to the + inequality constraints, `b_ub`. 
A dictionary consisting of the
+ fields:
+
+ residual : np.ndarray
+ The (nominally positive) values of the slack variables,
+ ``b_ub - A_ub @ x``. This quantity is also commonly
+ referred to as "slack".
+
+ marginals : np.ndarray
+ The sensitivity (partial derivative) of the objective
+ function with respect to the right-hand side of the
+ inequality constraints, `b_ub`.
+
+ eqlin : OptimizeResult
+ Solution and sensitivity information corresponding to the
+ equality constraints, `b_eq`. A dictionary consisting of the
+ fields:
+
+ residual : np.ndarray
+ The (nominally zero) residuals of the equality constraints,
+ ``b_eq - A_eq @ x``.
+
+ marginals : np.ndarray
+ The sensitivity (partial derivative) of the objective
+ function with respect to the right-hand side of the
+ equality constraints, `b_eq`.
+
+ lower, upper : OptimizeResult
+ Solution and sensitivity information corresponding to the
+ lower and upper bounds on decision variables, `bounds`.
+
+ residual : np.ndarray
+ The (nominally positive) values of the quantity
+ ``x - lb`` (lower) or ``ub - x`` (upper).
+
+ marginals : np.ndarray
+ The sensitivity (partial derivative) of the objective
+ function with respect to the lower and upper
+ `bounds`.
+
+ mip_node_count : int
+ The number of subproblems or "nodes" solved by the MILP
+ solver. Only present when `integrality` is not `None`.
+
+ mip_dual_bound : float
+ The MILP solver's final estimate of the lower bound on the
+ optimal solution. Only present when `integrality` is not
+ `None`.
+
+ mip_gap : float
+ The difference between the final objective function value
+ and the final dual bound, scaled by the final objective
+ function value. Only present when `integrality` is not
+ `None`.
+
+ Notes
+ -----
+ The result fields `ineqlin`, `eqlin`, `lower`, and `upper` all contain
+ `marginals`, or partial derivatives of the objective function with respect
+ to the right-hand side of each constraint. 
These partial derivatives are + also referred to as "Lagrange multipliers", "dual values", and + "shadow prices". The sign convention of `marginals` is opposite that + of Lagrange multipliers produced by many nonlinear solvers. + + References + ---------- + .. [15] Harris, Paula MJ. "Pivot selection methods of the Devex LP code." + Mathematical programming 5.1 (1973): 1-28. + .. [16] Goldfarb, Donald, and John Ker Reid. "A practicable steepest-edge + simplex algorithm." Mathematical Programming 12.1 (1977): 361-371. + """ + if unknown_options: + message = (f"Unrecognized options detected: {unknown_options}. " + "These will be passed to HiGHS verbatim.") + warn(message, OptimizeWarning, stacklevel=3) + + # Map options to HiGHS enum values + simplex_dual_edge_weight_strategy_enum = _convert_to_highs_enum( + simplex_dual_edge_weight_strategy, + 'simplex_dual_edge_weight_strategy', + choices={'dantzig': HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_DANTZIG, + 'devex': HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_DEVEX, + 'steepest-devex': HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_CHOOSE, + 'steepest': + HIGHS_SIMPLEX_EDGE_WEIGHT_STRATEGY_STEEPEST_EDGE, + None: None}) + + c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality = lp + + lb, ub = bounds.T.copy() # separate bounds, copy->C-cntgs + # highs_wrapper solves LHS <= A*x <= RHS, not equality constraints + with np.errstate(invalid="ignore"): + lhs_ub = -np.ones_like(b_ub)*np.inf # LHS of UB constraints is -inf + rhs_ub = b_ub # RHS of UB constraints is b_ub + lhs_eq = b_eq # Equality constraint is inequality + rhs_eq = b_eq # constraint with LHS=RHS + lhs = np.concatenate((lhs_ub, lhs_eq)) + rhs = np.concatenate((rhs_ub, rhs_eq)) + + if issparse(A_ub) or issparse(A_eq): + A = vstack((A_ub, A_eq)) + else: + A = np.vstack((A_ub, A_eq)) + A = csc_matrix(A) + + options = { + 'presolve': presolve, + 'sense': HIGHS_OBJECTIVE_SENSE_MINIMIZE, + 'solver': solver, + 'time_limit': time_limit, + 'highs_debug_level': MESSAGE_LEVEL_NONE, + 
'dual_feasibility_tolerance': dual_feasibility_tolerance, + 'ipm_optimality_tolerance': ipm_optimality_tolerance, + 'log_to_console': disp, + 'mip_max_nodes': mip_max_nodes, + 'output_flag': disp, + 'primal_feasibility_tolerance': primal_feasibility_tolerance, + 'simplex_dual_edge_weight_strategy': + simplex_dual_edge_weight_strategy_enum, + 'simplex_strategy': HIGHS_SIMPLEX_STRATEGY_DUAL, + 'simplex_crash_strategy': HIGHS_SIMPLEX_CRASH_STRATEGY_OFF, + 'ipm_iteration_limit': maxiter, + 'simplex_iteration_limit': maxiter, + 'mip_rel_gap': mip_rel_gap, + } + options.update(unknown_options) + + # np.inf doesn't work; use very large constant + rhs = _replace_inf(rhs) + lhs = _replace_inf(lhs) + lb = _replace_inf(lb) + ub = _replace_inf(ub) + + if integrality is None or np.sum(integrality) == 0: + integrality = np.empty(0) + else: + integrality = np.array(integrality) + + res = _highs_wrapper(c, A.indptr, A.indices, A.data, lhs, rhs, + lb, ub, integrality.astype(np.uint8), options) + + # HiGHS represents constraints as lhs/rhs, so + # Ax + s = b => Ax = b - s + # and we need to split up s by A_ub and A_eq + if 'slack' in res: + slack = res['slack'] + con = np.array(slack[len(b_ub):]) + slack = np.array(slack[:len(b_ub)]) + else: + slack, con = None, None + + # lagrange multipliers for equalities/inequalities and upper/lower bounds + if 'lambda' in res: + lamda = res['lambda'] + marg_ineqlin = np.array(lamda[:len(b_ub)]) + marg_eqlin = np.array(lamda[len(b_ub):]) + marg_upper = np.array(res['marg_bnds'][1, :]) + marg_lower = np.array(res['marg_bnds'][0, :]) + else: + marg_ineqlin, marg_eqlin = None, None + marg_upper, marg_lower = None, None + + # this needs to be updated if we start choosing the solver intelligently + + # Convert to scipy-style status and message + highs_status = res.get('status', None) + highs_message = res.get('message', None) + status, message = _highs_to_scipy_status_message(highs_status, + highs_message) + + x = np.array(res['x']) if 'x' in res 
else None + sol = {'x': x, + 'slack': slack, + 'con': con, + 'ineqlin': OptimizeResult({ + 'residual': slack, + 'marginals': marg_ineqlin, + }), + 'eqlin': OptimizeResult({ + 'residual': con, + 'marginals': marg_eqlin, + }), + 'lower': OptimizeResult({ + 'residual': None if x is None else x - lb, + 'marginals': marg_lower, + }), + 'upper': OptimizeResult({ + 'residual': None if x is None else ub - x, + 'marginals': marg_upper + }), + 'fun': res.get('fun'), + 'status': status, + 'success': res['status'] == MODEL_STATUS_OPTIMAL, + 'message': message, + 'nit': res.get('simplex_nit', 0) or res.get('ipm_nit', 0), + 'crossover_nit': res.get('crossover_nit'), + } + + if np.any(x) and integrality is not None: + sol.update({ + 'mip_node_count': res.get('mip_node_count', 0), + 'mip_dual_bound': res.get('mip_dual_bound', 0.0), + 'mip_gap': res.get('mip_gap', 0.0), + }) + + return sol diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_ip.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_ip.py new file mode 100644 index 0000000000000000000000000000000000000000..73bca3037f0e548f2420ba6be220446e94ddeb69 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_ip.py @@ -0,0 +1,1126 @@ +"""Interior-point method for linear programming + +The *interior-point* method uses the primal-dual path following algorithm +outlined in [1]_. This algorithm supports sparse constraint matrices and +is typically faster than the simplex methods, especially for large, sparse +problems. Note, however, that the solution returned may be slightly less +accurate than those of the simplex methods and will not, in general, +correspond with a vertex of the polytope defined by the constraints. + + .. versionadded:: 1.0.0 + +References +---------- +.. [1] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." 
High performance optimization. Springer US, + 2000. 197-232. +""" +# Author: Matt Haberland + +import numpy as np +import scipy as sp +import scipy.sparse as sps +from warnings import warn +from scipy.linalg import LinAlgError +from ._optimize import OptimizeWarning, OptimizeResult, _check_unknown_options +from ._linprog_util import _postsolve +has_umfpack = True +has_cholmod = True +try: + import sksparse # noqa: F401 + from sksparse.cholmod import cholesky as cholmod # noqa: F401 + from sksparse.cholmod import analyze as cholmod_analyze +except ImportError: + has_cholmod = False +try: + import scikits.umfpack # test whether to use factorized # noqa: F401 +except ImportError: + has_umfpack = False + + +def _get_solver(M, sparse=False, lstsq=False, sym_pos=True, + cholesky=True, permc_spec='MMD_AT_PLUS_A'): + """ + Given solver options, return a handle to the appropriate linear system + solver. + + Parameters + ---------- + M : 2-D array + As defined in [4] Equation 8.31 + sparse : bool (default = False) + True if the system to be solved is sparse. This is typically set + True when the original ``A_ub`` and ``A_eq`` arrays are sparse. + lstsq : bool (default = False) + True if the system is ill-conditioned and/or (nearly) singular and + thus a more robust least-squares solver is desired. This is sometimes + needed as the solution is approached. + sym_pos : bool (default = True) + True if the system matrix is symmetric positive definite + Sometimes this needs to be set false as the solution is approached, + even when the system should be symmetric positive definite, due to + numerical difficulties. + cholesky : bool (default = True) + True if the system is to be solved by Cholesky, rather than LU, + decomposition. This is typically faster unless the problem is very + small or prone to numerical difficulties. + permc_spec : str (default = 'MMD_AT_PLUS_A') + Sparsity preservation strategy used by SuperLU. Acceptable values are: + + - ``NATURAL``: natural ordering. 
+ - ``MMD_ATA``: minimum degree ordering on the structure of A^T A. + - ``MMD_AT_PLUS_A``: minimum degree ordering on the structure of A^T+A. + - ``COLAMD``: approximate minimum degree column ordering. + + See SuperLU documentation. + + Returns + ------- + solve : function + Handle to the appropriate solver function + + """ + try: + if sparse: + if lstsq: + def solve(r, sym_pos=False): + return sps.linalg.lsqr(M, r)[0] + elif cholesky: + try: + # Will raise an exception in the first call, + # or when the matrix changes due to a new problem + _get_solver.cholmod_factor.cholesky_inplace(M) + except Exception: + _get_solver.cholmod_factor = cholmod_analyze(M) + _get_solver.cholmod_factor.cholesky_inplace(M) + solve = _get_solver.cholmod_factor + else: + if has_umfpack and sym_pos: + solve = sps.linalg.factorized(M) + else: # factorized doesn't pass permc_spec + solve = sps.linalg.splu(M, permc_spec=permc_spec).solve + + else: + if lstsq: # sometimes necessary as solution is approached + def solve(r): + return sp.linalg.lstsq(M, r)[0] + elif cholesky: + L = sp.linalg.cho_factor(M) + + def solve(r): + return sp.linalg.cho_solve(L, r) + else: + # this seems to cache the matrix factorization, so solving + # with multiple right hand sides is much faster + def solve(r, sym_pos=sym_pos): + if sym_pos: + return sp.linalg.solve(M, r, assume_a="pos") + else: + return sp.linalg.solve(M, r) + # There are many things that can go wrong here, and it's hard to say + # what all of them are. It doesn't really matter: if the matrix can't be + # factorized, return None. get_solver will be called again with different + # inputs, and a new routine will try to factorize the matrix. 
+ except KeyboardInterrupt: + raise + except Exception: + return None + return solve + + +def _get_delta(A, b, c, x, y, z, tau, kappa, gamma, eta, sparse=False, + lstsq=False, sym_pos=True, cholesky=True, pc=True, ip=False, + permc_spec='MMD_AT_PLUS_A'): + """ + Given standard form problem defined by ``A``, ``b``, and ``c``; + current variable estimates ``x``, ``y``, ``z``, ``tau``, and ``kappa``; + algorithmic parameters ``gamma and ``eta; + and options ``sparse``, ``lstsq``, ``sym_pos``, ``cholesky``, ``pc`` + (predictor-corrector), and ``ip`` (initial point improvement), + get the search direction for increments to the variable estimates. + + Parameters + ---------- + As defined in [4], except: + sparse : bool + True if the system to be solved is sparse. This is typically set + True when the original ``A_ub`` and ``A_eq`` arrays are sparse. + lstsq : bool + True if the system is ill-conditioned and/or (nearly) singular and + thus a more robust least-squares solver is desired. This is sometimes + needed as the solution is approached. + sym_pos : bool + True if the system matrix is symmetric positive definite + Sometimes this needs to be set false as the solution is approached, + even when the system should be symmetric positive definite, due to + numerical difficulties. + cholesky : bool + True if the system is to be solved by Cholesky, rather than LU, + decomposition. This is typically faster unless the problem is very + small or prone to numerical difficulties. + pc : bool + True if the predictor-corrector method of Mehrota is to be used. This + is almost always (if not always) beneficial. Even though it requires + the solution of an additional linear system, the factorization + is typically (implicitly) reused so solution is efficient, and the + number of algorithm iterations is typically reduced. + ip : bool + True if the improved initial point suggestion due to [4] section 4.3 + is desired. It's unclear whether this is beneficial. 
+ permc_spec : str (default = 'MMD_AT_PLUS_A') + (Has effect only with ``sparse = True``, ``lstsq = False``, ``sym_pos = + True``.) A matrix is factorized in each iteration of the algorithm. + This option specifies how to permute the columns of the matrix for + sparsity preservation. Acceptable values are: + + - ``NATURAL``: natural ordering. + - ``MMD_ATA``: minimum degree ordering on the structure of A^T A. + - ``MMD_AT_PLUS_A``: minimum degree ordering on the structure of A^T+A. + - ``COLAMD``: approximate minimum degree column ordering. + + This option can impact the convergence of the + interior point algorithm; test different values to determine which + performs best for your problem. For more information, refer to + ``scipy.sparse.linalg.splu``. + + Returns + ------- + Search directions as defined in [4] + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + + """ + if A.shape[0] == 0: + # If there are no constraints, some solvers fail (understandably) + # rather than returning empty solution. This gets the job done. 
+ sparse, lstsq, sym_pos, cholesky = False, False, True, False + n_x = len(x) + + # [4] Equation 8.8 + r_P = b * tau - A.dot(x) + r_D = c * tau - A.T.dot(y) - z + r_G = c.dot(x) - b.transpose().dot(y) + kappa + mu = (x.dot(z) + tau * kappa) / (n_x + 1) + + # Assemble M from [4] Equation 8.31 + Dinv = x / z + + if sparse: + M = A.dot(sps.diags(Dinv, 0, format="csc").dot(A.T)) + else: + M = A.dot(Dinv.reshape(-1, 1) * A.T) + solve = _get_solver(M, sparse, lstsq, sym_pos, cholesky, permc_spec) + + # pc: "predictor-corrector" [4] Section 4.1 + # In development this option could be turned off + # but it always seems to improve performance substantially + n_corrections = 1 if pc else 0 + + i = 0 + alpha, d_x, d_z, d_tau, d_kappa = 0, 0, 0, 0, 0 + while i <= n_corrections: + # Reference [4] Eq. 8.6 + rhatp = eta(gamma) * r_P + rhatd = eta(gamma) * r_D + rhatg = eta(gamma) * r_G + + # Reference [4] Eq. 8.7 + rhatxs = gamma * mu - x * z + rhattk = gamma * mu - tau * kappa + + if i == 1: + if ip: # if the correction is to get "initial point" + # Reference [4] Eq. 8.23 + rhatxs = ((1 - alpha) * gamma * mu - + x * z - alpha**2 * d_x * d_z) + rhattk = ((1 - alpha) * gamma * mu - + tau * kappa - + alpha**2 * d_tau * d_kappa) + else: # if the correction is for "predictor-corrector" + # Reference [4] Eq. 8.13 + rhatxs -= d_x * d_z + rhattk -= d_tau * d_kappa + + # sometimes numerical difficulties arise as the solution is approached + # this loop tries to solve the equations using a sequence of functions + # for solve. For dense systems, the order is: + # 1. scipy.linalg.cho_factor/scipy.linalg.cho_solve, + # 2. scipy.linalg.solve w/ sym_pos = True, + # 3. scipy.linalg.solve w/ sym_pos = False, and if all else fails + # 4. scipy.linalg.lstsq + # For sparse systems, the order is: + # 1. sksparse.cholmod.cholesky (if available) + # 2. scipy.sparse.linalg.factorized (if umfpack available) + # 3. scipy.sparse.linalg.splu + # 4. 
scipy.sparse.linalg.lsqr + solved = False + while not solved: + try: + # [4] Equation 8.28 + p, q = _sym_solve(Dinv, A, c, b, solve) + # [4] Equation 8.29 + u, v = _sym_solve(Dinv, A, rhatd - + (1 / x) * rhatxs, rhatp, solve) + if np.any(np.isnan(p)) or np.any(np.isnan(q)): + raise LinAlgError + solved = True + except (LinAlgError, ValueError, TypeError) as e: + # Usually this doesn't happen. If it does, it happens when + # there are redundant constraints or when approaching the + # solution. If so, change solver. + if cholesky: + cholesky = False + warn( + "Solving system with option 'cholesky':True " + "failed. It is normal for this to happen " + "occasionally, especially as the solution is " + "approached. However, if you see this frequently, " + "consider setting option 'cholesky' to False.", + OptimizeWarning, stacklevel=5) + elif sym_pos: + sym_pos = False + warn( + "Solving system with option 'sym_pos':True " + "failed. It is normal for this to happen " + "occasionally, especially as the solution is " + "approached. However, if you see this frequently, " + "consider setting option 'sym_pos' to False.", + OptimizeWarning, stacklevel=5) + elif not lstsq: + lstsq = True + warn( + "Solving system with option 'sym_pos':False " + "failed. This may happen occasionally, " + "especially as the solution is " + "approached. However, if you see this frequently, " + "your problem may be numerically challenging. " + "If you cannot improve the formulation, consider " + "setting 'lstsq' to True. 
Consider also setting " + "`presolve` to True, if it is not already.", + OptimizeWarning, stacklevel=5) + else: + raise e + solve = _get_solver(M, sparse, lstsq, sym_pos, + cholesky, permc_spec) + # [4] Results after 8.29 + d_tau = ((rhatg + 1 / tau * rhattk - (-c.dot(u) + b.dot(v))) / + (1 / tau * kappa + (-c.dot(p) + b.dot(q)))) + d_x = u + p * d_tau + d_y = v + q * d_tau + + # [4] Relations between after 8.25 and 8.26 + d_z = (1 / x) * (rhatxs - z * d_x) + d_kappa = 1 / tau * (rhattk - kappa * d_tau) + + # [4] 8.12 and "Let alpha be the maximal possible step..." before 8.23 + alpha = _get_step(x, d_x, z, d_z, tau, d_tau, kappa, d_kappa, 1) + if ip: # initial point - see [4] 4.4 + gamma = 10 + else: # predictor-corrector, [4] definition after 8.12 + beta1 = 0.1 # [4] pg. 220 (Table 8.1) + gamma = (1 - alpha)**2 * min(beta1, (1 - alpha)) + i += 1 + + return d_x, d_y, d_z, d_tau, d_kappa + + +def _sym_solve(Dinv, A, r1, r2, solve): + """ + An implementation of [4] equation 8.31 and 8.32 + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + + """ + # [4] 8.31 + r = r2 + A.dot(Dinv * r1) + v = solve(r) + # [4] 8.32 + u = Dinv * (A.T.dot(v) - r1) + return u, v + + +def _get_step(x, d_x, z, d_z, tau, d_tau, kappa, d_kappa, alpha0): + """ + An implementation of [4] equation 8.21 + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. 
+ + """ + # [4] 4.3 Equation 8.21, ignoring 8.20 requirement + # same step is taken in primal and dual spaces + # alpha0 is basically beta3 from [4] Table 8.1, but instead of beta3 + # the value 1 is used in Mehrota corrector and initial point correction + i_x = d_x < 0 + i_z = d_z < 0 + alpha_x = alpha0 * np.min(x[i_x] / -d_x[i_x]) if np.any(i_x) else 1 + alpha_tau = alpha0 * tau / -d_tau if d_tau < 0 else 1 + alpha_z = alpha0 * np.min(z[i_z] / -d_z[i_z]) if np.any(i_z) else 1 + alpha_kappa = alpha0 * kappa / -d_kappa if d_kappa < 0 else 1 + alpha = np.min([1, alpha_x, alpha_tau, alpha_z, alpha_kappa]) + return alpha + + +def _get_message(status): + """ + Given problem status code, return a more detailed message. + + Parameters + ---------- + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + Returns + ------- + message : str + A string descriptor of the exit status of the optimization. + + """ + messages = ( + ["Optimization terminated successfully.", + "The iteration limit was reached before the algorithm converged.", + "The algorithm terminated successfully and determined that the " + "problem is infeasible.", + "The algorithm terminated successfully and determined that the " + "problem is unbounded.", + "Numerical difficulties were encountered before the problem " + "converged. Please check your problem formulation for errors, " + "independence of linear equality constraints, and reasonable " + "scaling and matrix condition numbers. If you continue to " + "encounter this error, please submit a bug report." + ]) + return messages[status] + + +def _do_step(x, y, z, tau, kappa, d_x, d_y, d_z, d_tau, d_kappa, alpha): + """ + An implementation of [4] Equation 8.9 + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. 
Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + + """ + x = x + alpha * d_x + tau = tau + alpha * d_tau + z = z + alpha * d_z + kappa = kappa + alpha * d_kappa + y = y + alpha * d_y + return x, y, z, tau, kappa + + +def _get_blind_start(shape): + """ + Return the starting point from [4] 4.4 + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + + """ + m, n = shape + x0 = np.ones(n) + y0 = np.zeros(m) + z0 = np.ones(n) + tau0 = 1 + kappa0 = 1 + return x0, y0, z0, tau0, kappa0 + + +def _indicators(A, b, c, c0, x, y, z, tau, kappa): + """ + Implementation of several equations from [4] used as indicators of + the status of optimization. + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. 
+ + """ + + # residuals for termination are relative to initial values + x0, y0, z0, tau0, kappa0 = _get_blind_start(A.shape) + + # See [4], Section 4 - The Homogeneous Algorithm, Equation 8.8 + def r_p(x, tau): + return b * tau - A.dot(x) + + def r_d(y, z, tau): + return c * tau - A.T.dot(y) - z + + def r_g(x, y, kappa): + return kappa + c.dot(x) - b.dot(y) + + # np.dot unpacks if they are arrays of size one + def mu(x, tau, z, kappa): + return (x.dot(z) + np.dot(tau, kappa)) / (len(x) + 1) + + obj = c.dot(x / tau) + c0 + + def norm(a): + return np.linalg.norm(a) + + # See [4], Section 4.5 - The Stopping Criteria + r_p0 = r_p(x0, tau0) + r_d0 = r_d(y0, z0, tau0) + r_g0 = r_g(x0, y0, kappa0) + mu_0 = mu(x0, tau0, z0, kappa0) + rho_A = norm(c.T.dot(x) - b.T.dot(y)) / (tau + norm(b.T.dot(y))) + rho_p = norm(r_p(x, tau)) / max(1, norm(r_p0)) + rho_d = norm(r_d(y, z, tau)) / max(1, norm(r_d0)) + rho_g = norm(r_g(x, y, kappa)) / max(1, norm(r_g0)) + rho_mu = mu(x, tau, z, kappa) / mu_0 + return rho_p, rho_d, rho_A, rho_g, rho_mu, obj + + +def _display_iter(rho_p, rho_d, rho_g, alpha, rho_mu, obj, header=False): + """ + Print indicators of optimization status to the console. + + Parameters + ---------- + rho_p : float + The (normalized) primal feasibility, see [4] 4.5 + rho_d : float + The (normalized) dual feasibility, see [4] 4.5 + rho_g : float + The (normalized) duality gap, see [4] 4.5 + alpha : float + The step size, see [4] 4.3 + rho_mu : float + The (normalized) path parameter, see [4] 4.5 + obj : float + The objective function value of the current iterate + header : bool + True if a header is to be printed + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. 
+ + """ + if header: + print("Primal Feasibility ", + "Dual Feasibility ", + "Duality Gap ", + "Step ", + "Path Parameter ", + "Objective ") + + # no clue why this works + fmt = '{0:<20.13}{1:<20.13}{2:<20.13}{3:<17.13}{4:<20.13}{5:<20.13}' + print(fmt.format( + float(rho_p), + float(rho_d), + float(rho_g), + alpha if isinstance(alpha, str) else float(alpha), + float(rho_mu), + float(obj))) + + +def _ip_hsd(A, b, c, c0, alpha0, beta, maxiter, disp, tol, sparse, lstsq, + sym_pos, cholesky, pc, ip, permc_spec, callback, postsolve_args): + r""" + Solve a linear programming problem in standard form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + using the interior point method of [4]. + + Parameters + ---------- + A : 2-D array + 2-D array such that ``A @ x``, gives the values of the equality + constraints at ``x``. + b : 1-D array + 1-D array of values representing the RHS of each equality constraint + (row) in ``A`` (for standard form problem). + c : 1-D array + Coefficients of the linear objective function to be minimized (for + standard form problem). + c0 : float + Constant term in objective function due to fixed (and eliminated) + variables. (Purely for display.) + alpha0 : float + The maximal step size for Mehrota's predictor-corrector search + direction; see :math:`\beta_3`of [4] Table 8.1 + beta : float + The desired reduction of the path parameter :math:`\mu` (see [6]_) + maxiter : int + The maximum number of iterations of the algorithm. + disp : bool + Set to ``True`` if indicators of optimization status are to be printed + to the console each iteration. + tol : float + Termination tolerance; see [4]_ Section 4.5. + sparse : bool + Set to ``True`` if the problem is to be treated as sparse. However, + the inputs ``A_eq`` and ``A_ub`` should nonetheless be provided as + (dense) arrays rather than sparse matrices. + lstsq : bool + Set to ``True`` if the problem is expected to be very poorly + conditioned. 
This should always be left as ``False`` unless severe + numerical difficulties are frequently encountered, and a better option + would be to improve the formulation of the problem. + sym_pos : bool + Leave ``True`` if the problem is expected to yield a well conditioned + symmetric positive definite normal equation matrix (almost always). + cholesky : bool + Set to ``True`` if the normal equations are to be solved by explicit + Cholesky decomposition followed by explicit forward/backward + substitution. This is typically faster for moderate, dense problems + that are numerically well-behaved. + pc : bool + Leave ``True`` if the predictor-corrector method of Mehrota is to be + used. This is almost always (if not always) beneficial. + ip : bool + Set to ``True`` if the improved initial point suggestion due to [4]_ + Section 4.3 is desired. It's unclear whether this is beneficial. + permc_spec : str (default = 'MMD_AT_PLUS_A') + (Has effect only with ``sparse = True``, ``lstsq = False``, ``sym_pos = + True``.) A matrix is factorized in each iteration of the algorithm. + This option specifies how to permute the columns of the matrix for + sparsity preservation. Acceptable values are: + + - ``NATURAL``: natural ordering. + - ``MMD_ATA``: minimum degree ordering on the structure of A^T A. + - ``MMD_AT_PLUS_A``: minimum degree ordering on the structure of A^T+A. + - ``COLAMD``: approximate minimum degree column ordering. + + This option can impact the convergence of the + interior point algorithm; test different values to determine which + performs best for your problem. For more information, refer to + ``scipy.sparse.linalg.splu``. + callback : callable, optional + If a callback function is provided, it will be called within each + iteration of the algorithm. 
The callback function must accept a single + `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + Current solution vector + fun : float + Current value of the objective function + success : bool + True only when an algorithm has completed successfully, + so this is always False as the callback function is called + only while the algorithm is still iterating. + slack : 1-D array + The values of the slack variables. Each slack variable + corresponds to an inequality constraint. If the slack is zero, + the corresponding constraint is active. + con : 1-D array + The (nominally zero) residuals of the equality constraints, + that is, ``b - A_eq @ x`` + phase : int + The phase of the algorithm being executed. This is always + 1 for the interior-point method because it has only one phase. + status : int + For revised simplex, this is always 0 because if a different + status is detected, the algorithm terminates. + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem. + + Returns + ------- + x_hat : float + Solution vector (for standard form problem). + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + message : str + A string descriptor of the exit status of the optimization. + iteration : int + The number of iterations taken to solve the problem + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 
197-232. + .. [6] Freund, Robert M. "Primal-Dual Interior-Point Methods for Linear + Programming based on Newton's Method." Unpublished Course Notes, + March 2004. Available 2/25/2017 at: + https://ocw.mit.edu/courses/sloan-school-of-management/15-084j-nonlinear-programming-spring-2004/lecture-notes/lec14_int_pt_mthd.pdf + + """ + + iteration = 0 + + # default initial point + x, y, z, tau, kappa = _get_blind_start(A.shape) + + # first iteration is special improvement of initial point + ip = ip if pc else False + + # [4] 4.5 + rho_p, rho_d, rho_A, rho_g, rho_mu, obj = _indicators( + A, b, c, c0, x, y, z, tau, kappa) + go = rho_p > tol or rho_d > tol or rho_A > tol # we might get lucky : ) + + if disp: + _display_iter(rho_p, rho_d, rho_g, "-", rho_mu, obj, header=True) + if callback is not None: + x_o, fun, slack, con = _postsolve(x/tau, postsolve_args) + res = OptimizeResult({'x': x_o, 'fun': fun, 'slack': slack, + 'con': con, 'nit': iteration, 'phase': 1, + 'complete': False, 'status': 0, + 'message': "", 'success': False}) + callback(res) + + status = 0 + message = "Optimization terminated successfully." 
+ + if sparse: + A = sps.csc_matrix(A) + + while go: + + iteration += 1 + + if ip: # initial point + # [4] Section 4.4 + gamma = 1 + + def eta(g): + return 1 + else: + # gamma = 0 in predictor step according to [4] 4.1 + # if predictor/corrector is off, use mean of complementarity [6] + # 5.1 / [4] Below Figure 10-4 + gamma = 0 if pc else beta * np.mean(z * x) + # [4] Section 4.1 + + def eta(g=gamma): + return 1 - g + + try: + # Solve [4] 8.6 and 8.7/8.13/8.23 + d_x, d_y, d_z, d_tau, d_kappa = _get_delta( + A, b, c, x, y, z, tau, kappa, gamma, eta, + sparse, lstsq, sym_pos, cholesky, pc, ip, permc_spec) + + if ip: # initial point + # [4] 4.4 + # Formula after 8.23 takes a full step regardless if this will + # take it negative + alpha = 1.0 + x, y, z, tau, kappa = _do_step( + x, y, z, tau, kappa, d_x, d_y, + d_z, d_tau, d_kappa, alpha) + x[x < 1] = 1 + z[z < 1] = 1 + tau = max(1, tau) + kappa = max(1, kappa) + ip = False # done with initial point + else: + # [4] Section 4.3 + alpha = _get_step(x, d_x, z, d_z, tau, + d_tau, kappa, d_kappa, alpha0) + # [4] Equation 8.9 + x, y, z, tau, kappa = _do_step( + x, y, z, tau, kappa, d_x, d_y, d_z, d_tau, d_kappa, alpha) + + except (LinAlgError, FloatingPointError, + ValueError, ZeroDivisionError): + # this can happen when sparse solver is used and presolve + # is turned off. Also observed ValueError in AppVeyor Python 3.6 + # Win32 build (PR #8676). I've never seen it otherwise. 
+ status = 4 + message = _get_message(status) + break + + # [4] 4.5 + rho_p, rho_d, rho_A, rho_g, rho_mu, obj = _indicators( + A, b, c, c0, x, y, z, tau, kappa) + go = rho_p > tol or rho_d > tol or rho_A > tol + + if disp: + _display_iter(rho_p, rho_d, rho_g, alpha, rho_mu, obj) + if callback is not None: + x_o, fun, slack, con = _postsolve(x/tau, postsolve_args) + res = OptimizeResult({'x': x_o, 'fun': fun, 'slack': slack, + 'con': con, 'nit': iteration, 'phase': 1, + 'complete': False, 'status': 0, + 'message': "", 'success': False}) + callback(res) + + # [4] 4.5 + inf1 = (rho_p < tol and rho_d < tol and rho_g < tol and tau < tol * + max(1, kappa)) + inf2 = rho_mu < tol and tau < tol * min(1, kappa) + if inf1 or inf2: + # [4] Lemma 8.4 / Theorem 8.3 + if b.transpose().dot(y) > tol: + status = 2 + else: # elif c.T.dot(x) < tol: ? Probably not necessary. + status = 3 + message = _get_message(status) + break + elif iteration >= maxiter: + status = 1 + message = _get_message(status) + break + + x_hat = x / tau + # [4] Statement after Theorem 8.2 + return x_hat, status, message, iteration + + +def _linprog_ip(c, c0, A, b, callback, postsolve_args, maxiter=1000, tol=1e-8, + disp=False, alpha0=.99995, beta=0.1, sparse=False, lstsq=False, + sym_pos=True, cholesky=None, pc=True, ip=False, + permc_spec='MMD_AT_PLUS_A', **unknown_options): + r""" + Minimize a linear objective function subject to linear + equality and non-negativity constraints using the interior point method + of [4]_. Linear programming is intended to solve problems + of the following form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + User-facing documentation is in _linprog_doc.py. + + Parameters + ---------- + c : 1-D array + Coefficients of the linear objective function to be minimized. + c0 : float + Constant term in objective function due to fixed (and eliminated) + variables. (Purely for display.) 
+ A : 2-D array + 2-D array such that ``A @ x``, gives the values of the equality + constraints at ``x``. + b : 1-D array + 1-D array of values representing the right hand side of each equality + constraint (row) in ``A``. + callback : callable, optional + Callback function to be executed once per iteration. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem. + + Options + ------- + maxiter : int (default = 1000) + The maximum number of iterations of the algorithm. + tol : float (default = 1e-8) + Termination tolerance to be used for all termination criteria; + see [4]_ Section 4.5. + disp : bool (default = False) + Set to ``True`` if indicators of optimization status are to be printed + to the console each iteration. + alpha0 : float (default = 0.99995) + The maximal step size for Mehrota's predictor-corrector search + direction; see :math:`\beta_{3}` of [4]_ Table 8.1. + beta : float (default = 0.1) + The desired reduction of the path parameter :math:`\mu` (see [6]_) + when Mehrota's predictor-corrector is not in use (uncommon). + sparse : bool (default = False) + Set to ``True`` if the problem is to be treated as sparse after + presolve. If either ``A_eq`` or ``A_ub`` is a sparse matrix, + this option will automatically be set ``True``, and the problem + will be treated as sparse even during presolve. If your constraint + matrices contain mostly zeros and the problem is not very small (less + than about 100 constraints or variables), consider setting ``True`` + or providing ``A_eq`` and ``A_ub`` as sparse matrices. + lstsq : bool (default = False) + Set to ``True`` if the problem is expected to be very poorly + conditioned. This should always be left ``False`` unless severe + numerical difficulties are encountered. Leave this at the default + unless you receive a warning message suggesting otherwise. 
+ sym_pos : bool (default = True) + Leave ``True`` if the problem is expected to yield a well conditioned + symmetric positive definite normal equation matrix + (almost always). Leave this at the default unless you receive + a warning message suggesting otherwise. + cholesky : bool (default = True) + Set to ``True`` if the normal equations are to be solved by explicit + Cholesky decomposition followed by explicit forward/backward + substitution. This is typically faster for problems + that are numerically well-behaved. + pc : bool (default = True) + Leave ``True`` if the predictor-corrector method of Mehrota is to be + used. This is almost always (if not always) beneficial. + ip : bool (default = False) + Set to ``True`` if the improved initial point suggestion due to [4]_ + Section 4.3 is desired. Whether this is beneficial or not + depends on the problem. + permc_spec : str (default = 'MMD_AT_PLUS_A') + (Has effect only with ``sparse = True``, ``lstsq = False``, ``sym_pos = + True``, and no SuiteSparse.) + A matrix is factorized in each iteration of the algorithm. + This option specifies how to permute the columns of the matrix for + sparsity preservation. Acceptable values are: + + - ``NATURAL``: natural ordering. + - ``MMD_ATA``: minimum degree ordering on the structure of A^T A. + - ``MMD_AT_PLUS_A``: minimum degree ordering on the structure of A^T+A. + - ``COLAMD``: approximate minimum degree column ordering. + + This option can impact the convergence of the + interior point algorithm; test different values to determine which + performs best for your problem. For more information, refer to + ``scipy.sparse.linalg.splu``. + unknown_options : dict + Optional arguments not used by this particular solver. If + `unknown_options` is non-empty a warning is issued listing all + unused options. + + Returns + ------- + x : 1-D array + Solution vector. 
+ status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + message : str + A string descriptor of the exit status of the optimization. + iteration : int + The number of iterations taken to solve the problem. + + Notes + ----- + This method implements the algorithm outlined in [4]_ with ideas from [8]_ + and a structure inspired by the simpler methods of [6]_. + + The primal-dual path following method begins with initial 'guesses' of + the primal and dual variables of the standard form problem and iteratively + attempts to solve the (nonlinear) Karush-Kuhn-Tucker conditions for the + problem with a gradually reduced logarithmic barrier term added to the + objective. This particular implementation uses a homogeneous self-dual + formulation, which provides certificates of infeasibility or unboundedness + where applicable. + + The default initial point for the primal and dual variables is that + defined in [4]_ Section 4.4 Equation 8.22. Optionally (by setting initial + point option ``ip=True``), an alternate (potentially improved) starting + point can be calculated according to the additional recommendations of + [4]_ Section 4.4. + + A search direction is calculated using the predictor-corrector method + (single correction) proposed by Mehrota and detailed in [4]_ Section 4.1. + (A potential improvement would be to implement the method of multiple + corrections described in [4]_ Section 4.2.) In practice, this is + accomplished by solving the normal equations, [4]_ Section 5.1 Equations + 8.31 and 8.32, derived from the Newton equations [4]_ Section 5 Equations + 8.25 (compare to [4]_ Section 4 Equations 8.6-8.8). 
The advantage of + solving the normal equations rather than 8.25 directly is that the + matrices involved are symmetric positive definite, so Cholesky + decomposition can be used rather than the more expensive LU factorization. + + With default options, the solver used to perform the factorization depends + on third-party software availability and the conditioning of the problem. + + For dense problems, solvers are tried in the following order: + + 1. ``scipy.linalg.cho_factor`` + + 2. ``scipy.linalg.solve`` with option ``sym_pos=True`` + + 3. ``scipy.linalg.solve`` with option ``sym_pos=False`` + + 4. ``scipy.linalg.lstsq`` + + For sparse problems: + + 1. ``sksparse.cholmod.cholesky`` (if scikit-sparse and SuiteSparse are installed) + + 2. ``scipy.sparse.linalg.factorized`` + (if scikit-umfpack and SuiteSparse are installed) + + 3. ``scipy.sparse.linalg.splu`` (which uses SuperLU distributed with SciPy) + + 4. ``scipy.sparse.linalg.lsqr`` + + If the solver fails for any reason, successively more robust (but slower) + solvers are attempted in the order indicated. Attempting, failing, and + re-starting factorization can be time consuming, so if the problem is + numerically challenging, options can be set to bypass solvers that are + failing. Setting ``cholesky=False`` skips to solver 2, + ``sym_pos=False`` skips to solver 3, and ``lstsq=True`` skips + to solver 4 for both sparse and dense problems. + + Potential improvements for combatting issues associated with dense + columns in otherwise sparse problems are outlined in [4]_ Section 5.3 and + [10]_ Section 4.1-4.2; the latter also discusses the alleviation of + accuracy issues associated with the substitution approach to free + variables. + + After calculating the search direction, the maximum possible step size + that does not activate the non-negativity constraints is calculated, and + the smaller of this step size and unity is applied (as in [4]_ Section + 4.1.) 
[4]_ Section 4.3 suggests improvements for choosing the step size. + + The new point is tested according to the termination conditions of [4]_ + Section 4.5. The same tolerance, which can be set using the ``tol`` option, + is used for all checks. (A potential improvement would be to expose + the different tolerances to be set independently.) If optimality, + unboundedness, or infeasibility is detected, the solve procedure + terminates; otherwise it repeats. + + The expected problem formulation differs between the top level ``linprog`` + module and the method specific solvers. The method specific solvers expect a + problem in standard form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + Whereas the top level ``linprog`` module expects a problem of form: + + Minimize:: + + c @ x + + Subject to:: + + A_ub @ x <= b_ub + A_eq @ x == b_eq + lb <= x <= ub + + where ``lb = 0`` and ``ub = None`` unless set in ``bounds``. + + The original problem contains equality, upper-bound and variable constraints + whereas the method specific solver requires equality constraints and + variable non-negativity. + + ``linprog`` module converts the original problem to standard form by + converting the simple bounds to upper bound constraints, introducing + non-negative slack variables for inequality constraints, and expressing + unbounded variables as the difference between two non-negative variables. + + + References + ---------- + .. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point + optimizer for linear programming: an implementation of the + homogeneous algorithm." High performance optimization. Springer US, + 2000. 197-232. + .. [6] Freund, Robert M. "Primal-Dual Interior-Point Methods for Linear + Programming based on Newton's Method." Unpublished Course Notes, + March 2004. Available 2/25/2017 at + https://ocw.mit.edu/courses/sloan-school-of-management/15-084j-nonlinear-programming-spring-2004/lecture-notes/lec14_int_pt_mthd.pdf + .. 
[8] Andersen, Erling D., and Knud D. Andersen. "Presolving in linear + programming." Mathematical Programming 71.2 (1995): 221-245. + .. [9] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear + programming." Athena Scientific 1 (1997): 997. + .. [10] Andersen, Erling D., et al. Implementation of interior point methods + for large scale linear programming. HEC/Universite de Geneve, 1996. + + """ + + _check_unknown_options(unknown_options) + + # These should be warnings, not errors + if (cholesky or cholesky is None) and sparse and not has_cholmod: + if cholesky: + warn("Sparse cholesky is only available with scikit-sparse. " + "Setting `cholesky = False`", + OptimizeWarning, stacklevel=3) + cholesky = False + + if sparse and lstsq: + warn("Option combination 'sparse':True and 'lstsq':True " + "is not recommended.", + OptimizeWarning, stacklevel=3) + + if lstsq and cholesky: + warn("Invalid option combination 'lstsq':True " + "and 'cholesky':True; option 'cholesky' has no effect when " + "'lstsq' is set True.", + OptimizeWarning, stacklevel=3) + + valid_permc_spec = ('NATURAL', 'MMD_ATA', 'MMD_AT_PLUS_A', 'COLAMD') + if permc_spec.upper() not in valid_permc_spec: + warn("Invalid permc_spec option: '" + str(permc_spec) + "'. " + "Acceptable values are 'NATURAL', 'MMD_ATA', 'MMD_AT_PLUS_A', " + "and 'COLAMD'. 
Reverting to default.", + OptimizeWarning, stacklevel=3) + permc_spec = 'MMD_AT_PLUS_A' + + # This can be an error + if not sym_pos and cholesky: + raise ValueError( + "Invalid option combination 'sym_pos':False " + "and 'cholesky':True: Cholesky decomposition is only possible " + "for symmetric positive definite matrices.") + + cholesky = cholesky or (cholesky is None and sym_pos and not lstsq) + + x, status, message, iteration = _ip_hsd(A, b, c, c0, alpha0, beta, + maxiter, disp, tol, sparse, + lstsq, sym_pos, cholesky, + pc, ip, permc_spec, callback, + postsolve_args) + + return x, status, message, iteration diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_rs.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_rs.py new file mode 100644 index 0000000000000000000000000000000000000000..826ceffce398a6f58bdfcd6264e2f14fc5f6f8ee --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_rs.py @@ -0,0 +1,572 @@ +"""Revised simplex method for linear programming + +The *revised simplex* method uses the method described in [1]_, except +that a factorization [2]_ of the basis matrix, rather than its inverse, +is efficiently maintained and used to solve the linear systems at each +iteration of the algorithm. + +.. versionadded:: 1.3.0 + +References +---------- +.. [1] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear + programming." Athena Scientific 1 (1997): 997. +.. [2] Bartels, Richard H. "A stabilization of the simplex method." + Journal in Numerische Mathematik 16.5 (1971): 414-434. 
+ +""" +# Author: Matt Haberland + +import numpy as np +from numpy.linalg import LinAlgError + +from scipy.linalg import solve +from ._optimize import _check_unknown_options +from ._bglu_dense import LU +from ._bglu_dense import BGLU as BGLU +from ._linprog_util import _postsolve +from ._optimize import OptimizeResult + + +def _phase_one(A, b, x0, callback, postsolve_args, maxiter, tol, disp, + maxupdate, mast, pivot): + """ + The purpose of phase one is to find an initial basic feasible solution + (BFS) to the original problem. + + Generates an auxiliary problem with a trivial BFS and an objective that + minimizes infeasibility of the original problem. Solves the auxiliary + problem using the main simplex routine (phase two). This either yields + a BFS to the original problem or determines that the original problem is + infeasible. If feasible, phase one detects redundant rows in the original + constraint matrix and removes them, then chooses additional indices as + necessary to complete a basis/BFS for the original problem. + """ + + m, n = A.shape + status = 0 + + # generate auxiliary problem to get initial BFS + A, b, c, basis, x, status = _generate_auxiliary_problem(A, b, x0, tol) + + if status == 6: + residual = c.dot(x) + iter_k = 0 + return x, basis, A, b, residual, status, iter_k + + # solve auxiliary problem + phase_one_n = n + iter_k = 0 + x, basis, status, iter_k = _phase_two(c, A, x, basis, callback, + postsolve_args, + maxiter, tol, disp, + maxupdate, mast, pivot, + iter_k, phase_one_n) + + # check for infeasibility + residual = c.dot(x) + if status == 0 and residual > tol: + status = 2 + + # drive artificial variables out of basis + # TODO: test redundant row removal better + # TODO: make solve more efficient with BGLU? This could take a while. 
+ keep_rows = np.ones(m, dtype=bool) + for basis_column in basis[basis >= n]: + B = A[:, basis] + try: + basis_finder = np.abs(solve(B, A)) # inefficient + pertinent_row = np.argmax(basis_finder[:, basis_column]) + eligible_columns = np.ones(n, dtype=bool) + eligible_columns[basis[basis < n]] = 0 + eligible_column_indices = np.where(eligible_columns)[0] + index = np.argmax(basis_finder[:, :n] + [pertinent_row, eligible_columns]) + new_basis_column = eligible_column_indices[index] + if basis_finder[pertinent_row, new_basis_column] < tol: + keep_rows[pertinent_row] = False + else: + basis[basis == basis_column] = new_basis_column + except LinAlgError: + status = 4 + + # form solution to original problem + A = A[keep_rows, :n] + basis = basis[keep_rows] + x = x[:n] + m = A.shape[0] + return x, basis, A, b, residual, status, iter_k + + +def _get_more_basis_columns(A, basis): + """ + Called when the auxiliary problem terminates with artificial columns in + the basis, which must be removed and replaced with non-artificial + columns. Finds additional columns that do not make the matrix singular. 
+ """ + m, n = A.shape + + # options for inclusion are those that aren't already in the basis + a = np.arange(m+n) + bl = np.zeros(len(a), dtype=bool) + bl[basis] = 1 + options = a[~bl] + options = options[options < n] # and they have to be non-artificial + + # form basis matrix + B = np.zeros((m, m)) + B[:, 0:len(basis)] = A[:, basis] + + if (basis.size > 0 and + np.linalg.matrix_rank(B[:, :len(basis)]) < len(basis)): + raise Exception("Basis has dependent columns") + + rank = 0 # just enter the loop + for i in range(n): # somewhat arbitrary, but we need another way out + # permute the options, and take as many as needed + new_basis = np.random.permutation(options)[:m-len(basis)] + B[:, len(basis):] = A[:, new_basis] # update the basis matrix + rank = np.linalg.matrix_rank(B) # check the rank + if rank == m: + break + + return np.concatenate((basis, new_basis)) + + +def _generate_auxiliary_problem(A, b, x0, tol): + """ + Modifies original problem to create an auxiliary problem with a trivial + initial basic feasible solution and an objective that minimizes + infeasibility in the original problem. + + Conceptually, this is done by stacking an identity matrix on the right of + the original constraint matrix, adding artificial variables to correspond + with each of these new columns, and generating a cost vector that is all + zeros except for ones corresponding with each of the new variables. + + A initial basic feasible solution is trivial: all variables are zero + except for the artificial variables, which are set equal to the + corresponding element of the right hand side `b`. + + Running the simplex method on this auxiliary problem drives all of the + artificial variables - and thus the cost - to zero if the original problem + is feasible. The original problem is declared infeasible otherwise. 
+ + Much of the complexity below is to improve efficiency by using singleton + columns in the original problem where possible, thus generating artificial + variables only as necessary, and using an initial 'guess' basic feasible + solution. + """ + status = 0 + m, n = A.shape + + if x0 is not None: + x = x0 + else: + x = np.zeros(n) + + r = b - A@x # residual; this must be all zeros for feasibility + + A[r < 0] = -A[r < 0] # express problem with RHS positive for trivial BFS + b[r < 0] = -b[r < 0] # to the auxiliary problem + r[r < 0] *= -1 + + # Rows which we will need to find a trivial way to zero. + # This should just be the rows where there is a nonzero residual. + # But then we would not necessarily have a column singleton in every row. + # This makes it difficult to find an initial basis. + if x0 is None: + nonzero_constraints = np.arange(m) + else: + nonzero_constraints = np.where(r > tol)[0] + + # these are (at least some of) the initial basis columns + basis = np.where(np.abs(x) > tol)[0] + + if len(nonzero_constraints) == 0 and len(basis) <= m: # already a BFS + c = np.zeros(n) + basis = _get_more_basis_columns(A, basis) + return A, b, c, basis, x, status + elif (len(nonzero_constraints) > m - len(basis) or + np.any(x < 0)): # can't get trivial BFS + c = np.zeros(n) + status = 6 + return A, b, c, basis, x, status + + # chooses existing columns appropriate for inclusion in initial basis + cols, rows = _select_singleton_columns(A, r) + + # find the rows we need to zero that we _can_ zero with column singletons + i_tofix = np.isin(rows, nonzero_constraints) + # these columns can't already be in the basis, though + # we are going to add them to the basis and change the corresponding x val + i_notinbasis = np.logical_not(np.isin(cols, basis)) + i_fix_without_aux = np.logical_and(i_tofix, i_notinbasis) + rows = rows[i_fix_without_aux] + cols = cols[i_fix_without_aux] + + # indices of the rows we can only zero with auxiliary variable + # these rows will get a one 
in each auxiliary column + arows = nonzero_constraints[np.logical_not( + np.isin(nonzero_constraints, rows))] + n_aux = len(arows) + acols = n + np.arange(n_aux) # indices of auxiliary columns + + basis_ng = np.concatenate((cols, acols)) # basis columns not from guess + basis_ng_rows = np.concatenate((rows, arows)) # rows we need to zero + + # add auxiliary singleton columns + A = np.hstack((A, np.zeros((m, n_aux)))) + A[arows, acols] = 1 + + # generate initial BFS + x = np.concatenate((x, np.zeros(n_aux))) + x[basis_ng] = r[basis_ng_rows]/A[basis_ng_rows, basis_ng] + + # generate costs to minimize infeasibility + c = np.zeros(n_aux + n) + c[acols] = 1 + + # basis columns correspond with nonzeros in guess, those with column + # singletons we used to zero remaining constraints, and any additional + # columns to get a full set (m columns) + basis = np.concatenate((basis, basis_ng)) + basis = _get_more_basis_columns(A, basis) # add columns as needed + + return A, b, c, basis, x, status + + +def _select_singleton_columns(A, b): + """ + Finds singleton columns for which the singleton entry is of the same sign + as the right-hand side; these columns are eligible for inclusion in an + initial basis. Determines the rows in which the singleton entries are + located. For each of these rows, returns the indices of the one singleton + column and its corresponding row. 
+ """ + # find indices of all singleton columns and corresponding row indices + column_indices = np.nonzero(np.sum(np.abs(A) != 0, axis=0) == 1)[0] + columns = A[:, column_indices] # array of singleton columns + row_indices = np.zeros(len(column_indices), dtype=int) + nonzero_rows, nonzero_columns = np.nonzero(columns) + row_indices[nonzero_columns] = nonzero_rows # corresponding row indices + + # keep only singletons with entries that have same sign as RHS + # this is necessary because all elements of BFS must be non-negative + same_sign = A[row_indices, column_indices]*b[row_indices] >= 0 + column_indices = column_indices[same_sign][::-1] + row_indices = row_indices[same_sign][::-1] + # Reversing the order so that steps below select rightmost columns + # for initial basis, which will tend to be slack variables. (If the + # guess corresponds with a basic feasible solution but a constraint + # is not satisfied with the corresponding slack variable zero, the slack + # variable must be basic.) + + # for each row, keep rightmost singleton column with an entry in that row + unique_row_indices, first_columns = np.unique(row_indices, + return_index=True) + return column_indices[first_columns], unique_row_indices + + +def _find_nonzero_rows(A, tol): + """ + Returns logical array indicating the locations of rows with at least + one nonzero element. + """ + return np.any(np.abs(A) > tol, axis=1) + + +def _select_enter_pivot(c_hat, bl, a, rule="bland", tol=1e-12): + """ + Selects a pivot to enter the basis. Currently Bland's rule - the smallest + index that has a negative reduced cost - is the default. + """ + if rule.lower() == "mrc": # index with minimum reduced cost + return a[~bl][np.argmin(c_hat)] + else: # smallest index w/ negative reduced cost + return a[~bl][c_hat < -tol][0] + + +def _display_iter(phase, iteration, slack, con, fun): + """ + Print indicators of optimization status to the console. 
+ """ + header = True if not iteration % 20 else False + + if header: + print("Phase", + "Iteration", + "Minimum Slack ", + "Constraint Residual", + "Objective ") + + # := -tol): # all reduced costs positive -> terminate + break + + j = _select_enter_pivot(c_hat, bl, a, rule=pivot, tol=tol) + u = B.solve(A[:, j]) # similar to u = solve(B, A[:, j]) + + i = u > tol # if none of the u are positive, unbounded + if not np.any(i): + status = 3 + break + + th = xb[i]/u[i] + l = np.argmin(th) # implicitly selects smallest subscript + th_star = th[l] # step size + + x[b] = x[b] - th_star*u # take step + x[j] = th_star + B.update(ab[i][l], j) # modify basis + b = B.b # similar to b[ab[i][l]] = + + else: + # If the end of the for loop is reached (without a break statement), + # then another step has been taken, so the iteration counter should + # increment, info should be displayed, and callback should be called. + iteration += 1 + status = 1 + if disp or callback is not None: + _display_and_callback(phase_one_n, x, postsolve_args, status, + iteration, disp, callback) + + return x, b, status, iteration + + +def _linprog_rs(c, c0, A, b, x0, callback, postsolve_args, + maxiter=5000, tol=1e-12, disp=False, + maxupdate=10, mast=False, pivot="mrc", + **unknown_options): + """ + Solve the following linear programming problem via a two-phase + revised simplex algorithm.:: + + minimize: c @ x + + subject to: A @ x == b + 0 <= x < oo + + User-facing documentation is in _linprog_doc.py. + + Parameters + ---------- + c : 1-D array + Coefficients of the linear objective function to be minimized. + c0 : float + Constant term in objective function due to fixed (and eliminated) + variables. (Currently unused.) + A : 2-D array + 2-D array which, when matrix-multiplied by ``x``, gives the values of + the equality constraints at ``x``. + b : 1-D array + 1-D array of values representing the RHS of each equality constraint + (row) in ``A_eq``. 
+ x0 : 1-D array, optional + Starting values of the independent variables, which will be refined by + the optimization algorithm. For the revised simplex method, these must + correspond with a basic feasible solution. + callback : callable, optional + If a callback function is provided, it will be called within each + iteration of the algorithm. The callback function must accept a single + `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + Current solution vector. + fun : float + Current value of the objective function ``c @ x``. + success : bool + True only when an algorithm has completed successfully, + so this is always False as the callback function is called + only while the algorithm is still iterating. + slack : 1-D array + The values of the slack variables. Each slack variable + corresponds to an inequality constraint. If the slack is zero, + the corresponding constraint is active. + con : 1-D array + The (nominally zero) residuals of the equality constraints, + that is, ``b - A_eq @ x``. + phase : int + The phase of the algorithm being executed. + status : int + For revised simplex, this is always 0 because if a different + status is detected, the algorithm terminates. + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem. + + Options + ------- + maxiter : int + The maximum number of iterations to perform in either phase. + tol : float + The tolerance which determines when a solution is "close enough" to + zero in Phase 1 to be considered a basic feasible solution or close + enough to positive to serve as an optimal solution. + disp : bool + Set to ``True`` if indicators of optimization status are to be printed + to the console each iteration. 
+ maxupdate : int + The maximum number of updates performed on the LU factorization. + After this many updates is reached, the basis matrix is factorized + from scratch. + mast : bool + Minimize Amortized Solve Time. If enabled, the average time to solve + a linear system using the basis factorization is measured. Typically, + the average solve time will decrease with each successive solve after + initial factorization, as factorization takes much more time than the + solve operation (and updates). Eventually, however, the updated + factorization becomes sufficiently complex that the average solve time + begins to increase. When this is detected, the basis is refactorized + from scratch. Enable this option to maximize speed at the risk of + nondeterministic behavior. Ignored if ``maxupdate`` is 0. + pivot : "mrc" or "bland" + Pivot rule: Minimum Reduced Cost (default) or Bland's rule. Choose + Bland's rule if iteration limit is reached and cycling is suspected. + unknown_options : dict + Optional arguments not used by this particular solver. If + `unknown_options` is non-empty a warning is issued listing all + unused options. + + Returns + ------- + x : 1-D array + Solution vector. + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Numerical difficulties encountered + 5 : No constraints; turn presolve on + 6 : Guess x0 cannot be converted to a basic feasible solution + + message : str + A string descriptor of the exit status of the optimization. + iteration : int + The number of iterations taken to solve the problem. 
+ """ + + _check_unknown_options(unknown_options) + + messages = ["Optimization terminated successfully.", + "Iteration limit reached.", + "The problem appears infeasible, as the phase one auxiliary " + "problem terminated successfully with a residual of {0:.1e}, " + "greater than the tolerance {1} required for the solution to " + "be considered feasible. Consider increasing the tolerance to " + "be greater than {0:.1e}. If this tolerance is unnaceptably " + "large, the problem is likely infeasible.", + "The problem is unbounded, as the simplex algorithm found " + "a basic feasible solution from which there is a direction " + "with negative reduced cost in which all decision variables " + "increase.", + "Numerical difficulties encountered; consider trying " + "method='interior-point'.", + "Problems with no constraints are trivially solved; please " + "turn presolve on.", + "The guess x0 cannot be converted to a basic feasible " + "solution. " + ] + + if A.size == 0: # address test_unbounded_below_no_presolve_corrected + return np.zeros(c.shape), 5, messages[5], 0 + + x, basis, A, b, residual, status, iteration = ( + _phase_one(A, b, x0, callback, postsolve_args, + maxiter, tol, disp, maxupdate, mast, pivot)) + + if status == 0: + x, basis, status, iteration = _phase_two(c, A, x, basis, callback, + postsolve_args, + maxiter, tol, disp, + maxupdate, mast, pivot, + iteration) + + return x, status, messages[status].format(residual, tol), iteration diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_simplex.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_simplex.py new file mode 100644 index 0000000000000000000000000000000000000000..b13418c369864ca528efe76d9f45c07da2bcf680 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_simplex.py @@ -0,0 +1,661 @@ +"""Simplex method for linear programming + +The *simplex* method uses a traditional, full-tableau implementation of +Dantzig's 
simplex algorithm [1]_, [2]_ (*not* the Nelder-Mead simplex). +This algorithm is included for backwards compatibility and educational +purposes. + + .. versionadded:: 0.15.0 + +Warnings +-------- + +The simplex method may encounter numerical difficulties when pivot +values are close to the specified tolerance. If encountered try +remove any redundant constraints, change the pivot strategy to Bland's +rule or increase the tolerance value. + +Alternatively, more robust methods maybe be used. See +:ref:`'interior-point' ` and +:ref:`'revised simplex' `. + +References +---------- +.. [1] Dantzig, George B., Linear programming and extensions. Rand + Corporation Research Study Princeton Univ. Press, Princeton, NJ, + 1963 +.. [2] Hillier, S.H. and Lieberman, G.J. (1995), "Introduction to + Mathematical Programming", McGraw-Hill, Chapter 4. +""" + +import numpy as np +from warnings import warn +from ._optimize import OptimizeResult, OptimizeWarning, _check_unknown_options +from ._linprog_util import _postsolve + + +def _pivot_col(T, tol=1e-9, bland=False): + """ + Given a linear programming simplex tableau, determine the column + of the variable to enter the basis. + + Parameters + ---------- + T : 2-D array + A 2-D array representing the simplex tableau, T, corresponding to the + linear programming problem. It should have the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0]] + + for a Phase 2 problem, or the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0], + [c'[0], c'[1], ..., c'[n_total], 0]] + + for a Phase 1 problem (a problem in which a basic feasible solution is + sought prior to maximizing the actual objective. ``T`` is modified in + place by ``_solve_simplex``. 
+ tol : float + Elements in the objective row larger than -tol will not be considered + for pivoting. Nominally this value is zero, but numerical issues + cause a tolerance about zero to be necessary. + bland : bool + If True, use Bland's rule for selection of the column (select the + first column with a negative coefficient in the objective row, + regardless of magnitude). + + Returns + ------- + status: bool + True if a suitable pivot column was found, otherwise False. + A return of False indicates that the linear programming simplex + algorithm is complete. + col: int + The index of the column of the pivot element. + If status is False, col will be returned as nan. + """ + ma = np.ma.masked_where(T[-1, :-1] >= -tol, T[-1, :-1], copy=False) + if ma.count() == 0: + return False, np.nan + if bland: + # ma.mask is sometimes 0d + return True, np.nonzero(np.logical_not(np.atleast_1d(ma.mask)))[0][0] + return True, np.ma.nonzero(ma == ma.min())[0][0] + + +def _pivot_row(T, basis, pivcol, phase, tol=1e-9, bland=False): + """ + Given a linear programming simplex tableau, determine the row for the + pivot operation. + + Parameters + ---------- + T : 2-D array + A 2-D array representing the simplex tableau, T, corresponding to the + linear programming problem. It should have the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0]] + + for a Phase 2 problem, or the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0], + [c'[0], c'[1], ..., c'[n_total], 0]] + + for a Phase 1 problem (a Problem in which a basic feasible solution is + sought prior to maximizing the actual objective. ``T`` is modified in + place by ``_solve_simplex``. 
+ basis : array + A list of the current basic variables. + pivcol : int + The index of the pivot column. + phase : int + The phase of the simplex algorithm (1 or 2). + tol : float + Elements in the pivot column smaller than tol will not be considered + for pivoting. Nominally this value is zero, but numerical issues + cause a tolerance about zero to be necessary. + bland : bool + If True, use Bland's rule for selection of the row (if more than one + row can be used, choose the one with the lowest variable index). + + Returns + ------- + status: bool + True if a suitable pivot row was found, otherwise False. A return + of False indicates that the linear programming problem is unbounded. + row: int + The index of the row of the pivot element. If status is False, row + will be returned as nan. + """ + if phase == 1: + k = 2 + else: + k = 1 + ma = np.ma.masked_where(T[:-k, pivcol] <= tol, T[:-k, pivcol], copy=False) + if ma.count() == 0: + return False, np.nan + mb = np.ma.masked_where(T[:-k, pivcol] <= tol, T[:-k, -1], copy=False) + q = mb / ma + min_rows = np.ma.nonzero(q == q.min())[0] + if bland: + return True, min_rows[np.argmin(np.take(basis, min_rows))] + return True, min_rows[0] + + +def _apply_pivot(T, basis, pivrow, pivcol, tol=1e-9): + """ + Pivot the simplex tableau inplace on the element given by (pivrow, pivol). + The entering variable corresponds to the column given by pivcol forcing + the variable basis[pivrow] to leave the basis. + + Parameters + ---------- + T : 2-D array + A 2-D array representing the simplex tableau, T, corresponding to the + linear programming problem. It should have the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0]] + + for a Phase 2 problem, or the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . 
+ [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0], + [c'[0], c'[1], ..., c'[n_total], 0]] + + for a Phase 1 problem (a problem in which a basic feasible solution is + sought prior to maximizing the actual objective. ``T`` is modified in + place by ``_solve_simplex``. + basis : 1-D array + An array of the indices of the basic variables, such that basis[i] + contains the column corresponding to the basic variable for row i. + Basis is modified in place by _apply_pivot. + pivrow : int + Row index of the pivot. + pivcol : int + Column index of the pivot. + """ + basis[pivrow] = pivcol + pivval = T[pivrow, pivcol] + T[pivrow] = T[pivrow] / pivval + for irow in range(T.shape[0]): + if irow != pivrow: + T[irow] = T[irow] - T[pivrow] * T[irow, pivcol] + + # The selected pivot should never lead to a pivot value less than the tol. + if np.isclose(pivval, tol, atol=0, rtol=1e4): + message = ( + f"The pivot operation produces a pivot value of:{pivval: .1e}, " + "which is only slightly greater than the specified " + f"tolerance{tol: .1e}. This may lead to issues regarding the " + "numerical stability of the simplex method. " + "Removing redundant constraints, changing the pivot strategy " + "via Bland's rule or increasing the tolerance may " + "help reduce the issue.") + warn(message, OptimizeWarning, stacklevel=5) + + +def _solve_simplex(T, n, basis, callback, postsolve_args, + maxiter=1000, tol=1e-9, phase=2, bland=False, nit0=0, + ): + """ + Solve a linear programming problem in "standard form" using the Simplex + Method. Linear Programming is intended to solve the following problem form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + Parameters + ---------- + T : 2-D array + A 2-D array representing the simplex tableau, T, corresponding to the + linear programming problem. It should have the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . 
+ [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0]] + + for a Phase 2 problem, or the form: + + [[A[0, 0], A[0, 1], ..., A[0, n_total], b[0]], + [A[1, 0], A[1, 1], ..., A[1, n_total], b[1]], + . + . + . + [A[m, 0], A[m, 1], ..., A[m, n_total], b[m]], + [c[0], c[1], ..., c[n_total], 0], + [c'[0], c'[1], ..., c'[n_total], 0]] + + for a Phase 1 problem (a problem in which a basic feasible solution is + sought prior to maximizing the actual objective. ``T`` is modified in + place by ``_solve_simplex``. + n : int + The number of true variables in the problem. + basis : 1-D array + An array of the indices of the basic variables, such that basis[i] + contains the column corresponding to the basic variable for row i. + Basis is modified in place by _solve_simplex + callback : callable, optional + If a callback function is provided, it will be called within each + iteration of the algorithm. The callback must accept a + `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + Current solution vector + fun : float + Current value of the objective function + success : bool + True only when a phase has completed successfully. This + will be False for most iterations. + slack : 1-D array + The values of the slack variables. Each slack variable + corresponds to an inequality constraint. If the slack is zero, + the corresponding constraint is active. + con : 1-D array + The (nominally zero) residuals of the equality constraints, + that is, ``b - A_eq @ x`` + phase : int + The phase of the optimization being executed. In phase 1 a basic + feasible solution is sought and the T has an additional row + representing an alternate objective function. 
+ status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem. + maxiter : int + The maximum number of iterations to perform before aborting the + optimization. + tol : float + The tolerance which determines when a solution is "close enough" to + zero in Phase 1 to be considered a basic feasible solution or close + enough to positive to serve as an optimal solution. + phase : int + The phase of the optimization being executed. In phase 1 a basic + feasible solution is sought and the T has an additional row + representing an alternate objective function. + bland : bool + If True, choose pivots using Bland's rule [3]_. In problems which + fail to converge due to cycling, using Bland's rule can provide + convergence at the expense of a less optimal path about the simplex. + nit0 : int + The initial iteration number used to keep an accurate iteration total + in a two-phase problem. + + Returns + ------- + nit : int + The number of iterations. Used to keep an accurate iteration total + in the two-phase problem. 
+ status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + """ + nit = nit0 + status = 0 + message = '' + complete = False + + if phase == 1: + m = T.shape[1]-2 + elif phase == 2: + m = T.shape[1]-1 + else: + raise ValueError("Argument 'phase' to _solve_simplex must be 1 or 2") + + if phase == 2: + # Check if any artificial variables are still in the basis. + # If yes, check if any coefficients from this row and a column + # corresponding to one of the non-artificial variable is non-zero. + # If found, pivot at this term. If not, start phase 2. + # Do this for all artificial variables in the basis. + # Ref: "An Introduction to Linear Programming and Game Theory" + # by Paul R. Thie, Gerard E. Keough, 3rd Ed, + # Chapter 3.7 Redundant Systems (pag 102) + for pivrow in [row for row in range(basis.size) + if basis[row] > T.shape[1] - 2]: + non_zero_row = [col for col in range(T.shape[1] - 1) + if abs(T[pivrow, col]) > tol] + if len(non_zero_row) > 0: + pivcol = non_zero_row[0] + _apply_pivot(T, basis, pivrow, pivcol, tol) + nit += 1 + + if len(basis[:m]) == 0: + solution = np.empty(T.shape[1] - 1, dtype=np.float64) + else: + solution = np.empty(max(T.shape[1] - 1, max(basis[:m]) + 1), + dtype=np.float64) + + while not complete: + # Find the pivot column + pivcol_found, pivcol = _pivot_col(T, tol, bland) + if not pivcol_found: + pivcol = np.nan + pivrow = np.nan + status = 0 + complete = True + else: + # Find the pivot row + pivrow_found, pivrow = _pivot_row(T, basis, pivcol, phase, tol, bland) + if not pivrow_found: + status = 3 + complete = True + + if callback is not None: + solution[:] = 0 + solution[basis[:n]] = T[:n, -1] + x = solution[:m] + x, fun, slack, con = _postsolve( + x, postsolve_args + ) + res = OptimizeResult({ + 'x': x, + 'fun': 
fun, + 'slack': slack, + 'con': con, + 'status': status, + 'message': message, + 'nit': nit, + 'success': status == 0 and complete, + 'phase': phase, + 'complete': complete, + }) + callback(res) + + if not complete: + if nit >= maxiter: + # Iteration limit exceeded + status = 1 + complete = True + else: + _apply_pivot(T, basis, pivrow, pivcol, tol) + nit += 1 + return nit, status + + +def _linprog_simplex(c, c0, A, b, callback, postsolve_args, + maxiter=1000, tol=1e-9, disp=False, bland=False, + **unknown_options): + """ + Minimize a linear objective function subject to linear equality and + non-negativity constraints using the two phase simplex method. + Linear programming is intended to solve problems of the following form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + User-facing documentation is in _linprog_doc.py. + + Parameters + ---------- + c : 1-D array + Coefficients of the linear objective function to be minimized. + c0 : float + Constant term in objective function due to fixed (and eliminated) + variables. (Purely for display.) + A : 2-D array + 2-D array such that ``A @ x``, gives the values of the equality + constraints at ``x``. + b : 1-D array + 1-D array of values representing the right hand side of each equality + constraint (row) in ``A``. + callback : callable, optional + If a callback function is provided, it will be called within each + iteration of the algorithm. The callback function must accept a single + `scipy.optimize.OptimizeResult` consisting of the following fields: + + x : 1-D array + Current solution vector + fun : float + Current value of the objective function + success : bool + True when an algorithm has completed successfully. + slack : 1-D array + The values of the slack variables. Each slack variable + corresponds to an inequality constraint. If the slack is zero, + the corresponding constraint is active. 
+ con : 1-D array + The (nominally zero) residuals of the equality constraints, + that is, ``b - A_eq @ x`` + phase : int + The phase of the algorithm being executed. + status : int + An integer representing the status of the optimization:: + + 0 : Algorithm proceeding nominally + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + nit : int + The number of iterations performed. + message : str + A string descriptor of the exit status of the optimization. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem. + + Options + ------- + maxiter : int + The maximum number of iterations to perform. + disp : bool + If True, print exit status message to sys.stdout + tol : float + The tolerance which determines when a solution is "close enough" to + zero in Phase 1 to be considered a basic feasible solution or close + enough to positive to serve as an optimal solution. + bland : bool + If True, use Bland's anti-cycling rule [3]_ to choose pivots to + prevent cycling. If False, choose pivots which should lead to a + converged solution more quickly. The latter method is subject to + cycling (non-convergence) in rare instances. + unknown_options : dict + Optional arguments not used by this particular solver. If + `unknown_options` is non-empty a warning is issued listing all + unused options. + + Returns + ------- + x : 1-D array + Solution vector. + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + message : str + A string descriptor of the exit status of the optimization. + iteration : int + The number of iterations taken to solve the problem. 
+ + References + ---------- + .. [1] Dantzig, George B., Linear programming and extensions. Rand + Corporation Research Study Princeton Univ. Press, Princeton, NJ, + 1963 + .. [2] Hillier, S.H. and Lieberman, G.J. (1995), "Introduction to + Mathematical Programming", McGraw-Hill, Chapter 4. + .. [3] Bland, Robert G. New finite pivoting rules for the simplex method. + Mathematics of Operations Research (2), 1977: pp. 103-107. + + + Notes + ----- + The expected problem formulation differs between the top level ``linprog`` + module and the method specific solvers. The method specific solvers expect a + problem in standard form: + + Minimize:: + + c @ x + + Subject to:: + + A @ x == b + x >= 0 + + Whereas the top level ``linprog`` module expects a problem of form: + + Minimize:: + + c @ x + + Subject to:: + + A_ub @ x <= b_ub + A_eq @ x == b_eq + lb <= x <= ub + + where ``lb = 0`` and ``ub = None`` unless set in ``bounds``. + + The original problem contains equality, upper-bound and variable constraints + whereas the method specific solver requires equality constraints and + variable non-negativity. + + ``linprog`` module converts the original problem to standard form by + converting the simple bounds to upper bound constraints, introducing + non-negative slack variables for inequality constraints, and expressing + unbounded variables as the difference between two non-negative variables. + """ + _check_unknown_options(unknown_options) + + status = 0 + messages = {0: "Optimization terminated successfully.", + 1: "Iteration limit reached.", + 2: "Optimization failed. Unable to find a feasible" + " starting point.", + 3: "Optimization failed. The problem appears to be unbounded.", + 4: "Optimization failed. Singular matrix encountered."} + + n, m = A.shape + + # All constraints must have b >= 0. 
+ is_negative_constraint = np.less(b, 0) + A[is_negative_constraint] *= -1 + b[is_negative_constraint] *= -1 + + # As all constraints are equality constraints the artificial variables + # will also be basic variables. + av = np.arange(n) + m + basis = av.copy() + + # Format the phase one tableau by adding artificial variables and stacking + # the constraints, the objective row and pseudo-objective row. + row_constraints = np.hstack((A, np.eye(n), b[:, np.newaxis])) + row_objective = np.hstack((c, np.zeros(n), c0)) + row_pseudo_objective = -row_constraints.sum(axis=0) + row_pseudo_objective[av] = 0 + T = np.vstack((row_constraints, row_objective, row_pseudo_objective)) + + nit1, status = _solve_simplex(T, n, basis, callback=callback, + postsolve_args=postsolve_args, + maxiter=maxiter, tol=tol, phase=1, + bland=bland + ) + # if pseudo objective is zero, remove the last row from the tableau and + # proceed to phase 2 + nit2 = nit1 + if abs(T[-1, -1]) < tol: + # Remove the pseudo-objective row from the tableau + T = T[:-1, :] + # Remove the artificial variable columns from the tableau + T = np.delete(T, av, 1) + else: + # Failure to find a feasible starting point + status = 2 + messages[status] = ( + "Phase 1 of the simplex method failed to find a feasible " + "solution. The pseudo-objective function evaluates to {0:.1e} " + "which exceeds the required tolerance of {1} for a solution to be " + "considered 'close enough' to zero to be a basic solution. " + "Consider increasing the tolerance to be greater than {0:.1e}. 
" + "If this tolerance is unacceptably large the problem may be " + "infeasible.".format(abs(T[-1, -1]), tol) + ) + + if status == 0: + # Phase 2 + nit2, status = _solve_simplex(T, n, basis, callback=callback, + postsolve_args=postsolve_args, + maxiter=maxiter, tol=tol, phase=2, + bland=bland, nit0=nit1 + ) + + solution = np.zeros(n + m) + solution[basis[:n]] = T[:n, -1] + x = solution[:m] + + return x, status, messages[status], int(nit2) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_util.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_util.py new file mode 100644 index 0000000000000000000000000000000000000000..3d25cee4d9ce6b1c5a40cc474d97ec13474ebafc --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_linprog_util.py @@ -0,0 +1,1522 @@ +""" +Method agnostic utility functions for linear programming +""" + +import numpy as np +import scipy.sparse as sps +from warnings import warn +from ._optimize import OptimizeWarning +from scipy.optimize._remove_redundancy import ( + _remove_redundancy_svd, _remove_redundancy_pivot_sparse, + _remove_redundancy_pivot_dense, _remove_redundancy_id + ) +from collections import namedtuple + +_LPProblem = namedtuple('_LPProblem', + 'c A_ub b_ub A_eq b_eq bounds x0 integrality') +_LPProblem.__new__.__defaults__ = (None,) * 7 # make c the only required arg +_LPProblem.__doc__ = \ + """ Represents a linear-programming problem. + + Attributes + ---------- + c : 1D array + The coefficients of the linear objective function to be minimized. + A_ub : 2D array, optional + The inequality constraint matrix. Each row of ``A_ub`` specifies the + coefficients of a linear inequality constraint on ``x``. + b_ub : 1D array, optional + The inequality constraint vector. Each element represents an + upper bound on the corresponding value of ``A_ub @ x``. + A_eq : 2D array, optional + The equality constraint matrix. 
Each row of ``A_eq`` specifies the + coefficients of a linear equality constraint on ``x``. + b_eq : 1D array, optional + The equality constraint vector. Each element of ``A_eq @ x`` must equal + the corresponding element of ``b_eq``. + bounds : various valid formats, optional + The bounds of ``x``, as ``min`` and ``max`` pairs. + If bounds are specified for all N variables separately, valid formats + are: + * a 2D array (N x 2); + * a sequence of N sequences, each with 2 values. + If all variables have the same bounds, the bounds can be specified as + a 1-D or 2-D array or sequence with 2 scalar values. + If all variables have a lower bound of 0 and no upper bound, the bounds + parameter can be omitted (or given as None). + Absent lower and/or upper bounds can be specified as -numpy.inf (no + lower bound), numpy.inf (no upper bound) or None (both). + x0 : 1D array, optional + Guess values of the decision variables, which will be refined by + the optimization algorithm. This argument is currently used only by the + 'revised simplex' method, and can only be used if `x0` represents a + basic feasible solution. + integrality : 1-D array or int, optional + Indicates the type of integrality constraint on each decision variable. + + ``0`` : Continuous variable; no integrality constraint. + + ``1`` : Integer variable; decision variable must be an integer + within `bounds`. + + ``2`` : Semi-continuous variable; decision variable must be within + `bounds` or take value ``0``. + + ``3`` : Semi-integer variable; decision variable must be an integer + within `bounds` or take value ``0``. + + By default, all variables are continuous. + + For mixed integrality constraints, supply an array of shape `c.shape`. + To infer a constraint on each decision variable from shorter inputs, + the argument will be broadcasted to `c.shape` using `np.broadcast_to`. + + This argument is currently used only by the ``'highs'`` method and + ignored otherwise. 
+ + Notes + ----- + This namedtuple supports 2 ways of initialization: + >>> lp1 = _LPProblem(c=[-1, 4], A_ub=[[-3, 1], [1, 2]], b_ub=[6, 4]) + >>> lp2 = _LPProblem([-1, 4], [[-3, 1], [1, 2]], [6, 4]) + + Note that only ``c`` is a required argument here, whereas all other arguments + ``A_ub``, ``b_ub``, ``A_eq``, ``b_eq``, ``bounds``, ``x0`` are optional with + default values of None. + For example, ``A_eq`` and ``b_eq`` can be set without ``A_ub`` or ``b_ub``: + >>> lp3 = _LPProblem(c=[-1, 4], A_eq=[[2, 1]], b_eq=[10]) + """ + + +def _check_sparse_inputs(options, meth, A_ub, A_eq): + """ + Check the provided ``A_ub`` and ``A_eq`` matrices conform to the specified + optional sparsity variables. + + Parameters + ---------- + A_ub : 2-D array, optional + 2-D array such that ``A_ub @ x`` gives the values of the upper-bound + inequality constraints at ``x``. + A_eq : 2-D array, optional + 2-D array such that ``A_eq @ x`` gives the values of the equality + constraints at ``x``. + options : dict + A dictionary of solver options. All methods accept the following + generic options: + + maxiter : int + Maximum number of iterations to perform. + disp : bool + Set to True to print convergence messages. + + For method-specific options, see :func:`show_options('linprog')`. + method : str, optional + The algorithm used to solve the standard form problem. + + Returns + ------- + A_ub : 2-D array, optional + 2-D array such that ``A_ub @ x`` gives the values of the upper-bound + inequality constraints at ``x``. + A_eq : 2-D array, optional + 2-D array such that ``A_eq @ x`` gives the values of the equality + constraints at ``x``. + options : dict + A dictionary of solver options. All methods accept the following + generic options: + + maxiter : int + Maximum number of iterations to perform. + disp : bool + Set to True to print convergence messages. + + For method-specific options, see :func:`show_options('linprog')`. 
+ """ + # This is an undocumented option for unit testing sparse presolve + _sparse_presolve = options.pop('_sparse_presolve', False) + if _sparse_presolve and A_eq is not None: + A_eq = sps.coo_matrix(A_eq) + if _sparse_presolve and A_ub is not None: + A_ub = sps.coo_matrix(A_ub) + + sparse_constraint = sps.issparse(A_eq) or sps.issparse(A_ub) + + preferred_methods = {"highs", "highs-ds", "highs-ipm"} + dense_methods = {"simplex", "revised simplex"} + if meth in dense_methods and sparse_constraint: + raise ValueError(f"Method '{meth}' does not support sparse " + "constraint matrices. Please consider using one of " + f"{preferred_methods}.") + + sparse = options.get('sparse', False) + if not sparse and sparse_constraint and meth == 'interior-point': + options['sparse'] = True + warn("Sparse constraint matrix detected; setting 'sparse':True.", + OptimizeWarning, stacklevel=4) + return options, A_ub, A_eq + + +def _format_A_constraints(A, n_x, sparse_lhs=False): + """Format the left hand side of the constraints to a 2-D array + + Parameters + ---------- + A : 2-D array + 2-D array such that ``A @ x`` gives the values of the upper-bound + (in)equality constraints at ``x``. + n_x : int + The number of variables in the linear programming problem. + sparse_lhs : bool + Whether either of `A_ub` or `A_eq` are sparse. If true return a + coo_matrix instead of a numpy array. + + Returns + ------- + np.ndarray or sparse.coo_matrix + 2-D array such that ``A @ x`` gives the values of the upper-bound + (in)equality constraints at ``x``. 
+ + """ + if sparse_lhs: + return sps.coo_matrix( + (0, n_x) if A is None else A, dtype=float, copy=True + ) + elif A is None: + return np.zeros((0, n_x), dtype=float) + else: + return np.array(A, dtype=float, copy=True) + + +def _format_b_constraints(b): + """Format the upper bounds of the constraints to a 1-D array + + Parameters + ---------- + b : 1-D array + 1-D array of values representing the upper-bound of each (in)equality + constraint (row) in ``A``. + + Returns + ------- + 1-D np.array + 1-D array of values representing the upper-bound of each (in)equality + constraint (row) in ``A``. + + """ + if b is None: + return np.array([], dtype=float) + b = np.array(b, dtype=float, copy=True).squeeze() + return b if b.size != 1 else b.reshape(-1) + + +def _clean_inputs(lp): + """ + Given user inputs for a linear programming problem, return the + objective vector, upper bound constraints, equality constraints, + and simple bounds in a preferred format. + + Parameters + ---------- + lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: + + c : 1D array + The coefficients of the linear objective function to be minimized. + A_ub : 2D array, optional + The inequality constraint matrix. Each row of ``A_ub`` specifies the + coefficients of a linear inequality constraint on ``x``. + b_ub : 1D array, optional + The inequality constraint vector. Each element represents an + upper bound on the corresponding value of ``A_ub @ x``. + A_eq : 2D array, optional + The equality constraint matrix. Each row of ``A_eq`` specifies the + coefficients of a linear equality constraint on ``x``. + b_eq : 1D array, optional + The equality constraint vector. Each element of ``A_eq @ x`` must equal + the corresponding element of ``b_eq``. + bounds : various valid formats, optional + The bounds of ``x``, as ``min`` and ``max`` pairs. 
+ If bounds are specified for all N variables separately, valid formats are: + * a 2D array (2 x N or N x 2); + * a sequence of N sequences, each with 2 values. + If all variables have the same bounds, a single pair of values can + be specified. Valid formats are: + * a sequence with 2 scalar values; + * a sequence with a single element containing 2 scalar values. + If all variables have a lower bound of 0 and no upper bound, the bounds + parameter can be omitted (or given as None). + x0 : 1D array, optional + Guess values of the decision variables, which will be refined by + the optimization algorithm. This argument is currently used only by the + 'revised simplex' method, and can only be used if `x0` represents a + basic feasible solution. + + Returns + ------- + lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: + + c : 1D array + The coefficients of the linear objective function to be minimized. + A_ub : 2D array, optional + The inequality constraint matrix. Each row of ``A_ub`` specifies the + coefficients of a linear inequality constraint on ``x``. + b_ub : 1D array, optional + The inequality constraint vector. Each element represents an + upper bound on the corresponding value of ``A_ub @ x``. + A_eq : 2D array, optional + The equality constraint matrix. Each row of ``A_eq`` specifies the + coefficients of a linear equality constraint on ``x``. + b_eq : 1D array, optional + The equality constraint vector. Each element of ``A_eq @ x`` must equal + the corresponding element of ``b_eq``. + bounds : 2D array + The bounds of ``x``, as ``min`` and ``max`` pairs, one for each of the N + elements of ``x``. The N x 2 array contains lower bounds in the first + column and upper bounds in the 2nd. Unbounded variables have lower + bound -np.inf and/or upper bound np.inf. + x0 : 1D array, optional + Guess values of the decision variables, which will be refined by + the optimization algorithm. 
This argument is currently used only by the + 'revised simplex' method, and can only be used if `x0` represents a + basic feasible solution. + + """ + c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality = lp + + if c is None: + raise TypeError + + try: + c = np.array(c, dtype=np.float64, copy=True).squeeze() + except ValueError as e: + raise TypeError( + "Invalid input for linprog: c must be a 1-D array of numerical " + "coefficients") from e + else: + # If c is a single value, convert it to a 1-D array. + if c.size == 1: + c = c.reshape(-1) + + n_x = len(c) + if n_x == 0 or len(c.shape) != 1: + raise ValueError( + "Invalid input for linprog: c must be a 1-D array and must " + "not have more than one non-singleton dimension") + if not np.isfinite(c).all(): + raise ValueError( + "Invalid input for linprog: c must not contain values " + "inf, nan, or None") + + sparse_lhs = sps.issparse(A_eq) or sps.issparse(A_ub) + try: + A_ub = _format_A_constraints(A_ub, n_x, sparse_lhs=sparse_lhs) + except ValueError as e: + raise TypeError( + "Invalid input for linprog: A_ub must be a 2-D array " + "of numerical values") from e + else: + n_ub = A_ub.shape[0] + if len(A_ub.shape) != 2 or A_ub.shape[1] != n_x: + raise ValueError( + "Invalid input for linprog: A_ub must have exactly two " + "dimensions, and the number of columns in A_ub must be " + "equal to the size of c") + if (sps.issparse(A_ub) and not np.isfinite(A_ub.data).all() + or not sps.issparse(A_ub) and not np.isfinite(A_ub).all()): + raise ValueError( + "Invalid input for linprog: A_ub must not contain values " + "inf, nan, or None") + + try: + b_ub = _format_b_constraints(b_ub) + except ValueError as e: + raise TypeError( + "Invalid input for linprog: b_ub must be a 1-D array of " + "numerical values, each representing the upper bound of an " + "inequality constraint (row) in A_ub") from e + else: + if b_ub.shape != (n_ub,): + raise ValueError( + "Invalid input for linprog: b_ub must be a 1-D array; b_ub " + "must not 
have more than one non-singleton dimension and " + "the number of rows in A_ub must equal the number of values " + "in b_ub") + if not np.isfinite(b_ub).all(): + raise ValueError( + "Invalid input for linprog: b_ub must not contain values " + "inf, nan, or None") + + try: + A_eq = _format_A_constraints(A_eq, n_x, sparse_lhs=sparse_lhs) + except ValueError as e: + raise TypeError( + "Invalid input for linprog: A_eq must be a 2-D array " + "of numerical values") from e + else: + n_eq = A_eq.shape[0] + if len(A_eq.shape) != 2 or A_eq.shape[1] != n_x: + raise ValueError( + "Invalid input for linprog: A_eq must have exactly two " + "dimensions, and the number of columns in A_eq must be " + "equal to the size of c") + + if (sps.issparse(A_eq) and not np.isfinite(A_eq.data).all() + or not sps.issparse(A_eq) and not np.isfinite(A_eq).all()): + raise ValueError( + "Invalid input for linprog: A_eq must not contain values " + "inf, nan, or None") + + try: + b_eq = _format_b_constraints(b_eq) + except ValueError as e: + raise TypeError( + "Invalid input for linprog: b_eq must be a dense, 1-D array of " + "numerical values, each representing the right hand side of an " + "equality constraint (row) in A_eq") from e + else: + if b_eq.shape != (n_eq,): + raise ValueError( + "Invalid input for linprog: b_eq must be a 1-D array; b_eq " + "must not have more than one non-singleton dimension and " + "the number of rows in A_eq must equal the number of values " + "in b_eq") + if not np.isfinite(b_eq).all(): + raise ValueError( + "Invalid input for linprog: b_eq must not contain values " + "inf, nan, or None") + + # x0 gives a (optional) starting solution to the solver. If x0 is None, + # skip the checks. Initial solution will be generated automatically. 
+ if x0 is not None: + try: + x0 = np.array(x0, dtype=float, copy=True).squeeze() + except ValueError as e: + raise TypeError( + "Invalid input for linprog: x0 must be a 1-D array of " + "numerical coefficients") from e + if x0.ndim == 0: + x0 = x0.reshape(-1) + if len(x0) == 0 or x0.ndim != 1: + raise ValueError( + "Invalid input for linprog: x0 should be a 1-D array; it " + "must not have more than one non-singleton dimension") + if not x0.size == c.size: + raise ValueError( + "Invalid input for linprog: x0 and c should contain the " + "same number of elements") + if not np.isfinite(x0).all(): + raise ValueError( + "Invalid input for linprog: x0 must not contain values " + "inf, nan, or None") + + # Bounds can be one of these formats: + # (1) a 2-D array or sequence, with shape N x 2 + # (2) a 1-D or 2-D sequence or array with 2 scalars + # (3) None (or an empty sequence or array) + # Unspecified bounds can be represented by None or (-)np.inf. + # All formats are converted into a N x 2 np.array with (-)np.inf where + # bounds are unspecified. + + # Prepare clean bounds array + bounds_clean = np.zeros((n_x, 2), dtype=float) + + # Convert to a numpy array. + # np.array(..,dtype=float) raises an error if dimensions are inconsistent + # or if there are invalid data types in bounds. Just add a linprog prefix + # to the error and re-raise. + # Creating at least a 2-D array simplifies the cases to distinguish below. 
def _presolve(lp, rr, rr_method, tol=1e-9):
    """
    Presolve a linear programming problem given in preferred format.

    Identify trivial infeasibilities, redundancies, and unboundedness,
    tighten bounds where possible, and eliminate fixed variables.

    Parameters
    ----------
    lp : _LPProblem
        Problem with fields ``c, A_ub, b_ub, A_eq, b_eq, bounds, x0``.
        ``bounds`` is an N x 2 array, lower bounds in the first column and
        upper bounds in the second; unbounded variables use ``-np.inf`` /
        ``np.inf``.
    rr : bool
        If ``True``, attempt to eliminate redundant rows of ``A_eq``.
    rr_method : str or None
        Redundancy removal method: ``'SVD'``, ``'pivot'`` or ``'ID'``
        (case-insensitive), or ``None`` to choose automatically.
    tol : float
        Tolerance determining when a value is "close enough" to zero or to
        a bound to be treated as such.

    Returns
    -------
    lp : _LPProblem
        The (possibly reduced) problem.
    c0 : float
        Constant objective term due to fixed (and eliminated) variables.
    x : 1-D array
        Solution vector (meaningful only when the problem is solved
        completely in presolve).
    revstack : list of functions
        Functions that reverse presolve operations; ``x_org = f(x_mod)``.
    complete : bool
        ``True`` if the problem was solved or shown to be infeasible or
        unbounded in presolve.
    status : int
        0 success, 1 iteration limit, 2 infeasible, 3 unbounded,
        4 numerical difficulties.
    message : str
        Descriptor of ``status``.

    References
    ----------
    .. [5] Andersen, Erling D. "Finding all linearly dependent rows in
        large-scale linear programming." Optimization Methods and Software
        6.3 (1995): 219-227.
    .. [8] Andersen, Erling D., and Knud D. Andersen. "Presolving in linear
        programming." Mathematical Programming 71.2 (1995): 221-245.
    """
    # Ideas from Reference [5]; performed before conversion to standard form
    # so that matrices are smaller (no artificial variables yet) and simple
    # bounds can still be adjusted.
    c, A_ub, b_ub, A_eq, b_eq, bounds, x0, _ = lp

    revstack = []           # record of variables eliminated from problem
    c0 = 0                  # constant cost term from eliminated variables
    complete = False        # True if detected infeasible/unbounded/solved
    x = np.zeros(c.shape)   # solution vector if completed in presolve

    status = 0              # all OK unless determined otherwise
    message = ""

    # Lower and upper bounds. Copy to prevent feedback.
    lb = bounds[:, 0].copy()
    ub = bounds[:, 1].copy()

    m_eq, n = A_eq.shape
    m_ub, n = A_ub.shape

    if (rr_method is not None
            and rr_method.lower() not in {"svd", "pivot", "id"}):
        message = ("'" + str(rr_method) + "' is not a valid option "
                   "for redundancy removal. Valid options are 'SVD', "
                   "'pivot', and 'ID'.")
        raise ValueError(message)

    if sps.issparse(A_eq):
        A_eq = A_eq.tocsr()
        A_ub = A_ub.tocsr()

        def where(A):
            return A.nonzero()

        vstack = sps.vstack
    else:
        where = np.where
        vstack = np.vstack

    # Bounds must be consistent: upper bounds >= lower bounds, and no
    # lower bound of +inf or upper bound of -inf.
    if np.any(ub < lb) or np.any(lb == np.inf) or np.any(ub == -np.inf):
        status = 2
        message = ("The problem is (trivially) infeasible since one "
                   "or more upper bounds are smaller than the corresponding "
                   "lower bounds, a lower bound is np.inf or an upper bound "
                   "is -np.inf.")
        complete = True
        return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                c0, x, revstack, complete, status, message)

    # zero row in equality constraints
    zero_row = np.array(np.sum(A_eq != 0, axis=1) == 0).flatten()
    if np.any(zero_row):
        if np.any(
                np.logical_and(
                    zero_row,
                    np.abs(b_eq) > tol)):  # test_zero_row_1
            # infeasible if RHS is not zero
            status = 2
            message = ("The problem is (trivially) infeasible due to a row "
                       "of zeros in the equality constraint matrix with a "
                       "nonzero corresponding constraint value.")
            complete = True
            return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                    c0, x, revstack, complete, status, message)
        else:  # test_zero_row_2
            # if RHS is zero, we can eliminate this equation entirely
            A_eq = A_eq[np.logical_not(zero_row), :]
            b_eq = b_eq[np.logical_not(zero_row)]

    # zero row in inequality constraints
    zero_row = np.array(np.sum(A_ub != 0, axis=1) == 0).flatten()
    if np.any(zero_row):
        if np.any(np.logical_and(zero_row, b_ub < -tol)):  # test_zero_row_1
            # infeasible if RHS is less than zero (because LHS is zero)
            status = 2
            # FIX: message previously said "equality constraint matrix";
            # this branch concerns the inequality constraints A_ub.
            message = ("The problem is (trivially) infeasible due to a row "
                       "of zeros in the inequality constraint matrix with a "
                       "negative corresponding constraint value.")
            complete = True
            return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                    c0, x, revstack, complete, status, message)
        else:  # test_zero_row_2
            # if LHS is >= 0, we can eliminate this constraint entirely
            A_ub = A_ub[np.logical_not(zero_row), :]
            b_ub = b_ub[np.logical_not(zero_row)]

    # zero column in (both) constraints
    # this indicates that a variable isn't constrained and can be removed
    A = vstack((A_eq, A_ub))
    if A.shape[0] > 0:
        zero_col = np.array(np.sum(A != 0, axis=0) == 0).flatten()
        # variable will be at upper or lower bound, depending on objective
        x[np.logical_and(zero_col, c < 0)] = ub[
            np.logical_and(zero_col, c < 0)]
        x[np.logical_and(zero_col, c > 0)] = lb[
            np.logical_and(zero_col, c > 0)]
        if np.any(np.isinf(x)):  # if an unconstrained variable has no bound
            status = 3
            message = ("If feasible, the problem is (trivially) unbounded "
                       "due to a zero column in the constraint matrices. If "
                       "you wish to check whether the problem is infeasible, "
                       "turn presolve off.")
            complete = True
            return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                    c0, x, revstack, complete, status, message)
        # variables will equal upper/lower bounds will be removed later
        lb[np.logical_and(zero_col, c < 0)] = ub[
            np.logical_and(zero_col, c < 0)]
        ub[np.logical_and(zero_col, c > 0)] = lb[
            np.logical_and(zero_col, c > 0)]

    # row singleton in equality constraints
    # this fixes a variable and removes the constraint
    singleton_row = np.array(np.sum(A_eq != 0, axis=1) == 1).flatten()
    rows = where(singleton_row)[0]
    cols = where(A_eq[rows, :])[1]
    if len(rows) > 0:
        for row, col in zip(rows, cols):
            val = b_eq[row] / A_eq[row, col]
            if not lb[col] - tol <= val <= ub[col] + tol:
                # infeasible if fixed value is not within bounds
                status = 2
                message = ("The problem is (trivially) infeasible because a "
                           "singleton row in the equality constraints is "
                           "inconsistent with the bounds.")
                complete = True
                return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                        c0, x, revstack, complete, status, message)
            else:
                # sets upper and lower bounds at that fixed value - variable
                # will be removed later
                lb[col] = val
                ub[col] = val
        A_eq = A_eq[np.logical_not(singleton_row), :]
        b_eq = b_eq[np.logical_not(singleton_row)]

    # row singleton in inequality constraints
    # this indicates a simple bound and the constraint can be removed
    # simple bounds may be adjusted here
    # After all of the simple bound information is combined here, get_Abc will
    # turn the simple bounds into constraints
    singleton_row = np.array(np.sum(A_ub != 0, axis=1) == 1).flatten()
    cols = where(A_ub[singleton_row, :])[1]
    rows = where(singleton_row)[0]
    if len(rows) > 0:
        for row, col in zip(rows, cols):
            val = b_ub[row] / A_ub[row, col]
            if A_ub[row, col] > 0:  # upper bound
                if val < lb[col] - tol:  # infeasible
                    complete = True
                elif val < ub[col]:  # new upper bound
                    ub[col] = val
            else:  # lower bound
                if val > ub[col] + tol:  # infeasible
                    complete = True
                elif val > lb[col]:  # new lower bound
                    lb[col] = val
        if complete:
            status = 2
            message = ("The problem is (trivially) infeasible because a "
                       "singleton row in the upper bound constraints is "
                       "inconsistent with the bounds.")
            return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                    c0, x, revstack, complete, status, message)
        A_ub = A_ub[np.logical_not(singleton_row), :]
        b_ub = b_ub[np.logical_not(singleton_row)]

    # identical bounds indicate that variable can be removed
    i_f = np.abs(lb - ub) < tol  # indices of "fixed" variables
    i_nf = np.logical_not(i_f)  # indices of "not fixed" variables

    # test_bounds_equal_but_infeasible
    if np.all(i_f):  # if bounds define solution, check for consistency
        residual = b_eq - A_eq.dot(lb)
        slack = b_ub - A_ub.dot(lb)
        if ((A_ub.size > 0 and np.any(slack < 0)) or
                (A_eq.size > 0 and not np.allclose(residual, 0))):
            status = 2
            message = ("The problem is (trivially) infeasible because the "
                       "bounds fix all variables to values inconsistent with "
                       "the constraints")
            complete = True
            return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                    c0, x, revstack, complete, status, message)

    ub_mod = ub
    lb_mod = lb
    if np.any(i_f):
        c0 += c[i_f].dot(lb[i_f])
        b_eq = b_eq - A_eq[:, i_f].dot(lb[i_f])
        b_ub = b_ub - A_ub[:, i_f].dot(lb[i_f])
        c = c[i_nf]
        x_undo = lb[i_f]  # not x[i_f], x is just zeroes
        x = x[i_nf]
        # user guess x0 stays separate from presolve solution x
        if x0 is not None:
            x0 = x0[i_nf]
        A_eq = A_eq[:, i_nf]
        A_ub = A_ub[:, i_nf]
        # modify bounds
        lb_mod = lb[i_nf]
        ub_mod = ub[i_nf]

        def rev(x_mod):
            # Function to restore x: insert x_undo into x_mod.
            # When elements have been removed at positions k1, k2, k3, ...
            # then these must be replaced at (after) positions k1-1, k2-2,
            # k3-3, ... in the modified array to recreate the original
            i = np.flatnonzero(i_f)
            # Number of variables to restore
            N = len(i)
            index_offset = np.arange(N)
            # Create insert indices
            insert_indices = i - index_offset
            x_rev = np.insert(x_mod.astype(float), insert_indices, x_undo)
            return x_rev

        # Use revstack as a list of functions, currently just this one.
        revstack.append(rev)

    # no constraints indicates that problem is trivial
    if A_eq.size == 0 and A_ub.size == 0:
        b_eq = np.array([])
        b_ub = np.array([])
        # test_empty_constraint_1
        if c.size == 0:
            status = 0
            message = ("The solution was determined in presolve as there are "
                       "no non-trivial constraints.")
        elif (np.any(np.logical_and(c < 0, ub_mod == np.inf)) or
                np.any(np.logical_and(c > 0, lb_mod == -np.inf))):
            # test_no_constraints()
            # test_unbounded_no_nontrivial_constraints_1
            # test_unbounded_no_nontrivial_constraints_2
            status = 3
            message = ("The problem is (trivially) unbounded "
                       "because there are no non-trivial constraints and "
                       "a) at least one decision variable is unbounded "
                       "above and its corresponding cost is negative, or "
                       "b) at least one decision variable is unbounded below "
                       "and its corresponding cost is positive. ")
        else:  # test_empty_constraint_2
            status = 0
            message = ("The solution was determined in presolve as there are "
                       "no non-trivial constraints.")
        complete = True
        x[c < 0] = ub_mod[c < 0]
        x[c > 0] = lb_mod[c > 0]
        # where c is zero, set x to a finite bound or zero
        x_zero_c = ub_mod[c == 0]
        # FIX: fall back to the *lower* bound where the upper bound is
        # infinite (assigning ub_mod again was a no-op), then to zero.
        x_zero_c[np.isinf(x_zero_c)] = lb_mod[c == 0][np.isinf(x_zero_c)]
        x_zero_c[np.isinf(x_zero_c)] = 0
        x[c == 0] = x_zero_c
        # if this is not the last step of presolve, should convert bounds back
        # to array and return here

    # Convert modified lb and ub back into N x 2 bounds
    bounds = np.hstack((lb_mod[:, np.newaxis], ub_mod[:, np.newaxis]))

    # remove redundant (linearly dependent) rows from equality constraints
    n_rows_A = A_eq.shape[0]
    redundancy_warning = ("A_eq does not appear to be of full row rank. To "
                          "improve performance, check the problem formulation "
                          "for redundant equality constraints.")
    if (sps.issparse(A_eq)):
        if rr and A_eq.size > 0:  # TODO: Fast sparse rank check?
            rr_res = _remove_redundancy_pivot_sparse(A_eq, b_eq)
            A_eq, b_eq, status, message = rr_res
            if A_eq.shape[0] < n_rows_A:
                warn(redundancy_warning, OptimizeWarning, stacklevel=1)
            if status != 0:
                complete = True
        return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
                c0, x, revstack, complete, status, message)

    # This is a wild guess for which redundancy removal algorithm will be
    # faster. More testing would be good.
    small_nullspace = 5
    if rr and A_eq.size > 0:
        try:  # TODO: use results of first SVD in _remove_redundancy_svd
            rank = np.linalg.matrix_rank(A_eq)
        # oh well, we'll have to go with _remove_redundancy_pivot_dense
        except Exception:
            rank = 0
    if rr and A_eq.size > 0 and rank < A_eq.shape[0]:
        warn(redundancy_warning, OptimizeWarning, stacklevel=3)
        dim_row_nullspace = A_eq.shape[0]-rank
        if rr_method is None:
            if dim_row_nullspace <= small_nullspace:
                rr_res = _remove_redundancy_svd(A_eq, b_eq)
                A_eq, b_eq, status, message = rr_res
            if dim_row_nullspace > small_nullspace or status == 4:
                rr_res = _remove_redundancy_pivot_dense(A_eq, b_eq)
                A_eq, b_eq, status, message = rr_res
        else:
            rr_method = rr_method.lower()
            if rr_method == "svd":
                rr_res = _remove_redundancy_svd(A_eq, b_eq)
                A_eq, b_eq, status, message = rr_res
            elif rr_method == "pivot":
                rr_res = _remove_redundancy_pivot_dense(A_eq, b_eq)
                A_eq, b_eq, status, message = rr_res
            elif rr_method == "id":
                rr_res = _remove_redundancy_id(A_eq, b_eq, rank)
                A_eq, b_eq, status, message = rr_res
            else:  # shouldn't get here; option validity checked above
                pass
        if A_eq.shape[0] < rank:
            message = ("Due to numerical issues, redundant equality "
                       "constraints could not be removed automatically. "
                       "Try providing your constraint matrices as sparse "
                       "matrices to activate sparse presolve, try turning "
                       "off redundancy removal, or try turning off presolve "
                       "altogether.")
            status = 4
    if status != 0:
        complete = True
    return (_LPProblem(c, A_ub, b_ub, A_eq, b_eq, bounds, x0),
            c0, x, revstack, complete, status, message)
def _parse_linprog(lp, options, meth):
    """
    Parse and normalize a linear programming problem.

    Runs two steps: ``_check_sparse_inputs`` validates any sparse
    constraint matrices (``A_ub``, ``A_eq``) against the solver options,
    and ``_clean_inputs`` converts all fields of `lp` to the canonical
    numeric formats (arrays, N x 2 bounds, etc.).

    Parameters
    ----------
    lp : _LPProblem
        Problem with fields ``c, A_ub, b_ub, A_eq, b_eq, bounds, x0`` in
        any of the user-facing input formats.
    options : dict or None
        Solver options (e.g. ``maxiter``, ``disp``); ``None`` means no
        options were supplied.
    meth : str
        Name of the linprog method, forwarded to the sparsity check.

    Returns
    -------
    lp : _LPProblem
        The problem with all fields converted to canonical formats.
    solver_options : dict
        A (shallow) copy of `options`, possibly adjusted by the
        sparsity check.
    """
    # Work on a copy so the caller's options dict is never mutated.
    solver_options = dict(options) if options is not None else {}
    solver_options, A_ub, A_eq = _check_sparse_inputs(
        solver_options, meth, lp.A_ub, lp.A_eq)
    # Convert lists to numpy arrays, bounds to N x 2, etc.
    lp = _clean_inputs(lp._replace(A_ub=A_ub, A_eq=A_eq))
    return lp, solver_options
def _get_Abc(lp, c0):
    """
    Convert a problem with inequality constraints and general bounds to
    standard form: minimize ``c @ x`` subject to ``A @ x == b, x >= 0``.

    Slack variables are appended for the inequality constraints, free
    variables are split into positive/negative parts, and lower bounds are
    shifted to zero (contributing a constant term to the objective).

    Parameters
    ----------
    lp : _LPProblem
        Problem with fields ``c, A_ub, b_ub, A_eq, b_eq, bounds, x0,
        integrality``; ``bounds`` is an N x 2 array (lower bounds in
        column 0, upper bounds in column 1), possibly tightened by presolve.
    c0 : float
        Constant objective term due to fixed (and eliminated) variables.

    Returns
    -------
    A : 2-D array
        Equality constraint matrix of the standard-form problem.
    b : 1-D array
        Right-hand side of each standard-form equality constraint.
    c : 1-D array
        Objective coefficients of the standard-form problem.
    c0 : float
        Updated constant objective term (lower-bound shifts added).
    x0 : 1-D array or None
        Starting values mapped into the standard-form variable space.

    References
    ----------
    .. [9] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear
        programming." Athena Scientific 1 (1997): 997.
    """
    c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality = lp

    # Select sparse or dense stacking/constructor helpers once, so the rest
    # of the function is format-agnostic.
    if sps.issparse(A_eq):
        sparse = True
        A_eq = sps.csr_matrix(A_eq)
        A_ub = sps.csr_matrix(A_ub)

        def hstack(blocks):
            return sps.hstack(blocks, format="csr")

        def vstack(blocks):
            return sps.vstack(blocks, format="csr")

        zeros = sps.csr_matrix
        eye = sps.eye
    else:
        sparse = False
        hstack = np.hstack
        vstack = np.vstack
        zeros = np.zeros
        eye = np.eye

    # Variables lbs and ubs (see below) may be changed, which feeds back
    # into bounds, so copy.
    bounds = np.array(bounds, copy=True)

    # modify problem such that all variables have only non-negativity bounds
    lbs = bounds[:, 0]  # views into the local copy; mutated below
    ubs = bounds[:, 1]
    m_ub, n_ub = A_ub.shape

    lb_none = np.equal(lbs, -np.inf)
    ub_none = np.equal(ubs, np.inf)
    lb_some = np.logical_not(lb_none)
    ub_some = np.logical_not(ub_none)

    # unbounded below: substitute xi = -xi' (unbounded above)
    # if -inf <= xi <= ub, then -ub <= -xi <= inf, so swap and invert bounds
    l_nolb_someub = np.logical_and(lb_none, ub_some)
    i_nolb = np.nonzero(l_nolb_someub)[0]
    lbs[l_nolb_someub], ubs[l_nolb_someub] = (
        -ubs[l_nolb_someub], -lbs[l_nolb_someub])
    # recompute the masks after the swap above
    lb_none = np.equal(lbs, -np.inf)
    ub_none = np.equal(ubs, np.inf)
    lb_some = np.logical_not(lb_none)
    ub_some = np.logical_not(ub_none)
    # negate the corresponding objective/constraint columns and guess values
    c[i_nolb] *= -1
    if x0 is not None:
        x0[i_nolb] *= -1
    if len(i_nolb) > 0:
        if A_ub.shape[0] > 0:  # sometimes needed for sparse arrays... weird
            A_ub[:, i_nolb] *= -1
        if A_eq.shape[0] > 0:
            A_eq[:, i_nolb] *= -1

    # upper bound: add inequality constraint (one row per bounded variable)
    i_newub, = ub_some.nonzero()
    ub_newub = ubs[ub_some]
    n_bounds = len(i_newub)
    if n_bounds > 0:
        shape = (n_bounds, A_ub.shape[1])
        if sparse:
            idxs = (np.arange(n_bounds), i_newub)
            A_ub = vstack((A_ub, sps.csr_matrix((np.ones(n_bounds), idxs),
                                                shape=shape)))
        else:
            A_ub = vstack((A_ub, np.zeros(shape)))
            A_ub[np.arange(m_ub, A_ub.shape[0]), i_newub] = 1
        b_ub = np.concatenate((b_ub, np.zeros(n_bounds)))
        b_ub[m_ub:] = ub_newub

    A1 = vstack((A_ub, A_eq))
    b = np.concatenate((b_ub, b_eq))
    # pad c/x0 with zeros for the slack variables added below
    c = np.concatenate((c, np.zeros((A_ub.shape[0],))))
    if x0 is not None:
        x0 = np.concatenate((x0, np.zeros((A_ub.shape[0],))))
    # unbounded: substitute xi = xi+ - xi-
    l_free = np.logical_and(lb_none, ub_none)
    i_free = np.nonzero(l_free)[0]
    n_free = len(i_free)
    c = np.concatenate((c, np.zeros(n_free)))
    if x0 is not None:
        x0 = np.concatenate((x0, np.zeros(n_free)))
    # append negated copies of the free columns (the xi- parts)
    A1 = hstack((A1[:, :n_ub], -A1[:, i_free]))
    c[n_ub:n_ub+n_free] = -c[i_free]
    if x0 is not None:
        # split a negative guess between the xi+ and xi- parts
        i_free_neg = x0[i_free] < 0
        x0[np.arange(n_ub, A1.shape[1])[i_free_neg]] = -x0[i_free[i_free_neg]]
        x0[i_free[i_free_neg]] = 0

    # add slack variables (identity over the inequality rows only)
    A2 = vstack([eye(A_ub.shape[0]), zeros((A_eq.shape[0], A_ub.shape[0]))])

    A = hstack([A1, A2])

    # lower bound: substitute xi = xi' + lb
    # now there is a constant term in objective
    i_shift = np.nonzero(lb_some)[0]
    lb_shift = lbs[lb_some].astype(float)
    c0 += np.sum(lb_shift * c[i_shift])
    if sparse:
        b = b.reshape(-1, 1)
        A = A.tocsc()
        b -= (A[:, i_shift] * sps.diags(lb_shift)).sum(axis=1)
        b = b.ravel()
    else:
        b -= (A[:, i_shift] * lb_shift).sum(axis=1)
    if x0 is not None:
        x0[i_shift] -= lb_shift

    return A, b, c, c0, x0


def _round_to_power_of_two(x):
    """
    Round elements of the array to the nearest power of two.
    """
    return 2**np.around(np.log2(x))


def _autoscale(A, b, c, x0):
    """
    Scale the problem by row/column equilibration from [12], and normalize
    the right-hand side vector by its maximum element.

    Returns the scaled ``A, b, c, x0`` plus the column scaling ``C`` and
    RHS scale ``b_scale`` needed by ``_unscale`` to recover the solution.
    """
    m, n = A.shape

    C = 1
    R = 1

    if A.size > 0:

        # row scaling: divide each row by its max-abs entry, rounded to a
        # power of two (exact in binary floating point)
        R = np.max(np.abs(A), axis=1)
        if sps.issparse(A):
            R = R.toarray().flatten()
        R[R == 0] = 1  # leave all-zero rows untouched
        R = 1/_round_to_power_of_two(R)
        A = sps.diags(R)*A if sps.issparse(A) else A*R.reshape(m, 1)
        b = b*R

        # column scaling, computed on the row-scaled matrix
        C = np.max(np.abs(A), axis=0)
        if sps.issparse(A):
            C = C.toarray().flatten()
        C[C == 0] = 1  # leave all-zero columns untouched
        C = 1/_round_to_power_of_two(C)
        A = A*sps.diags(C) if sps.issparse(A) else A*C
        c = c*C

    b_scale = np.max(np.abs(b)) if b.size > 0 else 1
    if b_scale == 0:
        b_scale = 1.
    b = b/b_scale

    if x0 is not None:
        x0 = x0/b_scale*(1/C)
    return A, b, c, x0, C, b_scale


def _unscale(x, C, b_scale):
    """
    Convert a solution of the ``_autoscale``-d problem back to a solution
    of the original problem (inverting the column and RHS scaling).
    """

    try:
        n = len(C)
        # fails if sparse or scalar; that's OK.
        # this is only needed for original simplex (never sparse)
    except TypeError:
        n = len(x)

    return x[:n]*b_scale*C
+ # this is only needed for original simplex (never sparse) + except TypeError: + n = len(x) + + return x[:n]*b_scale*C + + +def _display_summary(message, status, fun, iteration): + """ + Print the termination summary of the linear program + + Parameters + ---------- + message : str + A string descriptor of the exit status of the optimization. + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + fun : float + Value of the objective function. + iteration : iteration + The number of iterations performed. + """ + print(message) + if status in (0, 1): + print(f" Current function value: {fun: <12.6f}") + print(f" Iterations: {iteration:d}") + + +def _postsolve(x, postsolve_args, complete=False): + """ + Given solution x to presolved, standard form linear program x, add + fixed variables back into the problem and undo the variable substitutions + to get solution to original linear program. Also, calculate the objective + function value, slack in original upper bound constraints, and residuals + in original equality constraints. + + Parameters + ---------- + x : 1-D array + Solution vector to the standard-form problem. + postsolve_args : tuple + Data needed by _postsolve to convert the solution to the standard-form + problem into the solution to the original problem, including: + + lp : A `scipy.optimize._linprog_util._LPProblem` consisting of the following fields: + + c : 1D array + The coefficients of the linear objective function to be minimized. + A_ub : 2D array, optional + The inequality constraint matrix. Each row of ``A_ub`` specifies the + coefficients of a linear inequality constraint on ``x``. + b_ub : 1D array, optional + The inequality constraint vector. 
Each element represents an + upper bound on the corresponding value of ``A_ub @ x``. + A_eq : 2D array, optional + The equality constraint matrix. Each row of ``A_eq`` specifies the + coefficients of a linear equality constraint on ``x``. + b_eq : 1D array, optional + The equality constraint vector. Each element of ``A_eq @ x`` must equal + the corresponding element of ``b_eq``. + bounds : 2D array + The bounds of ``x``, lower bounds in the 1st column, upper + bounds in the 2nd column. The bounds are possibly tightened + by the presolve procedure. + x0 : 1D array, optional + Guess values of the decision variables, which will be refined by + the optimization algorithm. This argument is currently used only by the + 'revised simplex' method, and can only be used if `x0` represents a + basic feasible solution. + + revstack: list of functions + the functions in the list reverse the operations of _presolve() + the function signature is x_org = f(x_mod), where x_mod is the result + of a presolve step and x_org the value at the start of the step + complete : bool + Whether the solution is was determined in presolve (``True`` if so) + + Returns + ------- + x : 1-D array + Solution vector to original linear programming problem + fun: float + optimal objective value for original problem + slack : 1-D array + The (non-negative) slack in the upper bound constraints, that is, + ``b_ub - A_ub @ x`` + con : 1-D array + The (nominally zero) residuals of the equality constraints, that is, + ``b - A_eq @ x`` + """ + # note that all the inputs are the ORIGINAL, unmodified versions + # no rows, columns have been removed + + c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality = postsolve_args[0] + revstack, C, b_scale = postsolve_args[1:] + + x = _unscale(x, C, b_scale) + + # Undo variable substitutions of _get_Abc() + # if "complete", problem was solved in presolve; don't do anything here + n_x = bounds.shape[0] + if not complete and bounds is not None: # bounds are never none, probably 
+ n_unbounded = 0 + for i, bi in enumerate(bounds): + lbi = bi[0] + ubi = bi[1] + if lbi == -np.inf and ubi == np.inf: + n_unbounded += 1 + x[i] = x[i] - x[n_x + n_unbounded - 1] + else: + if lbi == -np.inf: + x[i] = ubi - x[i] + else: + x[i] += lbi + # all the rest of the variables were artificial + x = x[:n_x] + + # If there were variables removed from the problem, add them back into the + # solution vector + # Apply the functions in revstack (reverse direction) + for rev in reversed(revstack): + x = rev(x) + + fun = x.dot(c) + slack = b_ub - A_ub.dot(x) # report slack for ORIGINAL UB constraints + # report residuals of ORIGINAL EQ constraints + con = b_eq - A_eq.dot(x) + + return x, fun, slack, con + + +def _check_result(x, fun, status, slack, con, bounds, tol, message, + integrality): + """ + Check the validity of the provided solution. + + A valid (optimal) solution satisfies all bounds, all slack variables are + negative and all equality constraint residuals are strictly non-zero. + Further, the lower-bounds, upper-bounds, slack and residuals contain + no nan values. + + Parameters + ---------- + x : 1-D array + Solution vector to original linear programming problem + fun: float + optimal objective value for original problem + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + slack : 1-D array + The (non-negative) slack in the upper bound constraints, that is, + ``b_ub - A_ub @ x`` + con : 1-D array + The (nominally zero) residuals of the equality constraints, that is, + ``b - A_eq @ x`` + bounds : 2D array + The bounds on the original variables ``x`` + message : str + A string descriptor of the exit status of the optimization. + tol : float + Termination tolerance; see [1]_ Section 4.5. 
+ + Returns + ------- + status : int + An integer representing the exit status of the optimization:: + + 0 : Optimization terminated successfully + 1 : Iteration limit reached + 2 : Problem appears to be infeasible + 3 : Problem appears to be unbounded + 4 : Serious numerical difficulties encountered + + message : str + A string descriptor of the exit status of the optimization. + """ + # Somewhat arbitrary + tol = np.sqrt(tol) * 10 + + if x is None: + # HiGHS does not provide x if infeasible/unbounded + if status == 0: # Observed with HiGHS Simplex Primal + status = 4 + message = ("The solver did not provide a solution nor did it " + "report a failure. Please submit a bug report.") + return status, message + + contains_nans = ( + np.isnan(x).any() + or np.isnan(fun) + or np.isnan(slack).any() + or np.isnan(con).any() + ) + + if contains_nans: + is_feasible = False + else: + if integrality is None: + integrality = 0 + valid_bounds = (x >= bounds[:, 0] - tol) & (x <= bounds[:, 1] + tol) + # When integrality is 2 or 3, x must be within bounds OR take value 0 + valid_bounds |= (integrality > 1) & np.isclose(x, 0, atol=tol) + invalid_bounds = not np.all(valid_bounds) + + invalid_slack = status != 3 and (slack < -tol).any() + invalid_con = status != 3 and (np.abs(con) > tol).any() + is_feasible = not (invalid_bounds or invalid_slack or invalid_con) + + if status == 0 and not is_feasible: + status = 4 + message = ("The solution does not satisfy the constraints within the " + "required tolerance of " + f"{tol:.2E}" + ", yet " + "no errors were raised and there is no certificate of " + "infeasibility or unboundedness. Check whether " + "the slack and constraint residuals are acceptable; " + "if not, consider enabling presolve, adjusting the " + "tolerance option(s), and/or using a different method. 
" + "Please consider submitting a bug report.") + elif status == 2 and is_feasible: + # Occurs if the simplex method exits after phase one with a very + # nearly basic feasible solution. Postsolving can make the solution + # basic, however, this solution is NOT optimal + status = 4 + message = ("The solution is feasible, but the solver did not report " + "that the solution was optimal. Please try a different " + "method.") + + return status, message diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minimize.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minimize.py new file mode 100644 index 0000000000000000000000000000000000000000..195e31f23e227155040c6c54d7552e11bba7b1c0 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minimize.py @@ -0,0 +1,1116 @@ +""" +Unified interfaces to minimization algorithms. + +Functions +--------- +- minimize : minimization of a function of several variables. +- minimize_scalar : minimization of a function of one variable. 
+""" + +__all__ = ['minimize', 'minimize_scalar'] + + +from warnings import warn + +import numpy as np + +# unconstrained minimization +from ._optimize import (_minimize_neldermead, _minimize_powell, _minimize_cg, + _minimize_bfgs, _minimize_newtoncg, + _minimize_scalar_brent, _minimize_scalar_bounded, + _minimize_scalar_golden, MemoizeJac, OptimizeResult, + _wrap_callback, _recover_from_bracket_error) +from ._trustregion_dogleg import _minimize_dogleg +from ._trustregion_ncg import _minimize_trust_ncg +from ._trustregion_krylov import _minimize_trust_krylov +from ._trustregion_exact import _minimize_trustregion_exact +from ._trustregion_constr import _minimize_trustregion_constr + +# constrained minimization +from ._lbfgsb_py import _minimize_lbfgsb +from ._tnc import _minimize_tnc +from ._cobyla_py import _minimize_cobyla +from ._cobyqa_py import _minimize_cobyqa +from ._slsqp_py import _minimize_slsqp +from ._constraints import (old_bound_to_new, new_bounds_to_old, + old_constraint_to_new, new_constraint_to_old, + NonlinearConstraint, LinearConstraint, Bounds, + PreparedConstraint) +from ._differentiable_functions import FD_METHODS + +MINIMIZE_METHODS = ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg', + 'l-bfgs-b', 'tnc', 'cobyla', 'cobyqa', 'slsqp', + 'trust-constr', 'dogleg', 'trust-ncg', 'trust-exact', + 'trust-krylov'] + +# These methods support the new callback interface (passed an OptimizeResult) +MINIMIZE_METHODS_NEW_CB = ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg', + 'l-bfgs-b', 'trust-constr', 'dogleg', 'trust-ncg', + 'trust-exact', 'trust-krylov', 'cobyqa'] + +MINIMIZE_SCALAR_METHODS = ['brent', 'bounded', 'golden'] + +def minimize(fun, x0, args=(), method=None, jac=None, hess=None, + hessp=None, bounds=None, constraints=(), tol=None, + callback=None, options=None): + """Minimization of scalar function of one or more variables. + + Parameters + ---------- + fun : callable + The objective function to be minimized. 
+ + ``fun(x, *args) -> float`` + + where ``x`` is a 1-D array with shape (n,) and ``args`` + is a tuple of the fixed parameters needed to completely + specify the function. + x0 : ndarray, shape (n,) + Initial guess. Array of real elements of size (n,), + where ``n`` is the number of independent variables. + args : tuple, optional + Extra arguments passed to the objective function and its + derivatives (`fun`, `jac` and `hess` functions). + method : str or callable, optional + Type of solver. Should be one of + + - 'Nelder-Mead' :ref:`(see here) ` + - 'Powell' :ref:`(see here) ` + - 'CG' :ref:`(see here) ` + - 'BFGS' :ref:`(see here) ` + - 'Newton-CG' :ref:`(see here) ` + - 'L-BFGS-B' :ref:`(see here) ` + - 'TNC' :ref:`(see here) ` + - 'COBYLA' :ref:`(see here) ` + - 'COBYQA' :ref:`(see here) ` + - 'SLSQP' :ref:`(see here) ` + - 'trust-constr':ref:`(see here) ` + - 'dogleg' :ref:`(see here) ` + - 'trust-ncg' :ref:`(see here) ` + - 'trust-exact' :ref:`(see here) ` + - 'trust-krylov' :ref:`(see here) ` + - custom - a callable object, see below for description. + + If not given, chosen to be one of ``BFGS``, ``L-BFGS-B``, ``SLSQP``, + depending on whether or not the problem has constraints or bounds. + jac : {callable, '2-point', '3-point', 'cs', bool}, optional + Method for computing the gradient vector. Only for CG, BFGS, + Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg, trust-krylov, + trust-exact and trust-constr. + If it is a callable, it should be a function that returns the gradient + vector: + + ``jac(x, *args) -> array_like, shape (n,)`` + + where ``x`` is an array with shape (n,) and ``args`` is a tuple with + the fixed parameters. If `jac` is a Boolean and is True, `fun` is + assumed to return a tuple ``(f, g)`` containing the objective + function and the gradient. + Methods 'Newton-CG', 'trust-ncg', 'dogleg', 'trust-exact', and + 'trust-krylov' require that either a callable be supplied, or that + `fun` return the objective and gradient. 
+ If None or False, the gradient will be estimated using 2-point finite + difference estimation with an absolute step size. + Alternatively, the keywords {'2-point', '3-point', 'cs'} can be used + to select a finite difference scheme for numerical estimation of the + gradient with a relative step size. These finite difference schemes + obey any specified `bounds`. + hess : {callable, '2-point', '3-point', 'cs', HessianUpdateStrategy}, optional + Method for computing the Hessian matrix. Only for Newton-CG, dogleg, + trust-ncg, trust-krylov, trust-exact and trust-constr. + If it is callable, it should return the Hessian matrix: + + ``hess(x, *args) -> {LinearOperator, spmatrix, array}, (n, n)`` + + where ``x`` is a (n,) ndarray and ``args`` is a tuple with the fixed + parameters. + The keywords {'2-point', '3-point', 'cs'} can also be used to select + a finite difference scheme for numerical estimation of the hessian. + Alternatively, objects implementing the `HessianUpdateStrategy` + interface can be used to approximate the Hessian. Available + quasi-Newton methods implementing this interface are: + + - `BFGS`; + - `SR1`. + + Not all of the options are available for each of the methods; for + availability refer to the notes. + hessp : callable, optional + Hessian of objective function times an arbitrary vector p. Only for + Newton-CG, trust-ncg, trust-krylov, trust-constr. + Only one of `hessp` or `hess` needs to be given. If `hess` is + provided, then `hessp` will be ignored. `hessp` must compute the + Hessian times an arbitrary vector: + + ``hessp(x, p, *args) -> ndarray shape (n,)`` + + where ``x`` is a (n,) ndarray, ``p`` is an arbitrary vector with + dimension (n,) and ``args`` is a tuple with the fixed + parameters. + bounds : sequence or `Bounds`, optional + Bounds on variables for Nelder-Mead, L-BFGS-B, TNC, SLSQP, Powell, + trust-constr, COBYLA, and COBYQA methods. There are two ways to specify + the bounds: + + 1. Instance of `Bounds` class. + 2. 
Sequence of ``(min, max)`` pairs for each element in `x`. None + is used to specify no bound. + + constraints : {Constraint, dict} or List of {Constraint, dict}, optional + Constraints definition. Only for COBYLA, COBYQA, SLSQP and trust-constr. + + Constraints for 'trust-constr' and 'cobyqa' are defined as a single object + or a list of objects specifying constraints to the optimization problem. + Available constraints are: + + - `LinearConstraint` + - `NonlinearConstraint` + + Constraints for COBYLA, SLSQP are defined as a list of dictionaries. + Each dictionary with fields: + + type : str + Constraint type: 'eq' for equality, 'ineq' for inequality. + fun : callable + The function defining the constraint. + jac : callable, optional + The Jacobian of `fun` (only for SLSQP). + args : sequence, optional + Extra arguments to be passed to the function and Jacobian. + + Equality constraint means that the constraint function result is to + be zero whereas inequality means that it is to be non-negative. + Note that COBYLA only supports inequality constraints. + + tol : float, optional + Tolerance for termination. When `tol` is specified, the selected + minimization algorithm sets some relevant solver-specific tolerance(s) + equal to `tol`. For detailed control, use solver-specific + options. + options : dict, optional + A dictionary of solver options. All methods except `TNC` accept the + following generic options: + + maxiter : int + Maximum number of iterations to perform. Depending on the + method each iteration may use several function evaluations. + + For `TNC` use `maxfun` instead of `maxiter`. + disp : bool + Set to True to print convergence messages. + + For method-specific options, see :func:`show_options()`. + callback : callable, optional + A callable called after each iteration. 
+ + All methods except TNC, SLSQP, and COBYLA support a callable with + the signature: + + ``callback(intermediate_result: OptimizeResult)`` + + where ``intermediate_result`` is a keyword parameter containing an + `OptimizeResult` with attributes ``x`` and ``fun``, the present values + of the parameter vector and objective function. Note that the name + of the parameter must be ``intermediate_result`` for the callback + to be passed an `OptimizeResult`. These methods will also terminate if + the callback raises ``StopIteration``. + + All methods except trust-constr (also) support a signature like: + + ``callback(xk)`` + + where ``xk`` is the current parameter vector. + + Introspection is used to determine which of the signatures above to + invoke. + + Returns + ------- + res : OptimizeResult + The optimization result represented as a ``OptimizeResult`` object. + Important attributes are: ``x`` the solution array, ``success`` a + Boolean flag indicating if the optimizer exited successfully and + ``message`` which describes the cause of the termination. See + `OptimizeResult` for a description of other attributes. + + See also + -------- + minimize_scalar : Interface to minimization algorithms for scalar + univariate functions + show_options : Additional options accepted by the solvers + + Notes + ----- + This section describes the available solvers that can be selected by the + 'method' parameter. The default method is *BFGS*. + + **Unconstrained minimization** + + Method :ref:`CG ` uses a nonlinear conjugate + gradient algorithm by Polak and Ribiere, a variant of the + Fletcher-Reeves method described in [5]_ pp.120-122. Only the + first derivatives are used. + + Method :ref:`BFGS ` uses the quasi-Newton + method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS) [5]_ + pp. 136. It uses the first derivatives only. BFGS has proven good + performance even for non-smooth optimizations. 
This method also + returns an approximation of the Hessian inverse, stored as + `hess_inv` in the OptimizeResult object. + + Method :ref:`Newton-CG ` uses a + Newton-CG algorithm [5]_ pp. 168 (also known as the truncated + Newton method). It uses a CG method to the compute the search + direction. See also *TNC* method for a box-constrained + minimization with a similar algorithm. Suitable for large-scale + problems. + + Method :ref:`dogleg ` uses the dog-leg + trust-region algorithm [5]_ for unconstrained minimization. This + algorithm requires the gradient and Hessian; furthermore the + Hessian is required to be positive definite. + + Method :ref:`trust-ncg ` uses the + Newton conjugate gradient trust-region algorithm [5]_ for + unconstrained minimization. This algorithm requires the gradient + and either the Hessian or a function that computes the product of + the Hessian with a given vector. Suitable for large-scale problems. + + Method :ref:`trust-krylov ` uses + the Newton GLTR trust-region algorithm [14]_, [15]_ for unconstrained + minimization. This algorithm requires the gradient + and either the Hessian or a function that computes the product of + the Hessian with a given vector. Suitable for large-scale problems. + On indefinite problems it requires usually less iterations than the + `trust-ncg` method and is recommended for medium and large-scale problems. + + Method :ref:`trust-exact ` + is a trust-region method for unconstrained minimization in which + quadratic subproblems are solved almost exactly [13]_. This + algorithm requires the gradient and the Hessian (which is + *not* required to be positive definite). It is, in many + situations, the Newton method to converge in fewer iterations + and the most recommended for small and medium-size problems. + + **Bound-Constrained minimization** + + Method :ref:`Nelder-Mead ` uses the + Simplex algorithm [1]_, [2]_. This algorithm is robust in many + applications. 
However, if numerical computation of derivative can be + trusted, other algorithms using the first and/or second derivatives + information might be preferred for their better performance in + general. + + Method :ref:`L-BFGS-B ` uses the L-BFGS-B + algorithm [6]_, [7]_ for bound constrained minimization. + + Method :ref:`Powell ` is a modification + of Powell's method [3]_, [4]_ which is a conjugate direction + method. It performs sequential one-dimensional minimizations along + each vector of the directions set (`direc` field in `options` and + `info`), which is updated at each iteration of the main + minimization loop. The function need not be differentiable, and no + derivatives are taken. If bounds are not provided, then an + unbounded line search will be used. If bounds are provided and + the initial guess is within the bounds, then every function + evaluation throughout the minimization procedure will be within + the bounds. If bounds are provided, the initial guess is outside + the bounds, and `direc` is full rank (default has full rank), then + some function evaluations during the first iteration may be + outside the bounds, but every function evaluation after the first + iteration will be within the bounds. If `direc` is not full rank, + then some parameters may not be optimized and the solution is not + guaranteed to be within the bounds. + + Method :ref:`TNC ` uses a truncated Newton + algorithm [5]_, [8]_ to minimize a function with variables subject + to bounds. This algorithm uses gradient information; it is also + called Newton Conjugate-Gradient. It differs from the *Newton-CG* + method described above as it wraps a C implementation and allows + each variable to be given upper and lower bounds. + + **Constrained Minimization** + + Method :ref:`COBYLA ` uses the + Constrained Optimization BY Linear Approximation (COBYLA) method + [9]_, [10]_, [11]_. The algorithm is based on linear + approximations to the objective function and each constraint. 
The + method wraps a FORTRAN implementation of the algorithm. The + constraints functions 'fun' may return either a single number + or an array or list of numbers. + + Method :ref:`COBYQA ` uses the Constrained + Optimization BY Quadratic Approximations (COBYQA) method [18]_. The + algorithm is a derivative-free trust-region SQP method based on quadratic + approximations to the objective function and each nonlinear constraint. The + bounds are treated as unrelaxable constraints, in the sense that the + algorithm always respects them throughout the optimization process. + + Method :ref:`SLSQP ` uses Sequential + Least SQuares Programming to minimize a function of several + variables with any combination of bounds, equality and inequality + constraints. The method wraps the SLSQP Optimization subroutine + originally implemented by Dieter Kraft [12]_. Note that the + wrapper handles infinite values in bounds by converting them into + large floating values. + + Method :ref:`trust-constr ` is a + trust-region algorithm for constrained optimization. It switches + between two implementations depending on the problem definition. + It is the most versatile constrained minimization algorithm + implemented in SciPy and the most appropriate for large-scale problems. + For equality constrained problems it is an implementation of Byrd-Omojokun + Trust-Region SQP method described in [17]_ and in [5]_, p. 549. When + inequality constraints are imposed as well, it switches to the trust-region + interior point method described in [16]_. This interior point algorithm, + in turn, solves inequality constraints by introducing slack variables + and solving a sequence of equality-constrained barrier problems + for progressively smaller values of the barrier parameter. + The previously described equality constrained SQP method is + used to solve the subproblems with increasing levels of accuracy + as the iterate gets closer to a solution. 
+ + **Finite-Difference Options** + + For Method :ref:`trust-constr ` + the gradient and the Hessian may be approximated using + three finite-difference schemes: {'2-point', '3-point', 'cs'}. + The scheme 'cs' is, potentially, the most accurate but it + requires the function to correctly handle complex inputs and to + be differentiable in the complex plane. The scheme '3-point' is more + accurate than '2-point' but requires twice as many operations. If the + gradient is estimated via finite-differences the Hessian must be + estimated using one of the quasi-Newton strategies. + + **Method specific options for the** `hess` **keyword** + + +--------------+------+----------+-------------------------+-----+ + | method/Hess | None | callable | '2-point/'3-point'/'cs' | HUS | + +==============+======+==========+=========================+=====+ + | Newton-CG | x | (n, n) | x | x | + | | | LO | | | + +--------------+------+----------+-------------------------+-----+ + | dogleg | | (n, n) | | | + +--------------+------+----------+-------------------------+-----+ + | trust-ncg | | (n, n) | x | x | + +--------------+------+----------+-------------------------+-----+ + | trust-krylov | | (n, n) | x | x | + +--------------+------+----------+-------------------------+-----+ + | trust-exact | | (n, n) | | | + +--------------+------+----------+-------------------------+-----+ + | trust-constr | x | (n, n) | x | x | + | | | LO | | | + | | | sp | | | + +--------------+------+----------+-------------------------+-----+ + + where LO=LinearOperator, sp=Sparse matrix, HUS=HessianUpdateStrategy + + **Custom minimizers** + + It may be useful to pass a custom minimization method, for example + when using a frontend to this method such as `scipy.optimize.basinhopping` + or a different library. You can simply pass a callable as the ``method`` + parameter. 
+ + The callable is called as ``method(fun, x0, args, **kwargs, **options)`` + where ``kwargs`` corresponds to any other parameters passed to `minimize` + (such as `callback`, `hess`, etc.), except the `options` dict, which has + its contents also passed as `method` parameters pair by pair. Also, if + `jac` has been passed as a bool type, `jac` and `fun` are mangled so that + `fun` returns just the function values and `jac` is converted to a function + returning the Jacobian. The method shall return an `OptimizeResult` + object. + + The provided `method` callable must be able to accept (and possibly ignore) + arbitrary parameters; the set of parameters accepted by `minimize` may + expand in future versions and then these parameters will be passed to + the method. You can find an example in the scipy.optimize tutorial. + + References + ---------- + .. [1] Nelder, J A, and R Mead. 1965. A Simplex Method for Function + Minimization. The Computer Journal 7: 308-13. + .. [2] Wright M H. 1996. Direct search methods: Once scorned, now + respectable, in Numerical Analysis 1995: Proceedings of the 1995 + Dundee Biennial Conference in Numerical Analysis (Eds. D F + Griffiths and G A Watson). Addison Wesley Longman, Harlow, UK. + 191-208. + .. [3] Powell, M J D. 1964. An efficient method for finding the minimum of + a function of several variables without calculating derivatives. The + Computer Journal 7: 155-162. + .. [4] Press W, S A Teukolsky, W T Vetterling and B P Flannery. + Numerical Recipes (any edition), Cambridge University Press. + .. [5] Nocedal, J, and S J Wright. 2006. Numerical Optimization. + Springer New York. + .. [6] Byrd, R H and P Lu and J. Nocedal. 1995. A Limited Memory + Algorithm for Bound Constrained Optimization. SIAM Journal on + Scientific and Statistical Computing 16 (5): 1190-1208. + .. [7] Zhu, C and R H Byrd and J Nocedal. 1997. L-BFGS-B: Algorithm + 778: L-BFGS-B, FORTRAN routines for large scale bound constrained + optimization. 
ACM Transactions on Mathematical Software 23 (4): + 550-560. + .. [8] Nash, S G. Newton-Type Minimization Via the Lanczos Method. + 1984. SIAM Journal of Numerical Analysis 21: 770-778. + .. [9] Powell, M J D. A direct search optimization method that models + the objective and constraint functions by linear interpolation. + 1994. Advances in Optimization and Numerical Analysis, eds. S. Gomez + and J-P Hennart, Kluwer Academic (Dordrecht), 51-67. + .. [10] Powell M J D. Direct search algorithms for optimization + calculations. 1998. Acta Numerica 7: 287-336. + .. [11] Powell M J D. A view of algorithms for optimization without + derivatives. 2007.Cambridge University Technical Report DAMTP + 2007/NA03 + .. [12] Kraft, D. A software package for sequential quadratic + programming. 1988. Tech. Rep. DFVLR-FB 88-28, DLR German Aerospace + Center -- Institute for Flight Mechanics, Koln, Germany. + .. [13] Conn, A. R., Gould, N. I., and Toint, P. L. + Trust region methods. 2000. Siam. pp. 169-200. + .. [14] F. Lenders, C. Kirches, A. Potschka: "trlib: A vector-free + implementation of the GLTR method for iterative solution of + the trust region problem", :arxiv:`1611.04718` + .. [15] N. Gould, S. Lucidi, M. Roma, P. Toint: "Solving the + Trust-Region Subproblem using the Lanczos Method", + SIAM J. Optim., 9(2), 504--525, (1999). + .. [16] Byrd, Richard H., Mary E. Hribar, and Jorge Nocedal. 1999. + An interior point algorithm for large-scale nonlinear programming. + SIAM Journal on Optimization 9.4: 877-900. + .. [17] Lalee, Marucha, Jorge Nocedal, and Todd Plantega. 1998. On the + implementation of an algorithm for large-scale equality constrained + optimization. SIAM Journal on Optimization 8.3: 682-706. + .. [18] Ragonneau, T. M. *Model-Based Derivative-Free Optimization Methods + and Software*. PhD thesis, Department of Applied Mathematics, The Hong + Kong Polytechnic University, Hong Kong, China, 2022. URL: + https://theses.lib.polyu.edu.hk/handle/200/12294. 
+ + Examples + -------- + Let us consider the problem of minimizing the Rosenbrock function. This + function (and its respective derivatives) is implemented in `rosen` + (resp. `rosen_der`, `rosen_hess`) in the `scipy.optimize`. + + >>> from scipy.optimize import minimize, rosen, rosen_der + + A simple application of the *Nelder-Mead* method is: + + >>> x0 = [1.3, 0.7, 0.8, 1.9, 1.2] + >>> res = minimize(rosen, x0, method='Nelder-Mead', tol=1e-6) + >>> res.x + array([ 1., 1., 1., 1., 1.]) + + Now using the *BFGS* algorithm, using the first derivative and a few + options: + + >>> res = minimize(rosen, x0, method='BFGS', jac=rosen_der, + ... options={'gtol': 1e-6, 'disp': True}) + Optimization terminated successfully. + Current function value: 0.000000 + Iterations: 26 + Function evaluations: 31 + Gradient evaluations: 31 + >>> res.x + array([ 1., 1., 1., 1., 1.]) + >>> print(res.message) + Optimization terminated successfully. + >>> res.hess_inv + array([ + [ 0.00749589, 0.01255155, 0.02396251, 0.04750988, 0.09495377], # may vary + [ 0.01255155, 0.02510441, 0.04794055, 0.09502834, 0.18996269], + [ 0.02396251, 0.04794055, 0.09631614, 0.19092151, 0.38165151], + [ 0.04750988, 0.09502834, 0.19092151, 0.38341252, 0.7664427 ], + [ 0.09495377, 0.18996269, 0.38165151, 0.7664427, 1.53713523] + ]) + + + Next, consider a minimization problem with several constraints (namely + Example 16.4 from [5]_). The objective function is: + + >>> fun = lambda x: (x[0] - 1)**2 + (x[1] - 2.5)**2 + + There are three constraints defined as: + + >>> cons = ({'type': 'ineq', 'fun': lambda x: x[0] - 2 * x[1] + 2}, + ... {'type': 'ineq', 'fun': lambda x: -x[0] - 2 * x[1] + 6}, + ... {'type': 'ineq', 'fun': lambda x: -x[0] + 2 * x[1] + 2}) + + And variables must be positive, hence the following bounds: + + >>> bnds = ((0, None), (0, None)) + + The optimization problem is solved using the SLSQP method as: + + >>> res = minimize(fun, (2, 0), method='SLSQP', bounds=bnds, + ... 
constraints=cons) + + It should converge to the theoretical solution (1.4 ,1.7). + + """ + x0 = np.atleast_1d(np.asarray(x0)) + + if x0.ndim != 1: + raise ValueError("'x0' must only have one dimension.") + + if x0.dtype.kind in np.typecodes["AllInteger"]: + x0 = np.asarray(x0, dtype=float) + + if not isinstance(args, tuple): + args = (args,) + + if method is None: + # Select automatically + if constraints: + method = 'SLSQP' + elif bounds is not None: + method = 'L-BFGS-B' + else: + method = 'BFGS' + + if callable(method): + meth = "_custom" + else: + meth = method.lower() + + if options is None: + options = {} + # check if optional parameters are supported by the selected method + # - jac + if meth in ('nelder-mead', 'powell', 'cobyla', 'cobyqa') and bool(jac): + warn('Method %s does not use gradient information (jac).' % method, + RuntimeWarning, stacklevel=2) + # - hess + if meth not in ('newton-cg', 'dogleg', 'trust-ncg', 'trust-constr', + 'trust-krylov', 'trust-exact', '_custom') and hess is not None: + warn('Method %s does not use Hessian information (hess).' % method, + RuntimeWarning, stacklevel=2) + # - hessp + if meth not in ('newton-cg', 'trust-ncg', 'trust-constr', + 'trust-krylov', '_custom') \ + and hessp is not None: + warn('Method %s does not use Hessian-vector product ' + 'information (hessp).' % method, + RuntimeWarning, stacklevel=2) + # - constraints or bounds + if (meth not in ('cobyla', 'cobyqa', 'slsqp', 'trust-constr', '_custom') and + np.any(constraints)): + warn('Method %s cannot handle constraints.' % method, + RuntimeWarning, stacklevel=2) + if meth not in ( + 'nelder-mead', 'powell', 'l-bfgs-b', 'cobyla', 'cobyqa', 'slsqp', + 'tnc', 'trust-constr', '_custom') and bounds is not None: + warn('Method %s cannot handle bounds.' 
% method, + RuntimeWarning, stacklevel=2) + # - return_all + if (meth in ('l-bfgs-b', 'tnc', 'cobyla', 'cobyqa', 'slsqp') and + options.get('return_all', False)): + warn('Method %s does not support the return_all option.' % method, + RuntimeWarning, stacklevel=2) + + # check gradient vector + if callable(jac): + pass + elif jac is True: + # fun returns func and grad + fun = MemoizeJac(fun) + jac = fun.derivative + elif (jac in FD_METHODS and + meth in ['trust-constr', 'bfgs', 'cg', 'l-bfgs-b', 'tnc', 'slsqp']): + # finite differences with relative step + pass + elif meth in ['trust-constr']: + # default jac calculation for this method + jac = '2-point' + elif jac is None or bool(jac) is False: + # this will cause e.g. LBFGS to use forward difference, absolute step + jac = None + else: + # default if jac option is not understood + jac = None + + # set default tolerances + if tol is not None: + options = dict(options) + if meth == 'nelder-mead': + options.setdefault('xatol', tol) + options.setdefault('fatol', tol) + if meth in ('newton-cg', 'powell', 'tnc'): + options.setdefault('xtol', tol) + if meth in ('powell', 'l-bfgs-b', 'tnc', 'slsqp'): + options.setdefault('ftol', tol) + if meth in ('bfgs', 'cg', 'l-bfgs-b', 'tnc', 'dogleg', + 'trust-ncg', 'trust-exact', 'trust-krylov'): + options.setdefault('gtol', tol) + if meth in ('cobyla', '_custom'): + options.setdefault('tol', tol) + if meth == 'cobyqa': + options.setdefault('final_tr_radius', tol) + if meth == 'trust-constr': + options.setdefault('xtol', tol) + options.setdefault('gtol', tol) + options.setdefault('barrier_tol', tol) + + if meth == '_custom': + # custom method called before bounds and constraints are 'standardised' + # custom method should be able to accept whatever bounds/constraints + # are provided to it. 
+ return method(fun, x0, args=args, jac=jac, hess=hess, hessp=hessp, + bounds=bounds, constraints=constraints, + callback=callback, **options) + + constraints = standardize_constraints(constraints, x0, meth) + + remove_vars = False + if bounds is not None: + # convert to new-style bounds so we only have to consider one case + bounds = standardize_bounds(bounds, x0, 'new') + bounds = _validate_bounds(bounds, x0, meth) + + if meth in {"tnc", "slsqp", "l-bfgs-b"}: + # These methods can't take the finite-difference derivatives they + # need when a variable is fixed by the bounds. To avoid this issue, + # remove fixed variables from the problem. + # NOTE: if this list is expanded, then be sure to update the + # accompanying tests and test_optimize.eb_data. Consider also if + # default OptimizeResult will need updating. + + # determine whether any variables are fixed + i_fixed = (bounds.lb == bounds.ub) + + if np.all(i_fixed): + # all the parameters are fixed, a minimizer is not able to do + # anything + return _optimize_result_for_equal_bounds( + fun, bounds, meth, args=args, constraints=constraints + ) + + # determine whether finite differences are needed for any grad/jac + fd_needed = (not callable(jac)) + for con in constraints: + if not callable(con.get('jac', None)): + fd_needed = True + + # If finite differences are ever used, remove all fixed variables + # Always remove fixed variables for TNC; see gh-14565 + remove_vars = i_fixed.any() and (fd_needed or meth == "tnc") + if remove_vars: + x_fixed = (bounds.lb)[i_fixed] + x0 = x0[~i_fixed] + bounds = _remove_from_bounds(bounds, i_fixed) + fun = _remove_from_func(fun, i_fixed, x_fixed) + if callable(callback): + callback = _remove_from_func(callback, i_fixed, x_fixed) + if callable(jac): + jac = _remove_from_func(jac, i_fixed, x_fixed, remove=1) + + # make a copy of the constraints so the user's version doesn't + # get changed. 
(Shallow copy is ok) + constraints = [con.copy() for con in constraints] + for con in constraints: # yes, guaranteed to be a list + con['fun'] = _remove_from_func(con['fun'], i_fixed, + x_fixed, min_dim=1, + remove=0) + if callable(con.get('jac', None)): + con['jac'] = _remove_from_func(con['jac'], i_fixed, + x_fixed, min_dim=2, + remove=1) + bounds = standardize_bounds(bounds, x0, meth) + + callback = _wrap_callback(callback, meth) + + if meth == 'nelder-mead': + res = _minimize_neldermead(fun, x0, args, callback, bounds=bounds, + **options) + elif meth == 'powell': + res = _minimize_powell(fun, x0, args, callback, bounds, **options) + elif meth == 'cg': + res = _minimize_cg(fun, x0, args, jac, callback, **options) + elif meth == 'bfgs': + res = _minimize_bfgs(fun, x0, args, jac, callback, **options) + elif meth == 'newton-cg': + res = _minimize_newtoncg(fun, x0, args, jac, hess, hessp, callback, + **options) + elif meth == 'l-bfgs-b': + res = _minimize_lbfgsb(fun, x0, args, jac, bounds, + callback=callback, **options) + elif meth == 'tnc': + res = _minimize_tnc(fun, x0, args, jac, bounds, callback=callback, + **options) + elif meth == 'cobyla': + res = _minimize_cobyla(fun, x0, args, constraints, callback=callback, + bounds=bounds, **options) + elif meth == 'cobyqa': + res = _minimize_cobyqa(fun, x0, args, bounds, constraints, callback, + **options) + elif meth == 'slsqp': + res = _minimize_slsqp(fun, x0, args, jac, bounds, + constraints, callback=callback, **options) + elif meth == 'trust-constr': + res = _minimize_trustregion_constr(fun, x0, args, jac, hess, hessp, + bounds, constraints, + callback=callback, **options) + elif meth == 'dogleg': + res = _minimize_dogleg(fun, x0, args, jac, hess, + callback=callback, **options) + elif meth == 'trust-ncg': + res = _minimize_trust_ncg(fun, x0, args, jac, hess, hessp, + callback=callback, **options) + elif meth == 'trust-krylov': + res = _minimize_trust_krylov(fun, x0, args, jac, hess, hessp, + callback=callback, 
**options) + elif meth == 'trust-exact': + res = _minimize_trustregion_exact(fun, x0, args, jac, hess, + callback=callback, **options) + else: + raise ValueError('Unknown solver %s' % method) + + if remove_vars: + res.x = _add_to_array(res.x, i_fixed, x_fixed) + res.jac = _add_to_array(res.jac, i_fixed, np.nan) + if "hess_inv" in res: + res.hess_inv = None # unknown + + if getattr(callback, 'stop_iteration', False): + res.success = False + res.status = 99 + res.message = "`callback` raised `StopIteration`." + + return res + + +def minimize_scalar(fun, bracket=None, bounds=None, args=(), + method=None, tol=None, options=None): + """Local minimization of scalar function of one variable. + + Parameters + ---------- + fun : callable + Objective function. + Scalar function, must return a scalar. + bracket : sequence, optional + For methods 'brent' and 'golden', `bracket` defines the bracketing + interval and is required. + Either a triple ``(xa, xb, xc)`` satisfying ``xa < xb < xc`` and + ``func(xb) < func(xa) and func(xb) < func(xc)``, or a pair + ``(xa, xb)`` to be used as initial points for a downhill bracket search + (see `scipy.optimize.bracket`). + The minimizer ``res.x`` will not necessarily satisfy + ``xa <= res.x <= xb``. + bounds : sequence, optional + For method 'bounded', `bounds` is mandatory and must have two finite + items corresponding to the optimization bounds. + args : tuple, optional + Extra arguments passed to the objective function. + method : str or callable, optional + Type of solver. Should be one of: + + - :ref:`Brent ` + - :ref:`Bounded ` + - :ref:`Golden ` + - custom - a callable object (added in version 0.14.0), see below + + Default is "Bounded" if bounds are provided and "Brent" otherwise. + See the 'Notes' section for details of each solver. + + tol : float, optional + Tolerance for termination. For detailed control, use solver-specific + options. + options : dict, optional + A dictionary of solver options. 
+ + maxiter : int + Maximum number of iterations to perform. + disp : bool + Set to True to print convergence messages. + + See :func:`show_options()` for solver-specific options. + + Returns + ------- + res : OptimizeResult + The optimization result represented as a ``OptimizeResult`` object. + Important attributes are: ``x`` the solution array, ``success`` a + Boolean flag indicating if the optimizer exited successfully and + ``message`` which describes the cause of the termination. See + `OptimizeResult` for a description of other attributes. + + See also + -------- + minimize : Interface to minimization algorithms for scalar multivariate + functions + show_options : Additional options accepted by the solvers + + Notes + ----- + This section describes the available solvers that can be selected by the + 'method' parameter. The default method is the ``"Bounded"`` Brent method if + `bounds` are passed and unbounded ``"Brent"`` otherwise. + + Method :ref:`Brent ` uses Brent's + algorithm [1]_ to find a local minimum. The algorithm uses inverse + parabolic interpolation when possible to speed up convergence of + the golden section method. + + Method :ref:`Golden ` uses the + golden section search technique [1]_. It uses analog of the bisection + method to decrease the bracketed interval. It is usually + preferable to use the *Brent* method. + + Method :ref:`Bounded ` can + perform bounded minimization [2]_ [3]_. It uses the Brent method to find a + local minimum in the interval x1 < xopt < x2. + + Note that the Brent and Golden methods do not guarantee success unless a + valid ``bracket`` triple is provided. If a three-point bracket cannot be + found, consider `scipy.optimize.minimize`. Also, all methods are intended + only for local minimization. When the function of interest has more than + one local minimum, consider :ref:`global_optimization`. 
+ + **Custom minimizers** + + It may be useful to pass a custom minimization method, for example + when using some library frontend to minimize_scalar. You can simply + pass a callable as the ``method`` parameter. + + The callable is called as ``method(fun, args, **kwargs, **options)`` + where ``kwargs`` corresponds to any other parameters passed to `minimize` + (such as `bracket`, `tol`, etc.), except the `options` dict, which has + its contents also passed as `method` parameters pair by pair. The method + shall return an `OptimizeResult` object. + + The provided `method` callable must be able to accept (and possibly ignore) + arbitrary parameters; the set of parameters accepted by `minimize` may + expand in future versions and then these parameters will be passed to + the method. You can find an example in the scipy.optimize tutorial. + + .. versionadded:: 0.11.0 + + References + ---------- + .. [1] Press, W., S.A. Teukolsky, W.T. Vetterling, and B.P. Flannery. + Numerical Recipes in C. Cambridge University Press. + .. [2] Forsythe, G.E., M. A. Malcolm, and C. B. Moler. "Computer Methods + for Mathematical Computations." Prentice-Hall Series in Automatic + Computation 259 (1977). + .. [3] Brent, Richard P. Algorithms for Minimization Without Derivatives. + Courier Corporation, 2013. + + Examples + -------- + Consider the problem of minimizing the following function. + + >>> def f(x): + ... 
return (x - 2) * x * (x + 2)**2 + + Using the *Brent* method, we find the local minimum as: + + >>> from scipy.optimize import minimize_scalar + >>> res = minimize_scalar(f) + >>> res.fun + -9.9149495908 + + The minimizer is: + + >>> res.x + 1.28077640403 + + Using the *Bounded* method, we find a local minimum with specified + bounds as: + + >>> res = minimize_scalar(f, bounds=(-3, -1), method='bounded') + >>> res.fun # minimum + 3.28365179850e-13 + >>> res.x # minimizer + -2.0000002026 + + """ + if not isinstance(args, tuple): + args = (args,) + + if callable(method): + meth = "_custom" + elif method is None: + meth = 'brent' if bounds is None else 'bounded' + else: + meth = method.lower() + if options is None: + options = {} + + if bounds is not None and meth in {'brent', 'golden'}: + message = f"Use of `bounds` is incompatible with 'method={method}'." + raise ValueError(message) + + if tol is not None: + options = dict(options) + if meth == 'bounded' and 'xatol' not in options: + warn("Method 'bounded' does not support relative tolerance in x; " + "defaulting to absolute tolerance.", + RuntimeWarning, stacklevel=2) + options['xatol'] = tol + elif meth == '_custom': + options.setdefault('tol', tol) + else: + options.setdefault('xtol', tol) + + # replace boolean "disp" option, if specified, by an integer value. 
+ disp = options.get('disp') + if isinstance(disp, bool): + options['disp'] = 2 * int(disp) + + if meth == '_custom': + res = method(fun, args=args, bracket=bracket, bounds=bounds, **options) + elif meth == 'brent': + res = _recover_from_bracket_error(_minimize_scalar_brent, + fun, bracket, args, **options) + elif meth == 'bounded': + if bounds is None: + raise ValueError('The `bounds` parameter is mandatory for ' + 'method `bounded`.') + res = _minimize_scalar_bounded(fun, bounds, args, **options) + elif meth == 'golden': + res = _recover_from_bracket_error(_minimize_scalar_golden, + fun, bracket, args, **options) + else: + raise ValueError('Unknown solver %s' % method) + + # gh-16196 reported inconsistencies in the output shape of `res.x`. While + # fixing this, future-proof it for when the function is vectorized: + # the shape of `res.x` should match that of `res.fun`. + res.fun = np.asarray(res.fun)[()] + res.x = np.reshape(res.x, res.fun.shape)[()] + return res + + +def _remove_from_bounds(bounds, i_fixed): + """Removes fixed variables from a `Bounds` instance""" + lb = bounds.lb[~i_fixed] + ub = bounds.ub[~i_fixed] + return Bounds(lb, ub) # don't mutate original Bounds object + + +def _remove_from_func(fun_in, i_fixed, x_fixed, min_dim=None, remove=0): + """Wraps a function such that fixed variables need not be passed in""" + def fun_out(x_in, *args, **kwargs): + x_out = np.zeros_like(i_fixed, dtype=x_in.dtype) + x_out[i_fixed] = x_fixed + x_out[~i_fixed] = x_in + y_out = fun_in(x_out, *args, **kwargs) + y_out = np.array(y_out) + + if min_dim == 1: + y_out = np.atleast_1d(y_out) + elif min_dim == 2: + y_out = np.atleast_2d(y_out) + + if remove == 1: + y_out = y_out[..., ~i_fixed] + elif remove == 2: + y_out = y_out[~i_fixed, ~i_fixed] + + return y_out + return fun_out + + +def _add_to_array(x_in, i_fixed, x_fixed): + """Adds fixed variables back to an array""" + i_free = ~i_fixed + if x_in.ndim == 2: + i_free = i_free[:, None] @ i_free[None, :] + x_out = 
np.zeros_like(i_free, dtype=x_in.dtype) + x_out[~i_free] = x_fixed + x_out[i_free] = x_in.ravel() + return x_out + + +def _validate_bounds(bounds, x0, meth): + """Check that bounds are valid.""" + + msg = "An upper bound is less than the corresponding lower bound." + if np.any(bounds.ub < bounds.lb): + raise ValueError(msg) + + msg = "The number of bounds is not compatible with the length of `x0`." + try: + bounds.lb = np.broadcast_to(bounds.lb, x0.shape) + bounds.ub = np.broadcast_to(bounds.ub, x0.shape) + except Exception as e: + raise ValueError(msg) from e + + return bounds + +def standardize_bounds(bounds, x0, meth): + """Converts bounds to the form required by the solver.""" + if meth in {'trust-constr', 'powell', 'nelder-mead', 'cobyla', 'cobyqa', + 'new'}: + if not isinstance(bounds, Bounds): + lb, ub = old_bound_to_new(bounds) + bounds = Bounds(lb, ub) + elif meth in ('l-bfgs-b', 'tnc', 'slsqp', 'old'): + if isinstance(bounds, Bounds): + bounds = new_bounds_to_old(bounds.lb, bounds.ub, x0.shape[0]) + return bounds + + +def standardize_constraints(constraints, x0, meth): + """Converts constraints to the form required by the solver.""" + all_constraint_types = (NonlinearConstraint, LinearConstraint, dict) + new_constraint_types = all_constraint_types[:-1] + if constraints is None: + constraints = [] + elif isinstance(constraints, all_constraint_types): + constraints = [constraints] + else: + constraints = list(constraints) # ensure it's a mutable sequence + + if meth in ['trust-constr', 'cobyqa', 'new']: + for i, con in enumerate(constraints): + if not isinstance(con, new_constraint_types): + constraints[i] = old_constraint_to_new(i, con) + else: + # iterate over copy, changing original + for i, con in enumerate(list(constraints)): + if isinstance(con, new_constraint_types): + old_constraints = new_constraint_to_old(con, x0) + constraints[i] = old_constraints[0] + constraints.extend(old_constraints[1:]) # appends 1 if present + + return constraints + + +def 
_optimize_result_for_equal_bounds( + fun, bounds, method, args=(), constraints=() +): + """ + Provides a default OptimizeResult for when a bounded minimization method + has (lb == ub).all(). + + Parameters + ---------- + fun: callable + bounds: Bounds + method: str + constraints: Constraint + """ + success = True + message = 'All independent variables were fixed by bounds.' + + # bounds is new-style + x0 = bounds.lb + + if constraints: + message = ("All independent variables were fixed by bounds at values" + " that satisfy the constraints.") + constraints = standardize_constraints(constraints, x0, 'new') + + maxcv = 0 + for c in constraints: + pc = PreparedConstraint(c, x0) + violation = pc.violation(x0) + if np.sum(violation): + maxcv = max(maxcv, np.max(violation)) + success = False + message = (f"All independent variables were fixed by bounds, but " + f"the independent variables do not satisfy the " + f"constraints exactly. (Maximum violation: {maxcv}).") + + return OptimizeResult( + x=x0, fun=fun(x0, *args), success=success, message=message, nfev=1, + njev=0, nhev=0, + ) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minpack2.cpython-310-x86_64-linux-gnu.so b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minpack2.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f816e37ac89a947c5086a2a48830b8e73956382b Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_minpack2.cpython-310-x86_64-linux-gnu.so differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_moduleTNC.cpython-310-x86_64-linux-gnu.so b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_moduleTNC.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..052cf91b5917f4e7484ba340de6273c7f7d00fb0 --- /dev/null +++ 
b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_moduleTNC.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7584b3d74b2c7f2804c049af2291355762236b8a294520a6c7a83085ac11544 +size 152168 diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nnls.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nnls.py new file mode 100644 index 0000000000000000000000000000000000000000..17fcdc9e4cc52b1839cd938f21a78256cfb19436 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nnls.py @@ -0,0 +1,164 @@ +import numpy as np +from scipy.linalg import solve, LinAlgWarning +import warnings + +__all__ = ['nnls'] + + +def nnls(A, b, maxiter=None, *, atol=None): + """ + Solve ``argmin_x || Ax - b ||_2`` for ``x>=0``. + + This problem, often called as NonNegative Least Squares, is a convex + optimization problem with convex constraints. It typically arises when + the ``x`` models quantities for which only nonnegative values are + attainable; weight of ingredients, component costs and so on. + + Parameters + ---------- + A : (m, n) ndarray + Coefficient array + b : (m,) ndarray, float + Right-hand side vector. + maxiter: int, optional + Maximum number of iterations, optional. Default value is ``3 * n``. + atol: float + Tolerance value used in the algorithm to assess closeness to zero in + the projected residual ``(A.T @ (A x - b)`` entries. Increasing this + value relaxes the solution constraints. A typical relaxation value can + be selected as ``max(m, n) * np.linalg.norm(a, 1) * np.spacing(1.)``. + This value is not set as default since the norm operation becomes + expensive for large problems hence can be used only when necessary. + + Returns + ------- + x : ndarray + Solution vector. + rnorm : float + The 2-norm of the residual, ``|| Ax-b ||_2``. 
+ + See Also + -------- + lsq_linear : Linear least squares with bounds on the variables + + Notes + ----- + The code is based on [2]_ which is an improved version of the classical + algorithm of [1]_. It utilizes an active set method and solves the KKT + (Karush-Kuhn-Tucker) conditions for the non-negative least squares problem. + + References + ---------- + .. [1] : Lawson C., Hanson R.J., "Solving Least Squares Problems", SIAM, + 1995, :doi:`10.1137/1.9781611971217` + .. [2] : Bro, Rasmus and de Jong, Sijmen, "A Fast Non-Negativity- + Constrained Least Squares Algorithm", Journal Of Chemometrics, 1997, + :doi:`10.1002/(SICI)1099-128X(199709/10)11:5<393::AID-CEM483>3.0.CO;2-L` + + Examples + -------- + >>> import numpy as np + >>> from scipy.optimize import nnls + ... + >>> A = np.array([[1, 0], [1, 0], [0, 1]]) + >>> b = np.array([2, 1, 1]) + >>> nnls(A, b) + (array([1.5, 1. ]), 0.7071067811865475) + + >>> b = np.array([-1, -1, -1]) + >>> nnls(A, b) + (array([0., 0.]), 1.7320508075688772) + + """ + + A = np.asarray_chkfinite(A) + b = np.asarray_chkfinite(b) + + if len(A.shape) != 2: + raise ValueError("Expected a two-dimensional array (matrix)" + + f", but the shape of A is {A.shape}") + if len(b.shape) != 1: + raise ValueError("Expected a one-dimensional array (vector)" + + f", but the shape of b is {b.shape}") + + m, n = A.shape + + if m != b.shape[0]: + raise ValueError( + "Incompatible dimensions. The first dimension of " + + f"A is {m}, while the shape of b is {(b.shape[0], )}") + + x, rnorm, mode = _nnls(A, b, maxiter, tol=atol) + if mode != 1: + raise RuntimeError("Maximum number of iterations reached.") + + return x, rnorm + + +def _nnls(A, b, maxiter=None, tol=None): + """ + This is a single RHS algorithm from ref [2] above. 
For multiple RHS + support, the algorithm is given in :doi:`10.1002/cem.889` + """ + m, n = A.shape + + AtA = A.T @ A + Atb = b @ A # Result is 1D - let NumPy figure it out + + if not maxiter: + maxiter = 3*n + if tol is None: + tol = 10 * max(m, n) * np.spacing(1.) + + # Initialize vars + x = np.zeros(n, dtype=np.float64) + s = np.zeros(n, dtype=np.float64) + # Inactive constraint switches + P = np.zeros(n, dtype=bool) + + # Projected residual + w = Atb.copy().astype(np.float64) # x=0. Skip (-AtA @ x) term + + # Overall iteration counter + # Outer loop is not counted, inner iter is counted across outer spins + iter = 0 + + while (not P.all()) and (w[~P] > tol).any(): # B + # Get the "most" active coeff index and move to inactive set + k = np.argmax(w * (~P)) # B.2 + P[k] = True # B.3 + + # Iteration solution + s[:] = 0. + # B.4 + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', message='Ill-conditioned matrix', + category=LinAlgWarning) + s[P] = solve(AtA[np.ix_(P, P)], Atb[P], assume_a='sym', check_finite=False) + + # Inner loop + while (iter < maxiter) and (s[P].min() < 0): # C.1 + iter += 1 + inds = P * (s < 0) + alpha = (x[inds] / (x[inds] - s[inds])).min() # C.2 + x *= (1 - alpha) + x += alpha*s + P[x <= tol] = False + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', message='Ill-conditioned matrix', + category=LinAlgWarning) + s[P] = solve(AtA[np.ix_(P, P)], Atb[P], assume_a='sym', + check_finite=False) + s[~P] = 0 # C.6 + + x[:] = s[:] + w[:] = Atb - AtA @ x + + if iter == maxiter: + # Typically following line should return + # return x, np.linalg.norm(A@x - b), -1 + # however at the top level, -1 raises an exception wasting norm + # Instead return dummy number 0. 
+ return x, 0., -1 + + return x, np.linalg.norm(A@x - b), 1 diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nonlin.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nonlin.py new file mode 100644 index 0000000000000000000000000000000000000000..cbaa3d4ced448df492e965cffe39e99f593c8895 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_nonlin.py @@ -0,0 +1,1585 @@ +# Copyright (C) 2009, Pauli Virtanen +# Distributed under the same license as SciPy. + +import inspect +import sys +import warnings + +import numpy as np +from numpy import asarray, dot, vdot + +from scipy.linalg import norm, solve, inv, qr, svd, LinAlgError +import scipy.sparse.linalg +import scipy.sparse +from scipy.linalg import get_blas_funcs +from scipy._lib._util import copy_if_needed +from scipy._lib._util import getfullargspec_no_self as _getfullargspec +from ._linesearch import scalar_search_wolfe1, scalar_search_armijo + + +__all__ = [ + 'broyden1', 'broyden2', 'anderson', 'linearmixing', + 'diagbroyden', 'excitingmixing', 'newton_krylov', + 'BroydenFirst', 'KrylovJacobian', 'InverseJacobian', 'NoConvergence'] + +#------------------------------------------------------------------------------ +# Utility functions +#------------------------------------------------------------------------------ + + +class NoConvergence(Exception): + """Exception raised when nonlinear solver fails to converge within the specified + `maxiter`.""" + pass + + +def maxnorm(x): + return np.absolute(x).max() + + +def _as_inexact(x): + """Return `x` as an array, of either floats or complex floats""" + x = asarray(x) + if not np.issubdtype(x.dtype, np.inexact): + return asarray(x, dtype=np.float64) + return x + + +def _array_like(x, x0): + """Return ndarray `x` as same array subclass and shape as `x0`""" + x = np.reshape(x, np.shape(x0)) + wrap = getattr(x0, '__array_wrap__', x.__array_wrap__) + return wrap(x) + + +def _safe_norm(v): + if not 
np.isfinite(v).all(): + return np.array(np.inf) + return norm(v) + +#------------------------------------------------------------------------------ +# Generic nonlinear solver machinery +#------------------------------------------------------------------------------ + + +_doc_parts = dict( + params_basic=""" + F : function(x) -> f + Function whose root to find; should take and return an array-like + object. + xin : array_like + Initial guess for the solution + """.strip(), + params_extra=""" + iter : int, optional + Number of iterations to make. If omitted (default), make as many + as required to meet tolerances. + verbose : bool, optional + Print status to stdout on every iteration. + maxiter : int, optional + Maximum number of iterations to make. If more are needed to + meet convergence, `NoConvergence` is raised. + f_tol : float, optional + Absolute tolerance (in max-norm) for the residual. + If omitted, default is 6e-6. + f_rtol : float, optional + Relative tolerance for the residual. If omitted, not used. + x_tol : float, optional + Absolute minimum step size, as determined from the Jacobian + approximation. If the step size is smaller than this, optimization + is terminated as successful. If omitted, not used. + x_rtol : float, optional + Relative minimum step size. If omitted, not used. + tol_norm : function(vector) -> scalar, optional + Norm to use in convergence check. Default is the maximum norm. + line_search : {None, 'armijo' (default), 'wolfe'}, optional + Which type of a line search to use to determine the step size in the + direction given by the Jacobian approximation. Defaults to 'armijo'. + callback : function, optional + Optional callback function. It is called on every iteration as + ``callback(x, f)`` where `x` is the current solution and `f` + the corresponding residual. + + Returns + ------- + sol : ndarray + An array (of similar array type as `x0`) containing the final solution. 
+ + Raises + ------ + NoConvergence + When a solution was not found. + + """.strip() +) + + +def _set_doc(obj): + if obj.__doc__: + obj.__doc__ = obj.__doc__ % _doc_parts + + +def nonlin_solve(F, x0, jacobian='krylov', iter=None, verbose=False, + maxiter=None, f_tol=None, f_rtol=None, x_tol=None, x_rtol=None, + tol_norm=None, line_search='armijo', callback=None, + full_output=False, raise_exception=True): + """ + Find a root of a function, in a way suitable for large-scale problems. + + Parameters + ---------- + %(params_basic)s + jacobian : Jacobian + A Jacobian approximation: `Jacobian` object or something that + `asjacobian` can transform to one. Alternatively, a string specifying + which of the builtin Jacobian approximations to use: + + krylov, broyden1, broyden2, anderson + diagbroyden, linearmixing, excitingmixing + + %(params_extra)s + full_output : bool + If true, returns a dictionary `info` containing convergence + information. + raise_exception : bool + If True, a `NoConvergence` exception is raise if no solution is found. + + See Also + -------- + asjacobian, Jacobian + + Notes + ----- + This algorithm implements the inexact Newton method, with + backtracking or full line searches. Several Jacobian + approximations are available, including Krylov and Quasi-Newton + methods. + + References + ---------- + .. [KIM] C. T. Kelley, \"Iterative Methods for Linear and Nonlinear + Equations\". Society for Industrial and Applied Mathematics. (1995) + https://archive.siam.org/books/kelley/fr16/ + + """ + # Can't use default parameters because it's being explicitly passed as None + # from the calling function, so we need to set it here. 
+ tol_norm = maxnorm if tol_norm is None else tol_norm + condition = TerminationCondition(f_tol=f_tol, f_rtol=f_rtol, + x_tol=x_tol, x_rtol=x_rtol, + iter=iter, norm=tol_norm) + + x0 = _as_inexact(x0) + def func(z): + return _as_inexact(F(_array_like(z, x0))).flatten() + x = x0.flatten() + + dx = np.full_like(x, np.inf) + Fx = func(x) + Fx_norm = norm(Fx) + + jacobian = asjacobian(jacobian) + jacobian.setup(x.copy(), Fx, func) + + if maxiter is None: + if iter is not None: + maxiter = iter + 1 + else: + maxiter = 100*(x.size+1) + + if line_search is True: + line_search = 'armijo' + elif line_search is False: + line_search = None + + if line_search not in (None, 'armijo', 'wolfe'): + raise ValueError("Invalid line search") + + # Solver tolerance selection + gamma = 0.9 + eta_max = 0.9999 + eta_treshold = 0.1 + eta = 1e-3 + + for n in range(maxiter): + status = condition.check(Fx, x, dx) + if status: + break + + # The tolerance, as computed for scipy.sparse.linalg.* routines + tol = min(eta, eta*Fx_norm) + dx = -jacobian.solve(Fx, tol=tol) + + if norm(dx) == 0: + raise ValueError("Jacobian inversion yielded zero vector. 
" + "This indicates a bug in the Jacobian " + "approximation.") + + # Line search, or Newton step + if line_search: + s, x, Fx, Fx_norm_new = _nonlin_line_search(func, x, Fx, dx, + line_search) + else: + s = 1.0 + x = x + dx + Fx = func(x) + Fx_norm_new = norm(Fx) + + jacobian.update(x.copy(), Fx) + + if callback: + callback(x, Fx) + + # Adjust forcing parameters for inexact methods + eta_A = gamma * Fx_norm_new**2 / Fx_norm**2 + if gamma * eta**2 < eta_treshold: + eta = min(eta_max, eta_A) + else: + eta = min(eta_max, max(eta_A, gamma*eta**2)) + + Fx_norm = Fx_norm_new + + # Print status + if verbose: + sys.stdout.write("%d: |F(x)| = %g; step %g\n" % ( + n, tol_norm(Fx), s)) + sys.stdout.flush() + else: + if raise_exception: + raise NoConvergence(_array_like(x, x0)) + else: + status = 2 + + if full_output: + info = {'nit': condition.iteration, + 'fun': Fx, + 'status': status, + 'success': status == 1, + 'message': {1: 'A solution was found at the specified ' + 'tolerance.', + 2: 'The maximum number of iterations allowed ' + 'has been reached.' + }[status] + } + return _array_like(x, x0), info + else: + return _array_like(x, x0) + + +_set_doc(nonlin_solve) + + +def _nonlin_line_search(func, x, Fx, dx, search_type='armijo', rdiff=1e-8, + smin=1e-2): + tmp_s = [0] + tmp_Fx = [Fx] + tmp_phi = [norm(Fx)**2] + s_norm = norm(x) / norm(dx) + + def phi(s, store=True): + if s == tmp_s[0]: + return tmp_phi[0] + xt = x + s*dx + v = func(xt) + p = _safe_norm(v)**2 + if store: + tmp_s[0] = s + tmp_phi[0] = p + tmp_Fx[0] = v + return p + + def derphi(s): + ds = (abs(s) + s_norm + 1) * rdiff + return (phi(s+ds, store=False) - phi(s)) / ds + + if search_type == 'wolfe': + s, phi1, phi0 = scalar_search_wolfe1(phi, derphi, tmp_phi[0], + xtol=1e-2, amin=smin) + elif search_type == 'armijo': + s, phi1 = scalar_search_armijo(phi, tmp_phi[0], -tmp_phi[0], + amin=smin) + + if s is None: + # XXX: No suitable step length found. Take the full Newton step, + # and hope for the best. 
+ s = 1.0 + + x = x + s*dx + if s == tmp_s[0]: + Fx = tmp_Fx[0] + else: + Fx = func(x) + Fx_norm = norm(Fx) + + return s, x, Fx, Fx_norm + + +class TerminationCondition: + """ + Termination condition for an iteration. It is terminated if + + - |F| < f_rtol*|F_0|, AND + - |F| < f_tol + + AND + + - |dx| < x_rtol*|x|, AND + - |dx| < x_tol + + """ + def __init__(self, f_tol=None, f_rtol=None, x_tol=None, x_rtol=None, + iter=None, norm=maxnorm): + + if f_tol is None: + f_tol = np.finfo(np.float64).eps ** (1./3) + if f_rtol is None: + f_rtol = np.inf + if x_tol is None: + x_tol = np.inf + if x_rtol is None: + x_rtol = np.inf + + self.x_tol = x_tol + self.x_rtol = x_rtol + self.f_tol = f_tol + self.f_rtol = f_rtol + + self.norm = norm + + self.iter = iter + + self.f0_norm = None + self.iteration = 0 + + def check(self, f, x, dx): + self.iteration += 1 + f_norm = self.norm(f) + x_norm = self.norm(x) + dx_norm = self.norm(dx) + + if self.f0_norm is None: + self.f0_norm = f_norm + + if f_norm == 0: + return 1 + + if self.iter is not None: + # backwards compatibility with SciPy 0.6.0 + return 2 * (self.iteration > self.iter) + + # NB: condition must succeed for rtol=inf even if norm == 0 + return int((f_norm <= self.f_tol + and f_norm/self.f_rtol <= self.f0_norm) + and (dx_norm <= self.x_tol + and dx_norm/self.x_rtol <= x_norm)) + + +#------------------------------------------------------------------------------ +# Generic Jacobian approximation +#------------------------------------------------------------------------------ + +class Jacobian: + """ + Common interface for Jacobians or Jacobian approximations. + + The optional methods come useful when implementing trust region + etc., algorithms that often require evaluating transposes of the + Jacobian. 
+ + Methods + ------- + solve + Returns J^-1 * v + update + Updates Jacobian to point `x` (where the function has residual `Fx`) + + matvec : optional + Returns J * v + rmatvec : optional + Returns A^H * v + rsolve : optional + Returns A^-H * v + matmat : optional + Returns A * V, where V is a dense matrix with dimensions (N,K). + todense : optional + Form the dense Jacobian matrix. Necessary for dense trust region + algorithms, and useful for testing. + + Attributes + ---------- + shape + Matrix dimensions (M, N) + dtype + Data type of the matrix. + func : callable, optional + Function the Jacobian corresponds to + + """ + + def __init__(self, **kw): + names = ["solve", "update", "matvec", "rmatvec", "rsolve", + "matmat", "todense", "shape", "dtype"] + for name, value in kw.items(): + if name not in names: + raise ValueError("Unknown keyword argument %s" % name) + if value is not None: + setattr(self, name, kw[name]) + + + if hasattr(self, "todense"): + def __array__(self, dtype=None, copy=None): + if dtype is not None: + raise ValueError(f"`dtype` must be None, was {dtype}") + return self.todense() + + def aspreconditioner(self): + return InverseJacobian(self) + + def solve(self, v, tol=0): + raise NotImplementedError + + def update(self, x, F): + pass + + def setup(self, x, F, func): + self.func = func + self.shape = (F.size, x.size) + self.dtype = F.dtype + if self.__class__.setup is Jacobian.setup: + # Call on the first point unless overridden + self.update(x, F) + + +class InverseJacobian: + def __init__(self, jacobian): + self.jacobian = jacobian + self.matvec = jacobian.solve + self.update = jacobian.update + if hasattr(jacobian, 'setup'): + self.setup = jacobian.setup + if hasattr(jacobian, 'rsolve'): + self.rmatvec = jacobian.rsolve + + @property + def shape(self): + return self.jacobian.shape + + @property + def dtype(self): + return self.jacobian.dtype + + +def asjacobian(J): + """ + Convert given object to one suitable for use as a Jacobian. 
+ """ + spsolve = scipy.sparse.linalg.spsolve + if isinstance(J, Jacobian): + return J + elif inspect.isclass(J) and issubclass(J, Jacobian): + return J() + elif isinstance(J, np.ndarray): + if J.ndim > 2: + raise ValueError('array must have rank <= 2') + J = np.atleast_2d(np.asarray(J)) + if J.shape[0] != J.shape[1]: + raise ValueError('array must be square') + + return Jacobian(matvec=lambda v: dot(J, v), + rmatvec=lambda v: dot(J.conj().T, v), + solve=lambda v, tol=0: solve(J, v), + rsolve=lambda v, tol=0: solve(J.conj().T, v), + dtype=J.dtype, shape=J.shape) + elif scipy.sparse.issparse(J): + if J.shape[0] != J.shape[1]: + raise ValueError('matrix must be square') + return Jacobian(matvec=lambda v: J @ v, + rmatvec=lambda v: J.conj().T @ v, + solve=lambda v, tol=0: spsolve(J, v), + rsolve=lambda v, tol=0: spsolve(J.conj().T, v), + dtype=J.dtype, shape=J.shape) + elif hasattr(J, 'shape') and hasattr(J, 'dtype') and hasattr(J, 'solve'): + return Jacobian(matvec=getattr(J, 'matvec'), + rmatvec=getattr(J, 'rmatvec'), + solve=J.solve, + rsolve=getattr(J, 'rsolve'), + update=getattr(J, 'update'), + setup=getattr(J, 'setup'), + dtype=J.dtype, + shape=J.shape) + elif callable(J): + # Assume it's a function J(x) that returns the Jacobian + class Jac(Jacobian): + def update(self, x, F): + self.x = x + + def solve(self, v, tol=0): + m = J(self.x) + if isinstance(m, np.ndarray): + return solve(m, v) + elif scipy.sparse.issparse(m): + return spsolve(m, v) + else: + raise ValueError("Unknown matrix type") + + def matvec(self, v): + m = J(self.x) + if isinstance(m, np.ndarray): + return dot(m, v) + elif scipy.sparse.issparse(m): + return m @ v + else: + raise ValueError("Unknown matrix type") + + def rsolve(self, v, tol=0): + m = J(self.x) + if isinstance(m, np.ndarray): + return solve(m.conj().T, v) + elif scipy.sparse.issparse(m): + return spsolve(m.conj().T, v) + else: + raise ValueError("Unknown matrix type") + + def rmatvec(self, v): + m = J(self.x) + if isinstance(m, 
np.ndarray): + return dot(m.conj().T, v) + elif scipy.sparse.issparse(m): + return m.conj().T @ v + else: + raise ValueError("Unknown matrix type") + return Jac() + elif isinstance(J, str): + return dict(broyden1=BroydenFirst, + broyden2=BroydenSecond, + anderson=Anderson, + diagbroyden=DiagBroyden, + linearmixing=LinearMixing, + excitingmixing=ExcitingMixing, + krylov=KrylovJacobian)[J]() + else: + raise TypeError('Cannot convert object to a Jacobian') + + +#------------------------------------------------------------------------------ +# Broyden +#------------------------------------------------------------------------------ + +class GenericBroyden(Jacobian): + def setup(self, x0, f0, func): + Jacobian.setup(self, x0, f0, func) + self.last_f = f0 + self.last_x = x0 + + if hasattr(self, 'alpha') and self.alpha is None: + # Autoscale the initial Jacobian parameter + # unless we have already guessed the solution. + normf0 = norm(f0) + if normf0: + self.alpha = 0.5*max(norm(x0), 1) / normf0 + else: + self.alpha = 1.0 + + def _update(self, x, f, dx, df, dx_norm, df_norm): + raise NotImplementedError + + def update(self, x, f): + df = f - self.last_f + dx = x - self.last_x + self._update(x, f, dx, df, norm(dx), norm(df)) + self.last_f = f + self.last_x = x + + +class LowRankMatrix: + r""" + A matrix represented as + + .. math:: \alpha I + \sum_{n=0}^{n=M} c_n d_n^\dagger + + However, if the rank of the matrix reaches the dimension of the vectors, + full matrix representation will be used thereon. 
+ + """ + + def __init__(self, alpha, n, dtype): + self.alpha = alpha + self.cs = [] + self.ds = [] + self.n = n + self.dtype = dtype + self.collapsed = None + + @staticmethod + def _matvec(v, alpha, cs, ds): + axpy, scal, dotc = get_blas_funcs(['axpy', 'scal', 'dotc'], + cs[:1] + [v]) + w = alpha * v + for c, d in zip(cs, ds): + a = dotc(d, v) + w = axpy(c, w, w.size, a) + return w + + @staticmethod + def _solve(v, alpha, cs, ds): + """Evaluate w = M^-1 v""" + if len(cs) == 0: + return v/alpha + + # (B + C D^H)^-1 = B^-1 - B^-1 C (I + D^H B^-1 C)^-1 D^H B^-1 + + axpy, dotc = get_blas_funcs(['axpy', 'dotc'], cs[:1] + [v]) + + c0 = cs[0] + A = alpha * np.identity(len(cs), dtype=c0.dtype) + for i, d in enumerate(ds): + for j, c in enumerate(cs): + A[i,j] += dotc(d, c) + + q = np.zeros(len(cs), dtype=c0.dtype) + for j, d in enumerate(ds): + q[j] = dotc(d, v) + q /= alpha + q = solve(A, q) + + w = v/alpha + for c, qc in zip(cs, q): + w = axpy(c, w, w.size, -qc) + + return w + + def matvec(self, v): + """Evaluate w = M v""" + if self.collapsed is not None: + return np.dot(self.collapsed, v) + return LowRankMatrix._matvec(v, self.alpha, self.cs, self.ds) + + def rmatvec(self, v): + """Evaluate w = M^H v""" + if self.collapsed is not None: + return np.dot(self.collapsed.T.conj(), v) + return LowRankMatrix._matvec(v, np.conj(self.alpha), self.ds, self.cs) + + def solve(self, v, tol=0): + """Evaluate w = M^-1 v""" + if self.collapsed is not None: + return solve(self.collapsed, v) + return LowRankMatrix._solve(v, self.alpha, self.cs, self.ds) + + def rsolve(self, v, tol=0): + """Evaluate w = M^-H v""" + if self.collapsed is not None: + return solve(self.collapsed.T.conj(), v) + return LowRankMatrix._solve(v, np.conj(self.alpha), self.ds, self.cs) + + def append(self, c, d): + if self.collapsed is not None: + self.collapsed += c[:,None] * d[None,:].conj() + return + + self.cs.append(c) + self.ds.append(d) + + if len(self.cs) > c.size: + self.collapse() + + def __array__(self, 
dtype=None, copy=None): + if dtype is not None: + warnings.warn("LowRankMatrix is scipy-internal code, `dtype` " + f"should only be None but was {dtype} (not handled)", + stacklevel=3) + if copy is not None: + warnings.warn("LowRankMatrix is scipy-internal code, `copy` " + f"should only be None but was {copy} (not handled)", + stacklevel=3) + if self.collapsed is not None: + return self.collapsed + + Gm = self.alpha*np.identity(self.n, dtype=self.dtype) + for c, d in zip(self.cs, self.ds): + Gm += c[:,None]*d[None,:].conj() + return Gm + + def collapse(self): + """Collapse the low-rank matrix to a full-rank one.""" + self.collapsed = np.array(self, copy=copy_if_needed) + self.cs = None + self.ds = None + self.alpha = None + + def restart_reduce(self, rank): + """ + Reduce the rank of the matrix by dropping all vectors. + """ + if self.collapsed is not None: + return + assert rank > 0 + if len(self.cs) > rank: + del self.cs[:] + del self.ds[:] + + def simple_reduce(self, rank): + """ + Reduce the rank of the matrix by dropping oldest vectors. + """ + if self.collapsed is not None: + return + assert rank > 0 + while len(self.cs) > rank: + del self.cs[0] + del self.ds[0] + + def svd_reduce(self, max_rank, to_retain=None): + """ + Reduce the rank of the matrix by retaining some SVD components. + + This corresponds to the \"Broyden Rank Reduction Inverse\" + algorithm described in [1]_. + + Note that the SVD decomposition can be done by solving only a + problem whose size is the effective rank of this matrix, which + is viable even for large problems. + + Parameters + ---------- + max_rank : int + Maximum rank of this matrix after reduction. + to_retain : int, optional + Number of SVD components to retain when reduction is done + (ie. rank > max_rank). Default is ``max_rank - 2``. + + References + ---------- + .. [1] B.A. van der Rotten, PhD thesis, + \"A limited memory Broyden method to solve high-dimensional + systems of nonlinear equations\". 
Mathematisch Instituut, + Universiteit Leiden, The Netherlands (2003). + + https://web.archive.org/web/20161022015821/http://www.math.leidenuniv.nl/scripties/Rotten.pdf + + """ + if self.collapsed is not None: + return + + p = max_rank + if to_retain is not None: + q = to_retain + else: + q = p - 2 + + if self.cs: + p = min(p, len(self.cs[0])) + q = max(0, min(q, p-1)) + + m = len(self.cs) + if m < p: + # nothing to do + return + + C = np.array(self.cs).T + D = np.array(self.ds).T + + D, R = qr(D, mode='economic') + C = dot(C, R.T.conj()) + + U, S, WH = svd(C, full_matrices=False) + + C = dot(C, inv(WH)) + D = dot(D, WH.T.conj()) + + for k in range(q): + self.cs[k] = C[:,k].copy() + self.ds[k] = D[:,k].copy() + + del self.cs[q:] + del self.ds[q:] + + +_doc_parts['broyden_params'] = """ + alpha : float, optional + Initial guess for the Jacobian is ``(-1/alpha)``. + reduction_method : str or tuple, optional + Method used in ensuring that the rank of the Broyden matrix + stays low. Can either be a string giving the name of the method, + or a tuple of the form ``(method, param1, param2, ...)`` + that gives the name of the method and values for additional parameters. + + Methods available: + + - ``restart``: drop all matrix columns. Has no extra parameters. + - ``simple``: drop oldest matrix column. Has no extra parameters. + - ``svd``: keep only the most significant SVD components. + Takes an extra parameter, ``to_retain``, which determines the + number of SVD components to retain when rank reduction is done. + Default is ``max_rank - 2``. + + max_rank : int, optional + Maximum rank for the Broyden matrix. + Default is infinity (i.e., no rank reduction). + """.strip() + + +class BroydenFirst(GenericBroyden): + r""" + Find a root of a function, using Broyden's first Jacobian approximation. + + This method is also known as \"Broyden's good method\". 
+ + Parameters + ---------- + %(params_basic)s + %(broyden_params)s + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='broyden1'`` in particular. + + Notes + ----- + This algorithm implements the inverse Jacobian Quasi-Newton update + + .. math:: H_+ = H + (dx - H df) dx^\dagger H / ( dx^\dagger H df) + + which corresponds to Broyden's first Jacobian update + + .. math:: J_+ = J + (df - J dx) dx^\dagger / dx^\dagger dx + + + References + ---------- + .. [1] B.A. van der Rotten, PhD thesis, + \"A limited memory Broyden method to solve high-dimensional + systems of nonlinear equations\". Mathematisch Instituut, + Universiteit Leiden, The Netherlands (2003). + + https://web.archive.org/web/20161022015821/http://www.math.leidenuniv.nl/scripties/Rotten.pdf + + Examples + -------- + The following functions define a system of nonlinear equations + + >>> def fun(x): + ... return [x[0] + 0.5 * (x[0] - x[1])**3 - 1.0, + ... 0.5 * (x[1] - x[0])**3 + x[1]] + + A solution can be obtained as follows. 
+ + >>> from scipy import optimize + >>> sol = optimize.broyden1(fun, [0, 0]) + >>> sol + array([0.84116396, 0.15883641]) + + """ + + def __init__(self, alpha=None, reduction_method='restart', max_rank=None): + GenericBroyden.__init__(self) + self.alpha = alpha + self.Gm = None + + if max_rank is None: + max_rank = np.inf + self.max_rank = max_rank + + if isinstance(reduction_method, str): + reduce_params = () + else: + reduce_params = reduction_method[1:] + reduction_method = reduction_method[0] + reduce_params = (max_rank - 1,) + reduce_params + + if reduction_method == 'svd': + self._reduce = lambda: self.Gm.svd_reduce(*reduce_params) + elif reduction_method == 'simple': + self._reduce = lambda: self.Gm.simple_reduce(*reduce_params) + elif reduction_method == 'restart': + self._reduce = lambda: self.Gm.restart_reduce(*reduce_params) + else: + raise ValueError("Unknown rank reduction method '%s'" % + reduction_method) + + def setup(self, x, F, func): + GenericBroyden.setup(self, x, F, func) + self.Gm = LowRankMatrix(-self.alpha, self.shape[0], self.dtype) + + def todense(self): + return inv(self.Gm) + + def solve(self, f, tol=0): + r = self.Gm.matvec(f) + if not np.isfinite(r).all(): + # singular; reset the Jacobian approximation + self.setup(self.last_x, self.last_f, self.func) + return self.Gm.matvec(f) + return r + + def matvec(self, f): + return self.Gm.solve(f) + + def rsolve(self, f, tol=0): + return self.Gm.rmatvec(f) + + def rmatvec(self, f): + return self.Gm.rsolve(f) + + def _update(self, x, f, dx, df, dx_norm, df_norm): + self._reduce() # reduce first to preserve secant condition + + v = self.Gm.rmatvec(dx) + c = dx - self.Gm.matvec(df) + d = v / vdot(df, v) + + self.Gm.append(c, d) + + +class BroydenSecond(BroydenFirst): + """ + Find a root of a function, using Broyden\'s second Jacobian approximation. + + This method is also known as \"Broyden's bad method\". 
+ + Parameters + ---------- + %(params_basic)s + %(broyden_params)s + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='broyden2'`` in particular. + + Notes + ----- + This algorithm implements the inverse Jacobian Quasi-Newton update + + .. math:: H_+ = H + (dx - H df) df^\\dagger / ( df^\\dagger df) + + corresponding to Broyden's second method. + + References + ---------- + .. [1] B.A. van der Rotten, PhD thesis, + \"A limited memory Broyden method to solve high-dimensional + systems of nonlinear equations\". Mathematisch Instituut, + Universiteit Leiden, The Netherlands (2003). + + https://web.archive.org/web/20161022015821/http://www.math.leidenuniv.nl/scripties/Rotten.pdf + + Examples + -------- + The following functions define a system of nonlinear equations + + >>> def fun(x): + ... return [x[0] + 0.5 * (x[0] - x[1])**3 - 1.0, + ... 0.5 * (x[1] - x[0])**3 + x[1]] + + A solution can be obtained as follows. + + >>> from scipy import optimize + >>> sol = optimize.broyden2(fun, [0, 0]) + >>> sol + array([0.84116365, 0.15883529]) + + """ + + def _update(self, x, f, dx, df, dx_norm, df_norm): + self._reduce() # reduce first to preserve secant condition + + v = df + c = dx - self.Gm.matvec(df) + d = v / df_norm**2 + self.Gm.append(c, d) + + +#------------------------------------------------------------------------------ +# Broyden-like (restricted memory) +#------------------------------------------------------------------------------ + +class Anderson(GenericBroyden): + """ + Find a root of a function, using (extended) Anderson mixing. + + The Jacobian is formed by for a 'best' solution in the space + spanned by last `M` vectors. As a result, only a MxM matrix + inversions and MxN multiplications are required. [Ey]_ + + Parameters + ---------- + %(params_basic)s + alpha : float, optional + Initial guess for the Jacobian is (-1/alpha). 
+ M : float, optional + Number of previous vectors to retain. Defaults to 5. + w0 : float, optional + Regularization parameter for numerical stability. + Compared to unity, good values of the order of 0.01. + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='anderson'`` in particular. + + References + ---------- + .. [Ey] V. Eyert, J. Comp. Phys., 124, 271 (1996). + + Examples + -------- + The following functions define a system of nonlinear equations + + >>> def fun(x): + ... return [x[0] + 0.5 * (x[0] - x[1])**3 - 1.0, + ... 0.5 * (x[1] - x[0])**3 + x[1]] + + A solution can be obtained as follows. + + >>> from scipy import optimize + >>> sol = optimize.anderson(fun, [0, 0]) + >>> sol + array([0.84116588, 0.15883789]) + + """ + + # Note: + # + # Anderson method maintains a rank M approximation of the inverse Jacobian, + # + # J^-1 v ~ -v*alpha + (dX + alpha dF) A^-1 dF^H v + # A = W + dF^H dF + # W = w0^2 diag(dF^H dF) + # + # so that for w0 = 0 the secant condition applies for last M iterates, i.e., + # + # J^-1 df_j = dx_j + # + # for all j = 0 ... M-1. 
    #
    # Moreover, (from Sherman-Morrison-Woodbury formula)
    #
    # J v ~ [ b I - b^2 C (I + b dF^H A^-1 C)^-1 dF^H ] v
    # C = (dX + alpha dF) A^-1
    # b = -1/alpha
    #
    # and after simplification
    #
    # J v ~ -v/alpha + (dX/alpha + dF) (dF^H dX - alpha W)^-1 dF^H v
    #

    def __init__(self, alpha=None, w0=0.01, M=5):
        # alpha : initial mixing parameter (initial Jacobian ~ -1/alpha I);
        #         autoscaled later by GenericBroyden.setup when left None.
        # w0    : regularization weight for the Gram matrix (see _update).
        # M     : number of previous (dx, df) pairs retained.
        GenericBroyden.__init__(self)
        self.alpha = alpha
        self.M = M
        self.dx = []      # history of steps dx_k
        self.df = []      # history of residual differences df_k
        self.gamma = None
        self.w0 = w0

    def solve(self, f, tol=0):
        """Evaluate w = J^-1 f using the stored (dx, df) history."""
        # Start from the plain-mixing term -alpha*f ...
        dx = -self.alpha*f

        n = len(self.dx)
        if n == 0:
            # No history yet: fall back to simple linear mixing.
            return dx

        # ... then add the low-rank correction with coefficients gamma
        # obtained from the M x M system  A gamma = dF^H f  (A built in _update).
        df_f = np.empty(n, dtype=f.dtype)
        for k in range(n):
            df_f[k] = vdot(self.df[k], f)

        try:
            gamma = solve(self.a, df_f)
        except LinAlgError:
            # singular; reset the Jacobian approximation
            del self.dx[:]
            del self.df[:]
            return dx

        for m in range(n):
            dx += gamma[m]*(self.dx[m] + self.alpha*self.df[m])
        return dx

    def matvec(self, f):
        """Evaluate w = J f via the Sherman-Morrison-Woodbury form above."""
        dx = -f/self.alpha

        n = len(self.dx)
        if n == 0:
            return dx

        df_f = np.empty(n, dtype=f.dtype)
        for k in range(n):
            df_f[k] = vdot(self.df[k], f)

        # b = dF^H dX - alpha W  (the small n x n system of the simplified
        # Woodbury expression in the class notes).
        b = np.empty((n, n), dtype=f.dtype)
        for i in range(n):
            for j in range(n):
                b[i,j] = vdot(self.df[i], self.dx[j])
                if i == j and self.w0 != 0:
                    b[i,j] -= vdot(self.df[i], self.df[i])*self.w0**2*self.alpha
        gamma = solve(b, df_f)

        for m in range(n):
            dx += gamma[m]*(self.df[m] + self.dx[m]/self.alpha)
        return dx

    def _update(self, x, f, dx, df, dx_norm, df_norm):
        # M == 0 disables the history entirely (pure linear mixing).
        if self.M == 0:
            return

        self.dx.append(dx)
        self.df.append(df)

        # Keep only the most recent M pairs.
        while len(self.dx) > self.M:
            self.dx.pop(0)
            self.df.pop(0)

        # Rebuild the Gram matrix  A = W + dF^H dF,
        # with W = w0^2 diag(dF^H dF)  (regularization on the diagonal).
        n = len(self.dx)
        a = np.zeros((n, n), dtype=f.dtype)

        for i in range(n):
            for j in range(i, n):
                if i == j:
                    wd = self.w0**2
                else:
                    wd = 0
                a[i,j] = (1+wd)*vdot(self.df[i], self.df[j])

        # Only the upper triangle was filled; mirror it to make A Hermitian.
        a += np.triu(a, 1).T.conj()
        self.a = a

#------------------------------------------------------------------------------
# Simple iterations
+#------------------------------------------------------------------------------ + + +class DiagBroyden(GenericBroyden): + """ + Find a root of a function, using diagonal Broyden Jacobian approximation. + + The Jacobian approximation is derived from previous iterations, by + retaining only the diagonal of Broyden matrices. + + .. warning:: + + This algorithm may be useful for specific problems, but whether + it will work may depend strongly on the problem. + + Parameters + ---------- + %(params_basic)s + alpha : float, optional + Initial guess for the Jacobian is (-1/alpha). + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='diagbroyden'`` in particular. + + Examples + -------- + The following functions define a system of nonlinear equations + + >>> def fun(x): + ... return [x[0] + 0.5 * (x[0] - x[1])**3 - 1.0, + ... 0.5 * (x[1] - x[0])**3 + x[1]] + + A solution can be obtained as follows. + + >>> from scipy import optimize + >>> sol = optimize.diagbroyden(fun, [0, 0]) + >>> sol + array([0.84116403, 0.15883384]) + + """ + + def __init__(self, alpha=None): + GenericBroyden.__init__(self) + self.alpha = alpha + + def setup(self, x, F, func): + GenericBroyden.setup(self, x, F, func) + self.d = np.full((self.shape[0],), 1 / self.alpha, dtype=self.dtype) + + def solve(self, f, tol=0): + return -f / self.d + + def matvec(self, f): + return -f * self.d + + def rsolve(self, f, tol=0): + return -f / self.d.conj() + + def rmatvec(self, f): + return -f * self.d.conj() + + def todense(self): + return np.diag(-self.d) + + def _update(self, x, f, dx, df, dx_norm, df_norm): + self.d -= (df + self.d*dx)*dx/dx_norm**2 + + +class LinearMixing(GenericBroyden): + """ + Find a root of a function, using a scalar Jacobian approximation. + + .. warning:: + + This algorithm may be useful for specific problems, but whether + it will work may depend strongly on the problem. 
+ + Parameters + ---------- + %(params_basic)s + alpha : float, optional + The Jacobian approximation is (-1/alpha). + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='linearmixing'`` in particular. + + """ + + def __init__(self, alpha=None): + GenericBroyden.__init__(self) + self.alpha = alpha + + def solve(self, f, tol=0): + return -f*self.alpha + + def matvec(self, f): + return -f/self.alpha + + def rsolve(self, f, tol=0): + return -f*np.conj(self.alpha) + + def rmatvec(self, f): + return -f/np.conj(self.alpha) + + def todense(self): + return np.diag(np.full(self.shape[0], -1/self.alpha)) + + def _update(self, x, f, dx, df, dx_norm, df_norm): + pass + + +class ExcitingMixing(GenericBroyden): + """ + Find a root of a function, using a tuned diagonal Jacobian approximation. + + The Jacobian matrix is diagonal and is tuned on each iteration. + + .. warning:: + + This algorithm may be useful for specific problems, but whether + it will work may depend strongly on the problem. + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='excitingmixing'`` in particular. + + Parameters + ---------- + %(params_basic)s + alpha : float, optional + Initial Jacobian approximation is (-1/alpha). + alphamax : float, optional + The entries of the diagonal Jacobian are kept in the range + ``[alpha, alphamax]``. 
+ %(params_extra)s + """ + + def __init__(self, alpha=None, alphamax=1.0): + GenericBroyden.__init__(self) + self.alpha = alpha + self.alphamax = alphamax + self.beta = None + + def setup(self, x, F, func): + GenericBroyden.setup(self, x, F, func) + self.beta = np.full((self.shape[0],), self.alpha, dtype=self.dtype) + + def solve(self, f, tol=0): + return -f*self.beta + + def matvec(self, f): + return -f/self.beta + + def rsolve(self, f, tol=0): + return -f*self.beta.conj() + + def rmatvec(self, f): + return -f/self.beta.conj() + + def todense(self): + return np.diag(-1/self.beta) + + def _update(self, x, f, dx, df, dx_norm, df_norm): + incr = f*self.last_f > 0 + self.beta[incr] += self.alpha + self.beta[~incr] = self.alpha + np.clip(self.beta, 0, self.alphamax, out=self.beta) + + +#------------------------------------------------------------------------------ +# Iterative/Krylov approximated Jacobians +#------------------------------------------------------------------------------ + +class KrylovJacobian(Jacobian): + r""" + Find a root of a function, using Krylov approximation for inverse Jacobian. + + This method is suitable for solving large-scale problems. + + Parameters + ---------- + %(params_basic)s + rdiff : float, optional + Relative step size to use in numerical differentiation. + method : str or callable, optional + Krylov method to use to approximate the Jacobian. Can be a string, + or a function implementing the same interface as the iterative + solvers in `scipy.sparse.linalg`. If a string, needs to be one of: + ``'lgmres'``, ``'gmres'``, ``'bicgstab'``, ``'cgs'``, ``'minres'``, + ``'tfqmr'``. + + The default is `scipy.sparse.linalg.lgmres`. + inner_maxiter : int, optional + Parameter to pass to the "inner" Krylov solver: maximum number of + iterations. Iteration will stop after maxiter steps even if the + specified tolerance has not been achieved. + inner_M : LinearOperator or InverseJacobian + Preconditioner for the inner Krylov iteration. 
+ Note that you can use also inverse Jacobians as (adaptive) + preconditioners. For example, + + >>> from scipy.optimize import BroydenFirst, KrylovJacobian + >>> from scipy.optimize import InverseJacobian + >>> jac = BroydenFirst() + >>> kjac = KrylovJacobian(inner_M=InverseJacobian(jac)) + + If the preconditioner has a method named 'update', it will be called + as ``update(x, f)`` after each nonlinear step, with ``x`` giving + the current point, and ``f`` the current function value. + outer_k : int, optional + Size of the subspace kept across LGMRES nonlinear iterations. + See `scipy.sparse.linalg.lgmres` for details. + inner_kwargs : kwargs + Keyword parameters for the "inner" Krylov solver + (defined with `method`). Parameter names must start with + the `inner_` prefix which will be stripped before passing on + the inner method. See, e.g., `scipy.sparse.linalg.gmres` for details. + %(params_extra)s + + See Also + -------- + root : Interface to root finding algorithms for multivariate + functions. See ``method='krylov'`` in particular. + scipy.sparse.linalg.gmres + scipy.sparse.linalg.lgmres + + Notes + ----- + This function implements a Newton-Krylov solver. The basic idea is + to compute the inverse of the Jacobian with an iterative Krylov + method. These methods require only evaluating the Jacobian-vector + products, which are conveniently approximated by a finite difference: + + .. math:: J v \approx (f(x + \omega*v/|v|) - f(x)) / \omega + + Due to the use of iterative matrix inverses, these methods can + deal with large nonlinear problems. + + SciPy's `scipy.sparse.linalg` module offers a selection of Krylov + solvers to choose from. The default here is `lgmres`, which is a + variant of restarted GMRES iteration that reuses some of the + information obtained in the previous Newton steps to invert + Jacobians in subsequent steps. + + For a review on Newton-Krylov methods, see for example [1]_, + and for the LGMRES sparse inverse method, see [2]_. 
+ + References + ---------- + .. [1] C. T. Kelley, Solving Nonlinear Equations with Newton's Method, + SIAM, pp.57-83, 2003. + :doi:`10.1137/1.9780898718898.ch3` + .. [2] D.A. Knoll and D.E. Keyes, J. Comp. Phys. 193, 357 (2004). + :doi:`10.1016/j.jcp.2003.08.010` + .. [3] A.H. Baker and E.R. Jessup and T. Manteuffel, + SIAM J. Matrix Anal. Appl. 26, 962 (2005). + :doi:`10.1137/S0895479803422014` + + Examples + -------- + The following functions define a system of nonlinear equations + + >>> def fun(x): + ... return [x[0] + 0.5 * x[1] - 1.0, + ... 0.5 * (x[1] - x[0]) ** 2] + + A solution can be obtained as follows. + + >>> from scipy import optimize + >>> sol = optimize.newton_krylov(fun, [0, 0]) + >>> sol + array([0.66731771, 0.66536458]) + + """ + + def __init__(self, rdiff=None, method='lgmres', inner_maxiter=20, + inner_M=None, outer_k=10, **kw): + self.preconditioner = inner_M + self.rdiff = rdiff + # Note that this retrieves one of the named functions, or otherwise + # uses `method` as is (i.e., for a user-provided callable). 
+ self.method = dict( + bicgstab=scipy.sparse.linalg.bicgstab, + gmres=scipy.sparse.linalg.gmres, + lgmres=scipy.sparse.linalg.lgmres, + cgs=scipy.sparse.linalg.cgs, + minres=scipy.sparse.linalg.minres, + tfqmr=scipy.sparse.linalg.tfqmr, + ).get(method, method) + + self.method_kw = dict(maxiter=inner_maxiter, M=self.preconditioner) + + if self.method is scipy.sparse.linalg.gmres: + # Replace GMRES's outer iteration with Newton steps + self.method_kw['restart'] = inner_maxiter + self.method_kw['maxiter'] = 1 + self.method_kw.setdefault('atol', 0) + elif self.method in (scipy.sparse.linalg.gcrotmk, + scipy.sparse.linalg.bicgstab, + scipy.sparse.linalg.cgs): + self.method_kw.setdefault('atol', 0) + elif self.method is scipy.sparse.linalg.lgmres: + self.method_kw['outer_k'] = outer_k + # Replace LGMRES's outer iteration with Newton steps + self.method_kw['maxiter'] = 1 + # Carry LGMRES's `outer_v` vectors across nonlinear iterations + self.method_kw.setdefault('outer_v', []) + self.method_kw.setdefault('prepend_outer_v', True) + # But don't carry the corresponding Jacobian*v products, in case + # the Jacobian changes a lot in the nonlinear step + # + # XXX: some trust-region inspired ideas might be more efficient... + # See e.g., Brown & Saad. But needs to be implemented separately + # since it's not an inexact Newton method. 
+ self.method_kw.setdefault('store_outer_Av', False) + self.method_kw.setdefault('atol', 0) + + for key, value in kw.items(): + if not key.startswith('inner_'): + raise ValueError("Unknown parameter %s" % key) + self.method_kw[key[6:]] = value + + def _update_diff_step(self): + mx = abs(self.x0).max() + mf = abs(self.f0).max() + self.omega = self.rdiff * max(1, mx) / max(1, mf) + + def matvec(self, v): + nv = norm(v) + if nv == 0: + return 0*v + sc = self.omega / nv + r = (self.func(self.x0 + sc*v) - self.f0) / sc + if not np.all(np.isfinite(r)) and np.all(np.isfinite(v)): + raise ValueError('Function returned non-finite results') + return r + + def solve(self, rhs, tol=0): + if 'rtol' in self.method_kw: + sol, info = self.method(self.op, rhs, **self.method_kw) + else: + sol, info = self.method(self.op, rhs, rtol=tol, **self.method_kw) + return sol + + def update(self, x, f): + self.x0 = x + self.f0 = f + self._update_diff_step() + + # Update also the preconditioner, if possible + if self.preconditioner is not None: + if hasattr(self.preconditioner, 'update'): + self.preconditioner.update(x, f) + + def setup(self, x, f, func): + Jacobian.setup(self, x, f, func) + self.x0 = x + self.f0 = f + self.op = scipy.sparse.linalg.aslinearoperator(self) + + if self.rdiff is None: + self.rdiff = np.finfo(x.dtype).eps ** (1./2) + + self._update_diff_step() + + # Setup also the preconditioner, if possible + if self.preconditioner is not None: + if hasattr(self.preconditioner, 'setup'): + self.preconditioner.setup(x, f, func) + + +#------------------------------------------------------------------------------ +# Wrapper functions +#------------------------------------------------------------------------------ + +def _nonlin_wrapper(name, jac): + """ + Construct a solver wrapper with given name and Jacobian approx. 
+ + It inspects the keyword arguments of ``jac.__init__``, and allows to + use the same arguments in the wrapper function, in addition to the + keyword arguments of `nonlin_solve` + + """ + signature = _getfullargspec(jac.__init__) + args, varargs, varkw, defaults, kwonlyargs, kwdefaults, _ = signature + kwargs = list(zip(args[-len(defaults):], defaults)) + kw_str = ", ".join([f"{k}={v!r}" for k, v in kwargs]) + if kw_str: + kw_str = ", " + kw_str + kwkw_str = ", ".join([f"{k}={k}" for k, v in kwargs]) + if kwkw_str: + kwkw_str = kwkw_str + ", " + if kwonlyargs: + raise ValueError('Unexpected signature %s' % signature) + + # Construct the wrapper function so that its keyword arguments + # are visible in pydoc.help etc. + wrapper = """ +def %(name)s(F, xin, iter=None %(kw)s, verbose=False, maxiter=None, + f_tol=None, f_rtol=None, x_tol=None, x_rtol=None, + tol_norm=None, line_search='armijo', callback=None, **kw): + jac = %(jac)s(%(kwkw)s **kw) + return nonlin_solve(F, xin, jac, iter, verbose, maxiter, + f_tol, f_rtol, x_tol, x_rtol, tol_norm, line_search, + callback) +""" + + wrapper = wrapper % dict(name=name, kw=kw_str, jac=jac.__name__, + kwkw=kwkw_str) + ns = {} + ns.update(globals()) + exec(wrapper, ns) + func = ns[name] + func.__doc__ = jac.__doc__ + _set_doc(func) + return func + + +broyden1 = _nonlin_wrapper('broyden1', BroydenFirst) +broyden2 = _nonlin_wrapper('broyden2', BroydenSecond) +anderson = _nonlin_wrapper('anderson', Anderson) +linearmixing = _nonlin_wrapper('linearmixing', LinearMixing) +diagbroyden = _nonlin_wrapper('diagbroyden', DiagBroyden) +excitingmixing = _nonlin_wrapper('excitingmixing', ExcitingMixing) +newton_krylov = _nonlin_wrapper('newton_krylov', KrylovJacobian) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_numdiff.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_numdiff.py new file mode 100644 index 0000000000000000000000000000000000000000..b5cb5724d8636bd149906a6356ac97bae169e289 --- 
/dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_numdiff.py @@ -0,0 +1,779 @@ +"""Routines for numerical differentiation.""" +import functools +import numpy as np +from numpy.linalg import norm + +from scipy.sparse.linalg import LinearOperator +from ..sparse import issparse, csc_matrix, csr_matrix, coo_matrix, find +from ._group_columns import group_dense, group_sparse +from scipy._lib._array_api import atleast_nd, array_namespace + + +def _adjust_scheme_to_bounds(x0, h, num_steps, scheme, lb, ub): + """Adjust final difference scheme to the presence of bounds. + + Parameters + ---------- + x0 : ndarray, shape (n,) + Point at which we wish to estimate derivative. + h : ndarray, shape (n,) + Desired absolute finite difference steps. + num_steps : int + Number of `h` steps in one direction required to implement finite + difference scheme. For example, 2 means that we need to evaluate + f(x0 + 2 * h) or f(x0 - 2 * h) + scheme : {'1-sided', '2-sided'} + Whether steps in one or both directions are required. In other + words '1-sided' applies to forward and backward schemes, '2-sided' + applies to center schemes. + lb : ndarray, shape (n,) + Lower bounds on independent variables. + ub : ndarray, shape (n,) + Upper bounds on independent variables. + + Returns + ------- + h_adjusted : ndarray, shape (n,) + Adjusted absolute step sizes. Step size decreases only if a sign flip + or switching to one-sided scheme doesn't allow to take a full step. + use_one_sided : ndarray of bool, shape (n,) + Whether to switch to one-sided scheme. Informative only for + ``scheme='2-sided'``. 
+ """ + if scheme == '1-sided': + use_one_sided = np.ones_like(h, dtype=bool) + elif scheme == '2-sided': + h = np.abs(h) + use_one_sided = np.zeros_like(h, dtype=bool) + else: + raise ValueError("`scheme` must be '1-sided' or '2-sided'.") + + if np.all((lb == -np.inf) & (ub == np.inf)): + return h, use_one_sided + + h_total = h * num_steps + h_adjusted = h.copy() + + lower_dist = x0 - lb + upper_dist = ub - x0 + + if scheme == '1-sided': + x = x0 + h_total + violated = (x < lb) | (x > ub) + fitting = np.abs(h_total) <= np.maximum(lower_dist, upper_dist) + h_adjusted[violated & fitting] *= -1 + + forward = (upper_dist >= lower_dist) & ~fitting + h_adjusted[forward] = upper_dist[forward] / num_steps + backward = (upper_dist < lower_dist) & ~fitting + h_adjusted[backward] = -lower_dist[backward] / num_steps + elif scheme == '2-sided': + central = (lower_dist >= h_total) & (upper_dist >= h_total) + + forward = (upper_dist >= lower_dist) & ~central + h_adjusted[forward] = np.minimum( + h[forward], 0.5 * upper_dist[forward] / num_steps) + use_one_sided[forward] = True + + backward = (upper_dist < lower_dist) & ~central + h_adjusted[backward] = -np.minimum( + h[backward], 0.5 * lower_dist[backward] / num_steps) + use_one_sided[backward] = True + + min_dist = np.minimum(upper_dist, lower_dist) / num_steps + adjusted_central = (~central & (np.abs(h_adjusted) <= min_dist)) + h_adjusted[adjusted_central] = min_dist[adjusted_central] + use_one_sided[adjusted_central] = False + + return h_adjusted, use_one_sided + + +@functools.lru_cache +def _eps_for_method(x0_dtype, f0_dtype, method): + """ + Calculates relative EPS step to use for a given data type + and numdiff step method. + + Progressively smaller steps are used for larger floating point types. + + Parameters + ---------- + f0_dtype: np.dtype + dtype of function evaluation + + x0_dtype: np.dtype + dtype of parameter vector + + method: {'2-point', '3-point', 'cs'} + + Returns + ------- + EPS: float + relative step size. 
May be np.float16, np.float32, np.float64 + + Notes + ----- + The default relative step will be np.float64. However, if x0 or f0 are + smaller floating point types (np.float16, np.float32), then the smallest + floating point type is chosen. + """ + # the default EPS value + EPS = np.finfo(np.float64).eps + + x0_is_fp = False + if np.issubdtype(x0_dtype, np.inexact): + # if you're a floating point type then over-ride the default EPS + EPS = np.finfo(x0_dtype).eps + x0_itemsize = np.dtype(x0_dtype).itemsize + x0_is_fp = True + + if np.issubdtype(f0_dtype, np.inexact): + f0_itemsize = np.dtype(f0_dtype).itemsize + # choose the smallest itemsize between x0 and f0 + if x0_is_fp and f0_itemsize < x0_itemsize: + EPS = np.finfo(f0_dtype).eps + + if method in ["2-point", "cs"]: + return EPS**0.5 + elif method in ["3-point"]: + return EPS**(1/3) + else: + raise RuntimeError("Unknown step method, should be one of " + "{'2-point', '3-point', 'cs'}") + + +def _compute_absolute_step(rel_step, x0, f0, method): + """ + Computes an absolute step from a relative step for finite difference + calculation. + + Parameters + ---------- + rel_step: None or array-like + Relative step for the finite difference calculation + x0 : np.ndarray + Parameter vector + f0 : np.ndarray or scalar + method : {'2-point', '3-point', 'cs'} + + Returns + ------- + h : float + The absolute step size + + Notes + ----- + `h` will always be np.float64. However, if `x0` or `f0` are + smaller floating point dtypes (e.g. np.float32), then the absolute + step size will be calculated from the smallest floating point size. + """ + # this is used instead of np.sign(x0) because we need + # sign_x0 to be 1 when x0 == 0. + sign_x0 = (x0 >= 0).astype(float) * 2 - 1 + + rstep = _eps_for_method(x0.dtype, f0.dtype, method) + + if rel_step is None: + abs_step = rstep * sign_x0 * np.maximum(1.0, np.abs(x0)) + else: + # User has requested specific relative steps. 
+ # Don't multiply by max(1, abs(x0) because if x0 < 1 then their + # requested step is not used. + abs_step = rel_step * sign_x0 * np.abs(x0) + + # however we don't want an abs_step of 0, which can happen if + # rel_step is 0, or x0 is 0. Instead, substitute a realistic step + dx = ((x0 + abs_step) - x0) + abs_step = np.where(dx == 0, + rstep * sign_x0 * np.maximum(1.0, np.abs(x0)), + abs_step) + + return abs_step + + +def _prepare_bounds(bounds, x0): + """ + Prepares new-style bounds from a two-tuple specifying the lower and upper + limits for values in x0. If a value is not bound then the lower/upper bound + will be expected to be -np.inf/np.inf. + + Examples + -------- + >>> _prepare_bounds([(0, 1, 2), (1, 2, np.inf)], [0.5, 1.5, 2.5]) + (array([0., 1., 2.]), array([ 1., 2., inf])) + """ + lb, ub = (np.asarray(b, dtype=float) for b in bounds) + if lb.ndim == 0: + lb = np.resize(lb, x0.shape) + + if ub.ndim == 0: + ub = np.resize(ub, x0.shape) + + return lb, ub + + +def group_columns(A, order=0): + """Group columns of a 2-D matrix for sparse finite differencing [1]_. + + Two columns are in the same group if in each row at least one of them + has zero. A greedy sequential algorithm is used to construct groups. + + Parameters + ---------- + A : array_like or sparse matrix, shape (m, n) + Matrix of which to group columns. + order : int, iterable of int with shape (n,) or None + Permutation array which defines the order of columns enumeration. + If int or None, a random permutation is used with `order` used as + a random seed. Default is 0, that is use a random permutation but + guarantee repeatability. + + Returns + ------- + groups : ndarray of int, shape (n,) + Contains values from 0 to n_groups-1, where n_groups is the number + of found groups. Each value ``groups[i]`` is an index of a group to + which ith column assigned. The procedure was helpful only if + n_groups is significantly less than n. + + References + ---------- + .. [1] A. Curtis, M. J. D. 
Powell, and J. Reid, "On the estimation of + sparse Jacobian matrices", Journal of the Institute of Mathematics + and its Applications, 13 (1974), pp. 117-120. + """ + if issparse(A): + A = csc_matrix(A) + else: + A = np.atleast_2d(A) + A = (A != 0).astype(np.int32) + + if A.ndim != 2: + raise ValueError("`A` must be 2-dimensional.") + + m, n = A.shape + + if order is None or np.isscalar(order): + rng = np.random.RandomState(order) + order = rng.permutation(n) + else: + order = np.asarray(order) + if order.shape != (n,): + raise ValueError("`order` has incorrect shape.") + + A = A[:, order] + + if issparse(A): + groups = group_sparse(m, n, A.indices, A.indptr) + else: + groups = group_dense(m, n, A) + + groups[order] = groups.copy() + + return groups + + +def approx_derivative(fun, x0, method='3-point', rel_step=None, abs_step=None, + f0=None, bounds=(-np.inf, np.inf), sparsity=None, + as_linear_operator=False, args=(), kwargs={}): + """Compute finite difference approximation of the derivatives of a + vector-valued function. + + If a function maps from R^n to R^m, its derivatives form m-by-n matrix + called the Jacobian, where an element (i, j) is a partial derivative of + f[i] with respect to x[j]. + + Parameters + ---------- + fun : callable + Function of which to estimate the derivatives. The argument x + passed to this function is ndarray of shape (n,) (never a scalar + even if n=1). It must return 1-D array_like of shape (m,) or a scalar. + x0 : array_like of shape (n,) or float + Point at which to estimate the derivatives. Float will be converted + to a 1-D array. + method : {'3-point', '2-point', 'cs'}, optional + Finite difference method to use: + - '2-point' - use the first order accuracy forward or backward + difference. + - '3-point' - use central difference in interior points and the + second order accuracy forward or backward difference + near the boundary. + - 'cs' - use a complex-step finite difference scheme. 
This assumes + that the user function is real-valued and can be + analytically continued to the complex plane. Otherwise, + produces bogus results. + rel_step : None or array_like, optional + Relative step size to use. If None (default) the absolute step size is + computed as ``h = rel_step * sign(x0) * max(1, abs(x0))``, with + `rel_step` being selected automatically, see Notes. Otherwise + ``h = rel_step * sign(x0) * abs(x0)``. For ``method='3-point'`` the + sign of `h` is ignored. The calculated step size is possibly adjusted + to fit into the bounds. + abs_step : array_like, optional + Absolute step size to use, possibly adjusted to fit into the bounds. + For ``method='3-point'`` the sign of `abs_step` is ignored. By default + relative steps are used, only if ``abs_step is not None`` are absolute + steps used. + f0 : None or array_like, optional + If not None it is assumed to be equal to ``fun(x0)``, in this case + the ``fun(x0)`` is not called. Default is None. + bounds : tuple of array_like, optional + Lower and upper bounds on independent variables. Defaults to no bounds. + Each bound must match the size of `x0` or be a scalar, in the latter + case the bound will be the same for all variables. Use it to limit the + range of function evaluation. Bounds checking is not implemented + when `as_linear_operator` is True. + sparsity : {None, array_like, sparse matrix, 2-tuple}, optional + Defines a sparsity structure of the Jacobian matrix. If the Jacobian + matrix is known to have only few non-zero elements in each row, then + it's possible to estimate its several columns by a single function + evaluation [3]_. To perform such economic computations two ingredients + are required: + + * structure : array_like or sparse matrix of shape (m, n). A zero + element means that a corresponding element of the Jacobian + identically equals to zero. + * groups : array_like of shape (n,). A column grouping for a given + sparsity structure, use `group_columns` to obtain it. 
+ + A single array or a sparse matrix is interpreted as a sparsity + structure, and groups are computed inside the function. A tuple is + interpreted as (structure, groups). If None (default), a standard + dense differencing will be used. + + Note, that sparse differencing makes sense only for large Jacobian + matrices where each row contains few non-zero elements. + as_linear_operator : bool, optional + When True the function returns an `scipy.sparse.linalg.LinearOperator`. + Otherwise it returns a dense array or a sparse matrix depending on + `sparsity`. The linear operator provides an efficient way of computing + ``J.dot(p)`` for any vector ``p`` of shape (n,), but does not allow + direct access to individual elements of the matrix. By default + `as_linear_operator` is False. + args, kwargs : tuple and dict, optional + Additional arguments passed to `fun`. Both empty by default. + The calling signature is ``fun(x, *args, **kwargs)``. + + Returns + ------- + J : {ndarray, sparse matrix, LinearOperator} + Finite difference approximation of the Jacobian matrix. + If `as_linear_operator` is True returns a LinearOperator + with shape (m, n). Otherwise it returns a dense array or sparse + matrix depending on how `sparsity` is defined. If `sparsity` + is None then a ndarray with shape (m, n) is returned. If + `sparsity` is not None returns a csr_matrix with shape (m, n). + For sparse matrices and linear operators it is always returned as + a 2-D structure, for ndarrays, if m=1 it is returned + as a 1-D gradient array with shape (n,). + + See Also + -------- + check_derivative : Check correctness of a function computing derivatives. + + Notes + ----- + If `rel_step` is not provided, it assigned as ``EPS**(1/s)``, where EPS is + determined from the smallest floating point dtype of `x0` or `fun(x0)`, + ``np.finfo(x0.dtype).eps``, s=2 for '2-point' method and + s=3 for '3-point' method. 
Such relative step approximately minimizes a sum + of truncation and round-off errors, see [1]_. Relative steps are used by + default. However, absolute steps are used when ``abs_step is not None``. + If any of the absolute or relative steps produces an indistinguishable + difference from the original `x0`, ``(x0 + dx) - x0 == 0``, then a + automatic step size is substituted for that particular entry. + + A finite difference scheme for '3-point' method is selected automatically. + The well-known central difference scheme is used for points sufficiently + far from the boundary, and 3-point forward or backward scheme is used for + points near the boundary. Both schemes have the second-order accuracy in + terms of Taylor expansion. Refer to [2]_ for the formulas of 3-point + forward and backward difference schemes. + + For dense differencing when m=1 Jacobian is returned with a shape (n,), + on the other hand when n=1 Jacobian is returned with a shape (m, 1). + Our motivation is the following: a) It handles a case of gradient + computation (m=1) in a conventional way. b) It clearly separates these two + different cases. b) In all cases np.atleast_2d can be called to get 2-D + Jacobian with correct dimensions. + + References + ---------- + .. [1] W. H. Press et. al. "Numerical Recipes. The Art of Scientific + Computing. 3rd edition", sec. 5.7. + + .. [2] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of + sparse Jacobian matrices", Journal of the Institute of Mathematics + and its Applications, 13 (1974), pp. 117-120. + + .. [3] B. Fornberg, "Generation of Finite Difference Formulas on + Arbitrarily Spaced Grids", Mathematics of Computation 51, 1988. + + Examples + -------- + >>> import numpy as np + >>> from scipy.optimize._numdiff import approx_derivative + >>> + >>> def f(x, c1, c2): + ... return np.array([x[0] * np.sin(c1 * x[1]), + ... x[0] * np.cos(c2 * x[1])]) + ... 
+ >>> x0 = np.array([1.0, 0.5 * np.pi]) + >>> approx_derivative(f, x0, args=(1, 2)) + array([[ 1., 0.], + [-1., 0.]]) + + Bounds can be used to limit the region of function evaluation. + In the example below we compute left and right derivative at point 1.0. + + >>> def g(x): + ... return x**2 if x >= 1 else x + ... + >>> x0 = 1.0 + >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0)) + array([ 1.]) + >>> approx_derivative(g, x0, bounds=(1.0, np.inf)) + array([ 2.]) + """ + if method not in ['2-point', '3-point', 'cs']: + raise ValueError("Unknown method '%s'. " % method) + + xp = array_namespace(x0) + _x = atleast_nd(x0, ndim=1, xp=xp) + _dtype = xp.float64 + if xp.isdtype(_x.dtype, "real floating"): + _dtype = _x.dtype + + # promotes to floating + x0 = xp.astype(_x, _dtype) + + if x0.ndim > 1: + raise ValueError("`x0` must have at most 1 dimension.") + + lb, ub = _prepare_bounds(bounds, x0) + + if lb.shape != x0.shape or ub.shape != x0.shape: + raise ValueError("Inconsistent shapes between bounds and `x0`.") + + if as_linear_operator and not (np.all(np.isinf(lb)) + and np.all(np.isinf(ub))): + raise ValueError("Bounds not supported when " + "`as_linear_operator` is True.") + + def fun_wrapped(x): + # send user function same fp type as x0. 
(but only if cs is not being + # used + if xp.isdtype(x.dtype, "real floating"): + x = xp.astype(x, x0.dtype) + + f = np.atleast_1d(fun(x, *args, **kwargs)) + if f.ndim > 1: + raise RuntimeError("`fun` return value has " + "more than 1 dimension.") + return f + + if f0 is None: + f0 = fun_wrapped(x0) + else: + f0 = np.atleast_1d(f0) + if f0.ndim > 1: + raise ValueError("`f0` passed has more than 1 dimension.") + + if np.any((x0 < lb) | (x0 > ub)): + raise ValueError("`x0` violates bound constraints.") + + if as_linear_operator: + if rel_step is None: + rel_step = _eps_for_method(x0.dtype, f0.dtype, method) + + return _linear_operator_difference(fun_wrapped, x0, + f0, rel_step, method) + else: + # by default we use rel_step + if abs_step is None: + h = _compute_absolute_step(rel_step, x0, f0, method) + else: + # user specifies an absolute step + sign_x0 = (x0 >= 0).astype(float) * 2 - 1 + h = abs_step + + # cannot have a zero step. This might happen if x0 is very large + # or small. In which case fall back to relative step. 
def _linear_operator_difference(fun, x0, f0, h, method):
    """Return the m-by-n Jacobian as a LinearOperator whose matvec
    approximates ``J @ p`` by differencing `fun` along direction `p`.

    `h` is the relative step; each matvec normalizes it by ``norm(p)``.
    A zero `p` short-circuits to a zero vector (no function evaluation).
    """
    m = f0.size
    n = x0.size

    if method == '2-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p
            df = fun(x) - f0
            return df / dx

    elif method == '3-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            # Central difference: evaluate at x0 +/- (dx/2)*p.
            dx = 2*h / norm(p)
            x1 = x0 - (dx/2)*p
            x2 = x0 + (dx/2)*p
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
            return df / dx

    elif method == 'cs':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            # Complex step: derivative is Im(f(x0 + i*dx*p)) / dx.
            dx = h / norm(p)
            x = x0 + dx*p*1.j
            f1 = fun(x)
            df = f1.imag
            return df / dx

    else:
        raise RuntimeError("Never be here.")

    return LinearOperator((m, n), matvec)


def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    """Dense finite difference Jacobian, one perturbed variable at a time.

    Builds the transposed Jacobian row by row (one row per variable) and
    returns its transpose; a gradient (m == 1) is flattened to shape (n,).
    `use_one_sided` selects forward vs. central formulas for '3-point'.
    """
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    # Work buffers perturbed in place and restored at the end of each
    # iteration; xc is a complex copy for the 'cs' scheme.
    x1 = x0.copy()
    x2 = x0.copy()
    xc = x0.astype(complex, copy=True)

    for i in range(h.size):
        if method == '2-point':
            x1[i] += h[i]
            dx = x1[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x1) - f0
        elif method == '3-point' and use_one_sided[i]:
            # Second-order one-sided formula using f(x0), f(x0+h), f(x0+2h).
            x1[i] += h[i]
            x2[i] += 2 * h[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            # Central difference.
            x1[i] -= h[i]
            x2[i] += h[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            xc[i] += h[i] * 1.j
            f1 = fun(xc)
            df = f1.imag
            dx = h[i]
        else:
            raise RuntimeError("Never be here.")

        J_transposed[i] = df / dx
        # Restore the perturbed coordinate before moving to the next one.
        x1[i] = x2[i] = xc[i] = x0[i]

    if m == 1:
        J_transposed = np.ravel(J_transposed)

    return J_transposed.T
def _sparse_difference(fun, x0, f0, h, use_one_sided,
                       structure, groups, method):
    """Sparse finite difference Jacobian using column grouping.

    Variables in the same group (no shared nonzero rows in `structure`)
    are perturbed simultaneously, so the number of function evaluations
    scales with the number of groups instead of n. Returns a csr_matrix
    of shape (m, n).
    """
    m = f0.size
    n = x0.size
    row_indices = []
    col_indices = []
    fractions = []

    n_groups = np.max(groups) + 1
    for group in range(n_groups):
        # Perturb variables which are in the same group simultaneously.
        e = np.equal(group, groups)
        h_vec = h * e
        if method == '2-point':
            x = x0 + h_vec
            dx = x - x0
            df = fun(x) - f0
            # The result is written to columns which correspond to perturbed
            # variables.
            cols, = np.nonzero(e)
            # Find all non-zero elements in selected columns of Jacobian.
            i, j, _ = find(structure[:, cols])
            # Restore column indices in the full array.
            j = cols[j]
        elif method == '3-point':
            # Here we do conceptually the same but separate one-sided
            # and two-sided schemes.
            x1 = x0.copy()
            x2 = x0.copy()

            mask_1 = use_one_sided & e
            x1[mask_1] += h_vec[mask_1]
            x2[mask_1] += 2 * h_vec[mask_1]

            mask_2 = ~use_one_sided & e
            x1[mask_2] -= h_vec[mask_2]
            x2[mask_2] += h_vec[mask_2]

            dx = np.zeros(n)
            dx[mask_1] = x2[mask_1] - x0[mask_1]
            dx[mask_2] = x2[mask_2] - x1[mask_2]

            f1 = fun(x1)
            f2 = fun(x2)

            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]

            # Per-row formula choice: rows belonging to one-sided columns
            # use the 3-point forward formula, the rest the central one.
            mask = use_one_sided[j]
            df = np.empty(m)

            rows = i[mask]
            df[rows] = -3 * f0[rows] + 4 * f1[rows] - f2[rows]

            rows = i[~mask]
            df[rows] = f2[rows] - f1[rows]
        elif method == 'cs':
            f1 = fun(x0 + h_vec*1.j)
            df = f1.imag
            dx = h_vec
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
        else:
            raise ValueError("Never be here.")

        # All that's left is to compute the fraction. We store i, j and
        # fractions as separate arrays and later construct coo_matrix.
        row_indices.append(i)
        col_indices.append(j)
        fractions.append(df[i] / dx[j])

    row_indices = np.hstack(row_indices)
    col_indices = np.hstack(col_indices)
    fractions = np.hstack(fractions)
    J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n))
    return csr_matrix(J)
def check_derivative(fun, jac, x0, bounds=(-np.inf, np.inf), args=(),
                     kwargs={}):
    """Check a derivative implementation against finite differences.

    Compares ``jac(x0)`` with a finite difference approximation of the
    Jacobian (or gradient) of `fun`.

    Parameters
    ----------
    fun : callable
        Function whose derivatives are checked. It receives an ndarray x
        of shape (n,) (never a scalar, even for n=1) and must return a
        1-D array_like of shape (m,) or a scalar.
    jac : callable
        Function computing the Jacobian of `fun`; called the same way as
        `fun` and returning an array_like or sparse matrix of appropriate
        shape. A sparse return is also used as the sparsity structure for
        the finite difference approximation.
    x0 : array_like of shape (n,) or float
        Point at which the derivatives are estimated; a float is converted
        to a 1-D array.
    bounds : 2-tuple of array_like, optional
        Lower and upper bounds on independent variables, each matching the
        size of `x0` or scalar. Defaults to no bounds.
    args, kwargs : tuple and dict, optional
        Extra arguments passed to both `fun` and `jac` as
        ``fun(x, *args, **kwargs)``.

    Returns
    -------
    accuracy : float
        Maximum over all elements of the relative error (for reference
        magnitudes above 1) or absolute error (otherwise). Values on the
        order of 1e-6 or lower suggest `jac` is correct.

    See Also
    --------
    approx_derivative : Compute finite difference approximation of derivative.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize._numdiff import check_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> def jac(x, c1, c2):
    ...     return np.array([
    ...         [np.sin(c1 * x[1]),  c1 * x[0] * np.cos(c1 * x[1])],
    ...         [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 * x[1])]
    ...     ])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> check_derivative(f, jac, x0, args=(1, 2))
    2.4492935982947064e-16
    """
    analytic = jac(x0, *args, **kwargs)

    if not issparse(analytic):
        reference = approx_derivative(fun, x0, bounds=bounds,
                                      args=args, kwargs=kwargs)
        err = np.abs(analytic - reference)
        return np.max(err / np.maximum(1, np.abs(reference)))

    # Sparse path: reuse the provided Jacobian as the sparsity structure
    # and compare only the structurally nonzero entries of the error.
    reference = approx_derivative(fun, x0, bounds=bounds, sparsity=analytic,
                                  args=args, kwargs=kwargs)
    analytic = csr_matrix(analytic)
    i, j, err_data = find(analytic - reference)
    ref_data = np.asarray(reference[i, j]).ravel()
    return np.max(np.abs(err_data) / np.maximum(1, np.abs(ref_data)))
class MemoizeJac:
    """Cache the latest ``(fun, grad)`` pair of a combined objective.

    Wraps a function returning both value and gradient so that calling the
    wrapper (value) and its `derivative` method (gradient) at the same
    point costs a single evaluation of the underlying function.
    """

    def __init__(self, fun):
        self.fun = fun
        self.jac = None
        self._value = None
        self.x = None

    def _compute_if_needed(self, x, *args):
        # Re-evaluate when the point changed or the cache was never filled.
        stale = (self._value is None or self.jac is None
                 or not np.all(x == self.x))
        if stale:
            self.x = np.asarray(x).copy()
            result = self.fun(x, *args)
            self._value = result[0]
            self.jac = result[1]

    def __call__(self, x, *args):
        """Return the cached function value at `x`."""
        self._compute_if_needed(x, *args)
        return self._value

    def derivative(self, x, *args):
        """Return the cached gradient at `x`."""
        self._compute_if_needed(x, *args)
        return self.jac
""" + + def __init__(self, fun): + self.fun = fun + self.jac = None + self._value = None + self.x = None + + def _compute_if_needed(self, x, *args): + if not np.all(x == self.x) or self._value is None or self.jac is None: + self.x = np.asarray(x).copy() + fg = self.fun(x, *args) + self.jac = fg[1] + self._value = fg[0] + + def __call__(self, x, *args): + """ returns the function value """ + self._compute_if_needed(x, *args) + return self._value + + def derivative(self, x, *args): + self._compute_if_needed(x, *args) + return self.jac + + +def _wrap_callback(callback, method=None): + """Wrap a user-provided callback so that attributes can be attached.""" + if callback is None or method in {'tnc', 'slsqp', 'cobyla', 'cobyqa'}: + return callback # don't wrap + + sig = inspect.signature(callback) + + if set(sig.parameters) == {'intermediate_result'}: + def wrapped_callback(res): + return callback(intermediate_result=res) + elif method == 'trust-constr': + def wrapped_callback(res): + return callback(np.copy(res.x), res) + elif method == 'differential_evolution': + def wrapped_callback(res): + return callback(np.copy(res.x), res.convergence) + else: + def wrapped_callback(res): + return callback(np.copy(res.x)) + + wrapped_callback.stop_iteration = False + return wrapped_callback + + +class OptimizeResult(_RichResult): + """ + Represents the optimization result. + + Attributes + ---------- + x : ndarray + The solution of the optimization. + success : bool + Whether or not the optimizer exited successfully. + status : int + Termination status of the optimizer. Its value depends on the + underlying solver. Refer to `message` for details. + message : str + Description of the cause of the termination. + fun, jac, hess: ndarray + Values of objective function, its Jacobian and its Hessian (if + available). The Hessians may be approximations, see the documentation + of the function in question. 
+ hess_inv : object + Inverse of the objective function's Hessian; may be an approximation. + Not available for all solvers. The type of this attribute may be + either np.ndarray or scipy.sparse.linalg.LinearOperator. + nfev, njev, nhev : int + Number of evaluations of the objective functions and of its + Jacobian and Hessian. + nit : int + Number of iterations performed by the optimizer. + maxcv : float + The maximum constraint violation. + + Notes + ----- + Depending on the specific solver being used, `OptimizeResult` may + not have all attributes listed here, and they may have additional + attributes not listed here. Since this class is essentially a + subclass of dict with attribute accessors, one can see which + attributes are available using the `OptimizeResult.keys` method. + + """ + pass + + +class OptimizeWarning(UserWarning): + pass + +def _check_positive_definite(Hk): + def is_pos_def(A): + if issymmetric(A): + try: + cholesky(A) + return True + except LinAlgError: + return False + else: + return False + if Hk is not None: + if not is_pos_def(Hk): + raise ValueError("'hess_inv0' matrix isn't positive definite.") + + +def _check_unknown_options(unknown_options): + if unknown_options: + msg = ", ".join(map(str, unknown_options.keys())) + # Stack level 4: this is called from _minimize_*, which is + # called from another function in SciPy. Level 4 is the first + # level in user code. + warnings.warn("Unknown solver options: %s" % msg, OptimizeWarning, stacklevel=4) + + +def is_finite_scalar(x): + """Test whether `x` is either a finite scalar or a finite array scalar. 
+ + """ + return np.size(x) == 1 and np.isfinite(x) + + +_epsilon = sqrt(np.finfo(float).eps) + + +def vecnorm(x, ord=2): + if ord == np.inf: + return np.amax(np.abs(x)) + elif ord == -np.inf: + return np.amin(np.abs(x)) + else: + return np.sum(np.abs(x)**ord, axis=0)**(1.0 / ord) + + +def _prepare_scalar_function(fun, x0, jac=None, args=(), bounds=None, + epsilon=None, finite_diff_rel_step=None, + hess=None): + """ + Creates a ScalarFunction object for use with scalar minimizers + (BFGS/LBFGSB/SLSQP/TNC/CG/etc). + + Parameters + ---------- + fun : callable + The objective function to be minimized. + + ``fun(x, *args) -> float`` + + where ``x`` is an 1-D array with shape (n,) and ``args`` + is a tuple of the fixed parameters needed to completely + specify the function. + x0 : ndarray, shape (n,) + Initial guess. Array of real elements of size (n,), + where 'n' is the number of independent variables. + jac : {callable, '2-point', '3-point', 'cs', None}, optional + Method for computing the gradient vector. If it is a callable, it + should be a function that returns the gradient vector: + + ``jac(x, *args) -> array_like, shape (n,)`` + + If one of `{'2-point', '3-point', 'cs'}` is selected then the gradient + is calculated with a relative step for finite differences. If `None`, + then two-point finite differences with an absolute step is used. + args : tuple, optional + Extra arguments passed to the objective function and its + derivatives (`fun`, `jac` functions). + bounds : sequence, optional + Bounds on variables. 'new-style' bounds are required. + eps : float or ndarray + If `jac is None` the absolute step size used for numerical + approximation of the jacobian via forward differences. + finite_diff_rel_step : None or array_like, optional + If `jac in ['2-point', '3-point', 'cs']` the relative step size to + use for numerical approximation of the jacobian. 
The absolute step + size is computed as ``h = rel_step * sign(x0) * max(1, abs(x0))``, + possibly adjusted to fit into the bounds. For ``jac='3-point'`` + the sign of `h` is ignored. If None (default) then step is selected + automatically. + hess : {callable, '2-point', '3-point', 'cs', None} + Computes the Hessian matrix. If it is callable, it should return the + Hessian matrix: + + ``hess(x, *args) -> {LinearOperator, spmatrix, array}, (n, n)`` + + Alternatively, the keywords {'2-point', '3-point', 'cs'} select a + finite difference scheme for numerical estimation. + Whenever the gradient is estimated via finite-differences, the Hessian + cannot be estimated with options {'2-point', '3-point', 'cs'} and needs + to be estimated using one of the quasi-Newton strategies. + + Returns + ------- + sf : ScalarFunction + """ + if callable(jac): + grad = jac + elif jac in FD_METHODS: + # epsilon is set to None so that ScalarFunction is made to use + # rel_step + epsilon = None + grad = jac + else: + # default (jac is None) is to do 2-point finite differences with + # absolute step size. ScalarFunction has to be provided an + # epsilon value that is not None to use absolute steps. This is + # normally the case from most _minimize* methods. + grad = '2-point' + epsilon = epsilon + + if hess is None: + # ScalarFunction requires something for hess, so we give a dummy + # implementation here if nothing is provided, return a value of None + # so that downstream minimisers halt. The results of `fun.hess` + # should not be used. + def hess(x, *args): + return None + + if bounds is None: + bounds = (-np.inf, np.inf) + + # ScalarFunction caches. Reuse of fun(x) during grad + # calculation reduces overall function evaluations. 
+ sf = ScalarFunction(fun, x0, args, grad, hess, + finite_diff_rel_step, bounds, epsilon=epsilon) + + return sf + + +def _clip_x_for_func(func, bounds): + # ensures that x values sent to func are clipped to bounds + + # this is used as a mitigation for gh11403, slsqp/tnc sometimes + # suggest a move that is outside the limits by 1 or 2 ULP. This + # unclean fix makes sure x is strictly within bounds. + def eval(x): + x = _check_clip_x(x, bounds) + return func(x) + + return eval + + +def _check_clip_x(x, bounds): + if (x < bounds[0]).any() or (x > bounds[1]).any(): + warnings.warn("Values in x were outside bounds during a " + "minimize step, clipping to bounds", + RuntimeWarning, stacklevel=3) + x = np.clip(x, bounds[0], bounds[1]) + return x + + return x + + +def rosen(x): + """ + The Rosenbrock function. + + The function computed is:: + + sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) + + Parameters + ---------- + x : array_like + 1-D array of points at which the Rosenbrock function is to be computed. + + Returns + ------- + f : float + The value of the Rosenbrock function. + + See Also + -------- + rosen_der, rosen_hess, rosen_hess_prod + + Examples + -------- + >>> import numpy as np + >>> from scipy.optimize import rosen + >>> X = 0.1 * np.arange(10) + >>> rosen(X) + 76.56 + + For higher-dimensional input ``rosen`` broadcasts. + In the following example, we use this to plot a 2D landscape. + Note that ``rosen_hess`` does not broadcast in this manner. + + >>> import matplotlib.pyplot as plt + >>> from mpl_toolkits.mplot3d import Axes3D + >>> x = np.linspace(-1, 1, 50) + >>> X, Y = np.meshgrid(x, x) + >>> ax = plt.subplot(111, projection='3d') + >>> ax.plot_surface(X, Y, rosen([X, Y])) + >>> plt.show() + """ + x = asarray(x) + r = np.sum(100.0 * (x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0, + axis=0) + return r + + +def rosen_der(x): + """ + The derivative (i.e. gradient) of the Rosenbrock function. 

    Parameters
    ----------
    x : array_like
        1-D array of points at which the derivative is to be computed.

    Returns
    -------
    rosen_der : (N,) ndarray
        The gradient of the Rosenbrock function at `x`.

    See Also
    --------
    rosen, rosen_hess, rosen_hess_prod

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import rosen_der
    >>> X = 0.1 * np.arange(9)
    >>> rosen_der(X)
    array([ -2. ,  10.6,  15.6,  13.4,   6.4,  -3. , -12.4, -19.4,  62. ])

    """
    x = asarray(x)
    xm = x[1:-1]
    xm_m1 = x[:-2]
    xm_p1 = x[2:]
    der = np.zeros_like(x)
    # interior components couple to both neighbours; the two end points are
    # filled in separately below
    der[1:-1] = (200 * (xm - xm_m1**2) -
                 400 * (xm_p1 - xm**2) * xm - 2 * (1 - xm))
    der[0] = -400 * x[0] * (x[1] - x[0]**2) - 2 * (1 - x[0])
    der[-1] = 200 * (x[-1] - x[-2]**2)
    return der


def rosen_hess(x):
    """
    The Hessian matrix of the Rosenbrock function.

    Parameters
    ----------
    x : array_like
        1-D array of points at which the Hessian matrix is to be computed.

    Returns
    -------
    rosen_hess : ndarray
        The Hessian matrix of the Rosenbrock function at `x`.

    See Also
    --------
    rosen, rosen_der, rosen_hess_prod

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import rosen_hess
    >>> X = 0.1 * np.arange(4)
    >>> rosen_hess(X)
    array([[-38.,   0.,   0.,   0.],
           [  0., 134., -40.,   0.],
           [  0., -40., 130., -80.],
           [  0.,   0., -80., 200.]])

    """
    x = atleast_1d(x)
    # off-diagonal bands first, then the main diagonal of the tridiagonal-plus
    # structure
    H = np.diag(-400 * x[:-1], 1) - np.diag(400 * x[:-1], -1)
    diagonal = np.zeros(len(x), dtype=x.dtype)
    diagonal[0] = 1200 * x[0]**2 - 400 * x[1] + 2
    diagonal[-1] = 200
    diagonal[1:-1] = 202 + 1200 * x[1:-1]**2 - 400 * x[2:]
    H = H + np.diag(diagonal)
    return H


def rosen_hess_prod(x, p):
    """
    Product of the Hessian matrix of the Rosenbrock function with a vector.

    Parameters
    ----------
    x : array_like
        1-D array of points at which the Hessian matrix is to be computed.
    p : array_like
        1-D array, the vector to be multiplied by the Hessian matrix.

    Returns
    -------
    rosen_hess_prod : ndarray
        The Hessian matrix of the Rosenbrock function at `x` multiplied
        by the vector `p`.

    See Also
    --------
    rosen, rosen_der, rosen_hess

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import rosen_hess_prod
    >>> X = 0.1 * np.arange(9)
    >>> p = 0.5 * np.arange(9)
    >>> rosen_hess_prod(X, p)
    array([  -0.,   27.,  -10.,  -95., -192., -265., -278., -195., -180.])

    """
    x = atleast_1d(x)
    Hp = np.zeros(len(x), dtype=x.dtype)
    # computes H @ p directly from the banded structure, without materialising
    # the (N, N) Hessian that rosen_hess builds
    Hp[0] = (1200 * x[0]**2 - 400 * x[1] + 2) * p[0] - 400 * x[0] * p[1]
    Hp[1:-1] = (-400 * x[:-2] * p[:-2] +
                (202 + 1200 * x[1:-1]**2 - 400 * x[2:]) * p[1:-1] -
                400 * x[1:-1] * p[2:])
    Hp[-1] = -400 * x[-2] * p[-2] + 200*p[-1]
    return Hp


def _wrap_scalar_function(function, args):
    # wraps a minimizer function to count number of evaluations
    # and to easily provide an args kwd.
    ncalls = [0]
    if function is None:
        return ncalls, None

    def function_wrapper(x, *wrapper_args):
        ncalls[0] += 1
        # A copy of x is sent to the user function (gh13740)
        fx = function(np.copy(x), *(wrapper_args + args))
        # Ideally, we'd like to a have a true scalar returned from f(x). For
        # backwards-compatibility, also allow np.array([1.3]), np.array([[1.3]]) etc.
        if not np.isscalar(fx):
            try:
                fx = np.asarray(fx).item()
            except (TypeError, ValueError) as e:
                raise ValueError("The user-provided objective function "
                                 "must return a scalar value.") from e
        return fx

    return ncalls, function_wrapper


class _MaxFuncCallError(RuntimeError):
    # raised by the maxfun-validating wrapper below once the evaluation
    # budget is exhausted; callers catch it to stop iterating gracefully
    pass


def _wrap_scalar_function_maxfun_validation(function, args, maxfun):
    # wraps a minimizer function to count number of evaluations
    # and to easily provide an args kwd.
    ncalls = [0]
    if function is None:
        return ncalls, None

    def function_wrapper(x, *wrapper_args):
        # enforce the evaluation budget before calling the user function
        if ncalls[0] >= maxfun:
            raise _MaxFuncCallError("Too many function calls")
        ncalls[0] += 1
        # A copy of x is sent to the user function (gh13740)
        fx = function(np.copy(x), *(wrapper_args + args))
        # Ideally, we'd like to a have a true scalar returned from f(x). For
        # backwards-compatibility, also allow np.array([1.3]),
        # np.array([[1.3]]) etc.
        if not np.isscalar(fx):
            try:
                fx = np.asarray(fx).item()
            except (TypeError, ValueError) as e:
                raise ValueError("The user-provided objective function "
                                 "must return a scalar value.") from e
        return fx

    return ncalls, function_wrapper


def fmin(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None, maxfun=None,
         full_output=0, disp=1, retall=0, callback=None, initial_simplex=None):
    """
    Minimize a function using the downhill simplex algorithm.

    This algorithm only uses function values, not derivatives or second
    derivatives.

    Parameters
    ----------
    func : callable func(x,*args)
        The objective function to be minimized.
    x0 : ndarray
        Initial guess.
    args : tuple, optional
        Extra arguments passed to func, i.e., ``f(x,*args)``.
    xtol : float, optional
        Absolute error in xopt between iterations that is acceptable for
        convergence.
    ftol : number, optional
        Absolute error in func(xopt) between iterations that is acceptable for
        convergence.
    maxiter : int, optional
        Maximum number of iterations to perform.
    maxfun : number, optional
        Maximum number of function evaluations to make.
    full_output : bool, optional
        Set to True if fopt and warnflag outputs are desired.
    disp : bool, optional
        Set to True to print convergence messages.
    retall : bool, optional
        Set to True to return list of solutions at each iteration.
    callback : callable, optional
        Called after each iteration, as callback(xk), where xk is the
        current parameter vector.
+ initial_simplex : array_like of shape (N + 1, N), optional + Initial simplex. If given, overrides `x0`. + ``initial_simplex[j,:]`` should contain the coordinates of + the jth vertex of the ``N+1`` vertices in the simplex, where + ``N`` is the dimension. + + Returns + ------- + xopt : ndarray + Parameter that minimizes function. + fopt : float + Value of function at minimum: ``fopt = func(xopt)``. + iter : int + Number of iterations performed. + funcalls : int + Number of function calls made. + warnflag : int + 1 : Maximum number of function evaluations made. + 2 : Maximum number of iterations reached. + allvecs : list + Solution at each iteration. + + See also + -------- + minimize: Interface to minimization algorithms for multivariate + functions. See the 'Nelder-Mead' `method` in particular. + + Notes + ----- + Uses a Nelder-Mead simplex algorithm to find the minimum of function of + one or more variables. + + This algorithm has a long history of successful use in applications. + But it will usually be slower than an algorithm that uses first or + second derivative information. In practice, it can have poor + performance in high-dimensional problems and is not robust to + minimizing complicated functions. Additionally, there currently is no + complete theory describing when the algorithm will successfully + converge to the minimum, or how fast it will if it does. Both the ftol and + xtol criteria must be met for convergence. + + Examples + -------- + >>> def f(x): + ... return x**2 + + >>> from scipy import optimize + + >>> minimum = optimize.fmin(f, 1) + Optimization terminated successfully. + Current function value: 0.000000 + Iterations: 17 + Function evaluations: 34 + >>> minimum[0] + -8.8817841970012523e-16 + + References + ---------- + .. [1] Nelder, J.A. and Mead, R. (1965), "A simplex method for function + minimization", The Computer Journal, 7, pp. 308-313 + + .. [2] Wright, M.H. 
(1996), "Direct Search Methods: Once Scorned, Now + Respectable", in Numerical Analysis 1995, Proceedings of the + 1995 Dundee Biennial Conference in Numerical Analysis, D.F. + Griffiths and G.A. Watson (Eds.), Addison Wesley Longman, + Harlow, UK, pp. 191-208. + + """ + opts = {'xatol': xtol, + 'fatol': ftol, + 'maxiter': maxiter, + 'maxfev': maxfun, + 'disp': disp, + 'return_all': retall, + 'initial_simplex': initial_simplex} + + callback = _wrap_callback(callback) + res = _minimize_neldermead(func, x0, args, callback=callback, **opts) + if full_output: + retlist = res['x'], res['fun'], res['nit'], res['nfev'], res['status'] + if retall: + retlist += (res['allvecs'], ) + return retlist + else: + if retall: + return res['x'], res['allvecs'] + else: + return res['x'] + + +def _minimize_neldermead(func, x0, args=(), callback=None, + maxiter=None, maxfev=None, disp=False, + return_all=False, initial_simplex=None, + xatol=1e-4, fatol=1e-4, adaptive=False, bounds=None, + **unknown_options): + """ + Minimization of scalar function of one or more variables using the + Nelder-Mead algorithm. + + Options + ------- + disp : bool + Set to True to print convergence messages. + maxiter, maxfev : int + Maximum allowed number of iterations and function evaluations. + Will default to ``N*200``, where ``N`` is the number of + variables, if neither `maxiter` or `maxfev` is set. If both + `maxiter` and `maxfev` are set, minimization will stop at the + first reached. + return_all : bool, optional + Set to True to return a list of the best solution at each of the + iterations. + initial_simplex : array_like of shape (N + 1, N) + Initial simplex. If given, overrides `x0`. + ``initial_simplex[j,:]`` should contain the coordinates of + the jth vertex of the ``N+1`` vertices in the simplex, where + ``N`` is the dimension. + xatol : float, optional + Absolute error in xopt between iterations that is acceptable for + convergence. 
+ fatol : number, optional + Absolute error in func(xopt) between iterations that is acceptable for + convergence. + adaptive : bool, optional + Adapt algorithm parameters to dimensionality of problem. Useful for + high-dimensional minimization [1]_. + bounds : sequence or `Bounds`, optional + Bounds on variables. There are two ways to specify the bounds: + + 1. Instance of `Bounds` class. + 2. Sequence of ``(min, max)`` pairs for each element in `x`. None + is used to specify no bound. + + Note that this just clips all vertices in simplex based on + the bounds. + + References + ---------- + .. [1] Gao, F. and Han, L. + Implementing the Nelder-Mead simplex algorithm with adaptive + parameters. 2012. Computational Optimization and Applications. + 51:1, pp. 259-277 + + """ + _check_unknown_options(unknown_options) + maxfun = maxfev + retall = return_all + + x0 = np.atleast_1d(x0).flatten() + dtype = x0.dtype if np.issubdtype(x0.dtype, np.inexact) else np.float64 + x0 = np.asarray(x0, dtype=dtype) + + if adaptive: + dim = float(len(x0)) + rho = 1 + chi = 1 + 2/dim + psi = 0.75 - 1/(2*dim) + sigma = 1 - 1/dim + else: + rho = 1 + chi = 2 + psi = 0.5 + sigma = 0.5 + + nonzdelt = 0.05 + zdelt = 0.00025 + + if bounds is not None: + lower_bound, upper_bound = bounds.lb, bounds.ub + # check bounds + if (lower_bound > upper_bound).any(): + raise ValueError("Nelder Mead - one of the lower bounds " + "is greater than an upper bound.") + if np.any(lower_bound > x0) or np.any(x0 > upper_bound): + warnings.warn("Initial guess is not within the specified bounds", + OptimizeWarning, stacklevel=3) + + if bounds is not None: + x0 = np.clip(x0, lower_bound, upper_bound) + + if initial_simplex is None: + N = len(x0) + + sim = np.empty((N + 1, N), dtype=x0.dtype) + sim[0] = x0 + for k in range(N): + y = np.array(x0, copy=True) + if y[k] != 0: + y[k] = (1 + nonzdelt)*y[k] + else: + y[k] = zdelt + sim[k + 1] = y + else: + sim = np.atleast_2d(initial_simplex).copy() + dtype = sim.dtype if 
np.issubdtype(sim.dtype, np.inexact) else np.float64 + sim = np.asarray(sim, dtype=dtype) + if sim.ndim != 2 or sim.shape[0] != sim.shape[1] + 1: + raise ValueError("`initial_simplex` should be an array of shape (N+1,N)") + if len(x0) != sim.shape[1]: + raise ValueError("Size of `initial_simplex` is not consistent with `x0`") + N = sim.shape[1] + + if retall: + allvecs = [sim[0]] + + # If neither are set, then set both to default + if maxiter is None and maxfun is None: + maxiter = N * 200 + maxfun = N * 200 + elif maxiter is None: + # Convert remaining Nones, to np.inf, unless the other is np.inf, in + # which case use the default to avoid unbounded iteration + if maxfun == np.inf: + maxiter = N * 200 + else: + maxiter = np.inf + elif maxfun is None: + if maxiter == np.inf: + maxfun = N * 200 + else: + maxfun = np.inf + + if bounds is not None: + # The default simplex construction may make all entries (for a given + # parameter) greater than an upper bound if x0 is very close to the + # upper bound. If one simply clips the simplex to the bounds this could + # make the simplex entries degenerate. If that occurs reflect into the + # interior. 
+ msk = sim > upper_bound + # reflect into the interior + sim = np.where(msk, 2*upper_bound - sim, sim) + # but make sure the reflection is no less than the lower_bound + sim = np.clip(sim, lower_bound, upper_bound) + + one2np1 = list(range(1, N + 1)) + fsim = np.full((N + 1,), np.inf, dtype=float) + + fcalls, func = _wrap_scalar_function_maxfun_validation(func, args, maxfun) + + try: + for k in range(N + 1): + fsim[k] = func(sim[k]) + except _MaxFuncCallError: + pass + finally: + ind = np.argsort(fsim) + sim = np.take(sim, ind, 0) + fsim = np.take(fsim, ind, 0) + + ind = np.argsort(fsim) + fsim = np.take(fsim, ind, 0) + # sort so sim[0,:] has the lowest function value + sim = np.take(sim, ind, 0) + + iterations = 1 + + while (fcalls[0] < maxfun and iterations < maxiter): + try: + if (np.max(np.ravel(np.abs(sim[1:] - sim[0]))) <= xatol and + np.max(np.abs(fsim[0] - fsim[1:])) <= fatol): + break + + xbar = np.add.reduce(sim[:-1], 0) / N + xr = (1 + rho) * xbar - rho * sim[-1] + if bounds is not None: + xr = np.clip(xr, lower_bound, upper_bound) + fxr = func(xr) + doshrink = 0 + + if fxr < fsim[0]: + xe = (1 + rho * chi) * xbar - rho * chi * sim[-1] + if bounds is not None: + xe = np.clip(xe, lower_bound, upper_bound) + fxe = func(xe) + + if fxe < fxr: + sim[-1] = xe + fsim[-1] = fxe + else: + sim[-1] = xr + fsim[-1] = fxr + else: # fsim[0] <= fxr + if fxr < fsim[-2]: + sim[-1] = xr + fsim[-1] = fxr + else: # fxr >= fsim[-2] + # Perform contraction + if fxr < fsim[-1]: + xc = (1 + psi * rho) * xbar - psi * rho * sim[-1] + if bounds is not None: + xc = np.clip(xc, lower_bound, upper_bound) + fxc = func(xc) + + if fxc <= fxr: + sim[-1] = xc + fsim[-1] = fxc + else: + doshrink = 1 + else: + # Perform an inside contraction + xcc = (1 - psi) * xbar + psi * sim[-1] + if bounds is not None: + xcc = np.clip(xcc, lower_bound, upper_bound) + fxcc = func(xcc) + + if fxcc < fsim[-1]: + sim[-1] = xcc + fsim[-1] = fxcc + else: + doshrink = 1 + + if doshrink: + for j in one2np1: + 
sim[j] = sim[0] + sigma * (sim[j] - sim[0]) + if bounds is not None: + sim[j] = np.clip( + sim[j], lower_bound, upper_bound) + fsim[j] = func(sim[j]) + iterations += 1 + except _MaxFuncCallError: + pass + finally: + ind = np.argsort(fsim) + sim = np.take(sim, ind, 0) + fsim = np.take(fsim, ind, 0) + if retall: + allvecs.append(sim[0]) + intermediate_result = OptimizeResult(x=sim[0], fun=fsim[0]) + if _call_callback_maybe_halt(callback, intermediate_result): + break + + x = sim[0] + fval = np.min(fsim) + warnflag = 0 + + if fcalls[0] >= maxfun: + warnflag = 1 + msg = _status_message['maxfev'] + if disp: + warnings.warn(msg, RuntimeWarning, stacklevel=3) + elif iterations >= maxiter: + warnflag = 2 + msg = _status_message['maxiter'] + if disp: + warnings.warn(msg, RuntimeWarning, stacklevel=3) + else: + msg = _status_message['success'] + if disp: + print(msg) + print(" Current function value: %f" % fval) + print(" Iterations: %d" % iterations) + print(" Function evaluations: %d" % fcalls[0]) + + result = OptimizeResult(fun=fval, nit=iterations, nfev=fcalls[0], + status=warnflag, success=(warnflag == 0), + message=msg, x=x, final_simplex=(sim, fsim)) + if retall: + result['allvecs'] = allvecs + return result + + +def approx_fprime(xk, f, epsilon=_epsilon, *args): + """Finite difference approximation of the derivatives of a + scalar or vector-valued function. + + If a function maps from :math:`R^n` to :math:`R^m`, its derivatives form + an m-by-n matrix + called the Jacobian, where an element :math:`(i, j)` is a partial + derivative of f[i] with respect to ``xk[j]``. + + Parameters + ---------- + xk : array_like + The coordinate vector at which to determine the gradient of `f`. + f : callable + Function of which to estimate the derivatives of. Has the signature + ``f(xk, *args)`` where `xk` is the argument in the form of a 1-D array + and `args` is a tuple of any additional fixed parameters needed to + completely specify the function. 
        The argument `xk` passed to this
        function is an ndarray of shape (n,) (never a scalar even if n=1).
        It must return a 1-D array_like of shape (m,) or a scalar.

        .. versionchanged:: 1.9.0
            `f` is now able to return a 1-D array-like, with the :math:`(m, n)`
            Jacobian being estimated.

    epsilon : {float, array_like}, optional
        Increment to `xk` to use for determining the function gradient.
        If a scalar, uses the same finite difference delta for all partial
        derivatives. If an array, should contain one value per element of
        `xk`. Defaults to ``sqrt(np.finfo(float).eps)``, which is approximately
        1.49e-08.
    \\*args : args, optional
        Any other arguments that are to be passed to `f`.

    Returns
    -------
    jac : ndarray
        The partial derivatives of `f` to `xk`.

    See Also
    --------
    check_grad : Check correctness of gradient function against approx_fprime.

    Notes
    -----
    The function gradient is determined by the forward finite difference
    formula::

                 f(xk[i] + epsilon[i]) - f(xk[i])
        f'[i] = ---------------------------------
                            epsilon[i]

    Examples
    --------
    >>> import numpy as np
    >>> from scipy import optimize
    >>> def func(x, c0, c1):
    ...     "Coordinate vector `x` should be an array of size two."
    ...     return c0 * x[0]**2 + c1*x[1]**2

    >>> x = np.ones(2)
    >>> c0, c1 = (1, 200)
    >>> eps = np.sqrt(np.finfo(float).eps)
    >>> optimize.approx_fprime(x, func, [eps, np.sqrt(200) * eps], c0, c1)
    array([   2.        ,  400.00004208])

    """
    xk = np.asarray(xk, float)
    # evaluate f(xk) once here and hand it to approx_derivative as f0, saving
    # one evaluation of the 2-point scheme
    f0 = f(xk, *args)

    return approx_derivative(f, xk, method='2-point', abs_step=epsilon,
                             args=args, f0=f0)


def check_grad(func, grad, x0, *args, epsilon=_epsilon,
               direction='all', seed=None):
    """Check the correctness of a gradient function by comparing it against a
    (forward) finite-difference approximation of the gradient.

    Parameters
    ----------
    func : callable ``func(x0, *args)``
        Function whose derivative is to be checked.
    grad : callable ``grad(x0, *args)``
        Jacobian of `func`.
    x0 : ndarray
        Points to check `grad` against forward difference approximation of grad
        using `func`.
    args : \\*args, optional
        Extra arguments passed to `func` and `grad`.
    epsilon : float, optional
        Step size used for the finite difference approximation. It defaults to
        ``sqrt(np.finfo(float).eps)``, which is approximately 1.49e-08.
    direction : str, optional
        If set to ``'random'``, then gradients along a random vector
        are used to check `grad` against forward difference approximation
        using `func`. By default it is ``'all'``, in which case, all
        the one hot direction vectors are considered to check `grad`.
        If `func` is a vector valued function then only ``'all'`` can be used.
    seed : {None, int, `numpy.random.Generator`, `numpy.random.RandomState`}, optional
        If `seed` is None (or `np.random`), the `numpy.random.RandomState`
        singleton is used.
        If `seed` is an int, a new ``RandomState`` instance is used,
        seeded with `seed`.
        If `seed` is already a ``Generator`` or ``RandomState`` instance then
        that instance is used.
        Specify `seed` for reproducing the return value from this function.
        The random numbers generated with this seed affect the random vector
        along which gradients are computed to check ``grad``. Note that `seed`
        is only used when `direction` argument is set to `'random'`.

    Returns
    -------
    err : float
        The square root of the sum of squares (i.e., the 2-norm) of the
        difference between ``grad(x0, *args)`` and the finite difference
        approximation of `grad` using func at the points `x0`.

    See Also
    --------
    approx_fprime

    Examples
    --------
    >>> import numpy as np
    >>> def func(x):
    ...     return x[0]**2 - 0.5 * x[1]**3
    >>> def grad(x):
    ...     return [2 * x[0], -1.5 * x[1]**2]
    >>> from scipy.optimize import check_grad
    >>> check_grad(func, grad, [1.5, -1.5])
    2.9802322387695312e-08 # may vary
    >>> rng = np.random.default_rng()
    >>> check_grad(func, grad, [1.5, -1.5],
    ...            direction='random', seed=rng)
    2.9802322387695312e-08

    """
    step = epsilon
    x0 = np.asarray(x0)

    def g(w, func, x0, v, *args):
        # scalar restriction of func along direction v: g(w) = func(x0 + w*v)
        return func(x0 + w*v, *args)

    if direction == 'random':
        _grad = np.asanyarray(grad(x0, *args))
        if _grad.ndim > 1:
            raise ValueError("'random' can only be used with scalar valued"
                             " func")
        random_state = check_random_state(seed)
        v = random_state.normal(0, 1, size=(x0.shape))
        _args = (func, x0, v) + args
        _func = g
        vars = np.zeros((1,))
        # only the directional derivative along v is compared, not the full
        # gradient vector
        analytical_grad = np.dot(_grad, v)
    elif direction == 'all':
        _args = args
        _func = func
        vars = x0
        analytical_grad = grad(x0, *args)
    else:
        raise ValueError(f"{direction} is not a valid string for "
                         "``direction`` argument")

    return np.sqrt(np.sum(np.abs(
        (analytical_grad - approx_fprime(vars, _func, step, *_args))**2
    )))


def approx_fhess_p(x0, p, fprime, epsilon, *args):
    # Forward-difference approximation of the Hessian-vector product
    # H(x0) @ p from two gradient evaluations.
    # calculate fprime(x0) first, as this may be cached by ScalarFunction
    f1 = fprime(*((x0,) + args))
    f2 = fprime(*((x0 + epsilon*p,) + args))
    return (f2 - f1) / epsilon


class _LineSearchError(RuntimeError):
    pass


def _line_search_wolfe12(f, fprime, xk, pk, gfk, old_fval, old_old_fval,
                         **kwargs):
    """
    Same as line_search_wolfe1, but fall back to line_search_wolfe2 if
    suitable step length is not found, and raise an exception if a
    suitable step length is not found.
+ + Raises + ------ + _LineSearchError + If no suitable step size is found + + """ + + extra_condition = kwargs.pop('extra_condition', None) + + ret = line_search_wolfe1(f, fprime, xk, pk, gfk, + old_fval, old_old_fval, + **kwargs) + + if ret[0] is not None and extra_condition is not None: + xp1 = xk + ret[0] * pk + if not extra_condition(ret[0], xp1, ret[3], ret[5]): + # Reject step if extra_condition fails + ret = (None,) + + if ret[0] is None: + # line search failed: try different one. + with warnings.catch_warnings(): + warnings.simplefilter('ignore', LineSearchWarning) + kwargs2 = {} + for key in ('c1', 'c2', 'amax'): + if key in kwargs: + kwargs2[key] = kwargs[key] + ret = line_search_wolfe2(f, fprime, xk, pk, gfk, + old_fval, old_old_fval, + extra_condition=extra_condition, + **kwargs2) + + if ret[0] is None: + raise _LineSearchError() + + return ret + + +def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=np.inf, + epsilon=_epsilon, maxiter=None, full_output=0, disp=1, + retall=0, callback=None, xrtol=0, c1=1e-4, c2=0.9, + hess_inv0=None): + """ + Minimize a function using the BFGS algorithm. + + Parameters + ---------- + f : callable ``f(x,*args)`` + Objective function to be minimized. + x0 : ndarray + Initial guess, shape (n,) + fprime : callable ``f'(x,*args)``, optional + Gradient of f. + args : tuple, optional + Extra arguments passed to f and fprime. + gtol : float, optional + Terminate successfully if gradient norm is less than `gtol` + norm : float, optional + Order of norm (Inf is max, -Inf is min) + epsilon : int or ndarray, optional + If `fprime` is approximated, use this value for the step size. + callback : callable, optional + An optional user-supplied function to call after each + iteration. Called as ``callback(xk)``, where ``xk`` is the + current parameter vector. + maxiter : int, optional + Maximum number of iterations to perform. 
+ full_output : bool, optional + If True, return ``fopt``, ``func_calls``, ``grad_calls``, and + ``warnflag`` in addition to ``xopt``. + disp : bool, optional + Print convergence message if True. + retall : bool, optional + Return a list of results at each iteration if True. + xrtol : float, default: 0 + Relative tolerance for `x`. Terminate successfully if step + size is less than ``xk * xrtol`` where ``xk`` is the current + parameter vector. + c1 : float, default: 1e-4 + Parameter for Armijo condition rule. + c2 : float, default: 0.9 + Parameter for curvature condition rule. + hess_inv0 : None or ndarray, optional`` + Initial inverse hessian estimate, shape (n, n). If None (default) then + the identity matrix is used. + + Returns + ------- + xopt : ndarray + Parameters which minimize f, i.e., ``f(xopt) == fopt``. + fopt : float + Minimum value. + gopt : ndarray + Value of gradient at minimum, f'(xopt), which should be near 0. + Bopt : ndarray + Value of 1/f''(xopt), i.e., the inverse Hessian matrix. + func_calls : int + Number of function_calls made. + grad_calls : int + Number of gradient calls made. + warnflag : integer + 1 : Maximum number of iterations exceeded. + 2 : Gradient and/or function calls not changing. + 3 : NaN result encountered. + allvecs : list + The value of `xopt` at each iteration. Only returned if `retall` is + True. + + Notes + ----- + Optimize the function, `f`, whose gradient is given by `fprime` + using the quasi-Newton method of Broyden, Fletcher, Goldfarb, + and Shanno (BFGS). + + Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``. + + See Also + -------- + minimize: Interface to minimization algorithms for multivariate + functions. See ``method='BFGS'`` in particular. + + References + ---------- + Wright, and Nocedal 'Numerical Optimization', 1999, p. 198. + + Examples + -------- + >>> import numpy as np + >>> from scipy.optimize import fmin_bfgs + >>> def quadratic_cost(x, Q): + ... return x @ Q @ x + ... 
+ >>> x0 = np.array([-3, -4]) + >>> cost_weight = np.diag([1., 10.]) + >>> # Note that a trailing comma is necessary for a tuple with single element + >>> fmin_bfgs(quadratic_cost, x0, args=(cost_weight,)) + Optimization terminated successfully. + Current function value: 0.000000 + Iterations: 7 # may vary + Function evaluations: 24 # may vary + Gradient evaluations: 8 # may vary + array([ 2.85169950e-06, -4.61820139e-07]) + + >>> def quadratic_cost_grad(x, Q): + ... return 2 * Q @ x + ... + >>> fmin_bfgs(quadratic_cost, x0, quadratic_cost_grad, args=(cost_weight,)) + Optimization terminated successfully. + Current function value: 0.000000 + Iterations: 7 + Function evaluations: 8 + Gradient evaluations: 8 + array([ 2.85916637e-06, -4.54371951e-07]) + + """ + opts = {'gtol': gtol, + 'norm': norm, + 'eps': epsilon, + 'disp': disp, + 'maxiter': maxiter, + 'return_all': retall, + 'xrtol': xrtol, + 'c1': c1, + 'c2': c2, + 'hess_inv0': hess_inv0} + + callback = _wrap_callback(callback) + res = _minimize_bfgs(f, x0, args, fprime, callback=callback, **opts) + + if full_output: + retlist = (res['x'], res['fun'], res['jac'], res['hess_inv'], + res['nfev'], res['njev'], res['status']) + if retall: + retlist += (res['allvecs'], ) + return retlist + else: + if retall: + return res['x'], res['allvecs'] + else: + return res['x'] + + +def _minimize_bfgs(fun, x0, args=(), jac=None, callback=None, + gtol=1e-5, norm=np.inf, eps=_epsilon, maxiter=None, + disp=False, return_all=False, finite_diff_rel_step=None, + xrtol=0, c1=1e-4, c2=0.9, + hess_inv0=None, **unknown_options): + """ + Minimization of scalar function of one or more variables using the + BFGS algorithm. + + Options + ------- + disp : bool + Set to True to print convergence messages. + maxiter : int + Maximum number of iterations to perform. + gtol : float + Terminate successfully if gradient norm is less than `gtol`. + norm : float + Order of norm (Inf is max, -Inf is min). 
+ eps : float or ndarray + If `jac is None` the absolute step size used for numerical + approximation of the jacobian via forward differences. + return_all : bool, optional + Set to True to return a list of the best solution at each of the + iterations. + finite_diff_rel_step : None or array_like, optional + If `jac in ['2-point', '3-point', 'cs']` the relative step size to + use for numerical approximation of the jacobian. The absolute step + size is computed as ``h = rel_step * sign(x) * max(1, abs(x))``, + possibly adjusted to fit into the bounds. For ``jac='3-point'`` + the sign of `h` is ignored. If None (default) then step is selected + automatically. + xrtol : float, default: 0 + Relative tolerance for `x`. Terminate successfully if step size is + less than ``xk * xrtol`` where ``xk`` is the current parameter vector. + c1 : float, default: 1e-4 + Parameter for Armijo condition rule. + c2 : float, default: 0.9 + Parameter for curvature condition rule. + hess_inv0 : None or ndarray, optional + Initial inverse hessian estimate, shape (n, n). If None (default) then + the identity matrix is used. + + Notes + ----- + Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``. + + If minimization doesn't complete successfully, with an error message of + ``Desired error not necessarily achieved due to precision loss``, then + consider setting `gtol` to a higher value. This precision loss typically + occurs when the (finite difference) numerical differentiation cannot provide + sufficient precision to satisfy the `gtol` termination criterion. + This can happen when working in single precision and a callable jac is not + provided. For single precision problems a `gtol` of 1e-3 seems to work. 
+ """ + _check_unknown_options(unknown_options) + _check_positive_definite(hess_inv0) + retall = return_all + + x0 = asarray(x0).flatten() + if x0.ndim == 0: + x0.shape = (1,) + if maxiter is None: + maxiter = len(x0) * 200 + + sf = _prepare_scalar_function(fun, x0, jac, args=args, epsilon=eps, + finite_diff_rel_step=finite_diff_rel_step) + + f = sf.fun + myfprime = sf.grad + + old_fval = f(x0) + gfk = myfprime(x0) + + k = 0 + N = len(x0) + I = np.eye(N, dtype=int) + Hk = I if hess_inv0 is None else hess_inv0 + + # Sets the initial step guess to dx ~ 1 + old_old_fval = old_fval + np.linalg.norm(gfk) / 2 + + xk = x0 + if retall: + allvecs = [x0] + warnflag = 0 + gnorm = vecnorm(gfk, ord=norm) + while (gnorm > gtol) and (k < maxiter): + pk = -np.dot(Hk, gfk) + try: + alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \ + _line_search_wolfe12(f, myfprime, xk, pk, gfk, + old_fval, old_old_fval, amin=1e-100, + amax=1e100, c1=c1, c2=c2) + except _LineSearchError: + # Line search failed to find a better solution. + warnflag = 2 + break + + sk = alpha_k * pk + xkp1 = xk + sk + + if retall: + allvecs.append(xkp1) + xk = xkp1 + if gfkp1 is None: + gfkp1 = myfprime(xkp1) + + yk = gfkp1 - gfk + gfk = gfkp1 + k += 1 + intermediate_result = OptimizeResult(x=xk, fun=old_fval) + if _call_callback_maybe_halt(callback, intermediate_result): + break + gnorm = vecnorm(gfk, ord=norm) + if (gnorm <= gtol): + break + + # See Chapter 5 in P.E. Frandsen, K. Jonasson, H.B. Nielsen, + # O. Tingleff: "Unconstrained Optimization", IMM, DTU. 1999. + # These notes are available here: + # http://www2.imm.dtu.dk/documents/ftp/publlec.html + if (alpha_k*vecnorm(pk) <= xrtol*(xrtol + vecnorm(xk))): + break + + if not np.isfinite(old_fval): + # We correctly found +-Inf as optimal value, or something went + # wrong. + warnflag = 2 + break + + rhok_inv = np.dot(yk, sk) + # this was handled in numeric, let it remains for more safety + # Cryptic comment above is preserved for posterity. 
Future reader: + # consider change to condition below proposed in gh-1261/gh-17345. + if rhok_inv == 0.: + rhok = 1000.0 + if disp: + msg = "Divide-by-zero encountered: rhok assumed large" + _print_success_message_or_warn(True, msg) + else: + rhok = 1. / rhok_inv + + A1 = I - sk[:, np.newaxis] * yk[np.newaxis, :] * rhok + A2 = I - yk[:, np.newaxis] * sk[np.newaxis, :] * rhok + Hk = np.dot(A1, np.dot(Hk, A2)) + (rhok * sk[:, np.newaxis] * + sk[np.newaxis, :]) + + fval = old_fval + + if warnflag == 2: + msg = _status_message['pr_loss'] + elif k >= maxiter: + warnflag = 1 + msg = _status_message['maxiter'] + elif np.isnan(gnorm) or np.isnan(fval) or np.isnan(xk).any(): + warnflag = 3 + msg = _status_message['nan'] + else: + msg = _status_message['success'] + + if disp: + _print_success_message_or_warn(warnflag, msg) + print(" Current function value: %f" % fval) + print(" Iterations: %d" % k) + print(" Function evaluations: %d" % sf.nfev) + print(" Gradient evaluations: %d" % sf.ngev) + + result = OptimizeResult(fun=fval, jac=gfk, hess_inv=Hk, nfev=sf.nfev, + njev=sf.ngev, status=warnflag, + success=(warnflag == 0), message=msg, x=xk, + nit=k) + if retall: + result['allvecs'] = allvecs + return result + + +def _print_success_message_or_warn(warnflag, message, warntype=None): + if not warnflag: + print(message) + else: + warnings.warn(message, warntype or OptimizeWarning, stacklevel=3) + + +def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=np.inf, + epsilon=_epsilon, maxiter=None, full_output=0, disp=1, retall=0, + callback=None, c1=1e-4, c2=0.4): + """ + Minimize a function using a nonlinear conjugate gradient algorithm. + + Parameters + ---------- + f : callable, ``f(x, *args)`` + Objective function to be minimized. Here `x` must be a 1-D array of + the variables that are to be changed in the search for a minimum, and + `args` are the other (fixed) parameters of `f`. + x0 : ndarray + A user-supplied initial estimate of `xopt`, the optimal value of `x`. 
def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=np.inf,
            epsilon=_epsilon, maxiter=None, full_output=0, disp=1, retall=0,
            callback=None, c1=1e-4, c2=0.4):
    """
    Minimize a function using a nonlinear conjugate gradient algorithm.

    Parameters
    ----------
    f : callable, ``f(x, *args)``
        Objective function to be minimized. Here `x` must be a 1-D array of
        the variables that are to be changed in the search for a minimum, and
        `args` are the other (fixed) parameters of `f`.
    x0 : ndarray
        A user-supplied initial estimate of `xopt`, the optimal value of `x`.
        It must be a 1-D array of values.
    fprime : callable, ``fprime(x, *args)``, optional
        A function that returns the gradient of `f` at `x`. Here `x` and `args`
        are as described above for `f`. The returned value must be a 1-D array.
        Defaults to None, in which case the gradient is approximated
        numerically (see `epsilon`, below).
    args : tuple, optional
        Parameter values passed to `f` and `fprime`. Must be supplied whenever
        additional fixed parameters are needed to completely specify the
        functions `f` and `fprime`.
    gtol : float, optional
        Stop when the norm of the gradient is less than `gtol`.
    norm : float, optional
        Order to use for the norm of the gradient
        (``-np.inf`` is min, ``np.inf`` is max).
    epsilon : float or ndarray, optional
        Step size(s) to use when `fprime` is approximated numerically. Can be a
        scalar or a 1-D array. Defaults to ``sqrt(eps)``, with eps the
        floating point machine precision. Usually ``sqrt(eps)`` is about
        1.5e-8.
    maxiter : int, optional
        Maximum number of iterations to perform. Default is ``200 * len(x0)``.
    full_output : bool, optional
        If True, return `fopt`, `func_calls`, `grad_calls`, and `warnflag` in
        addition to `xopt`. See the Returns section below for additional
        information on optional return values.
    disp : bool, optional
        If True, return a convergence message, followed by `xopt`.
    retall : bool, optional
        If True, add to the returned values the results of each iteration.
    callback : callable, optional
        An optional user-supplied function, called after each iteration.
        Called as ``callback(xk)``, where ``xk`` is the current value of `x0`.
    c1 : float, default: 1e-4
        Parameter for Armijo condition rule.
    c2 : float, default: 0.4
        Parameter for curvature condition rule.

    Returns
    -------
    xopt : ndarray
        Parameters which minimize f, i.e., ``f(xopt) == fopt``.
    fopt : float, optional
        Minimum value found, f(xopt). Only returned if `full_output` is True.
    func_calls : int, optional
        The number of function_calls made. Only returned if `full_output`
        is True.
    grad_calls : int, optional
        The number of gradient calls made. Only returned if `full_output` is
        True.
    warnflag : int, optional
        Integer value with warning status, only returned if `full_output` is
        True.

        0 : Success.

        1 : The maximum number of iterations was exceeded.

        2 : Gradient and/or function calls were not changing. May indicate
            that precision was lost, i.e., the routine did not converge.

        3 : NaN result encountered.

    allvecs : list of ndarray, optional
        List of arrays, containing the results at each iteration.
        Only returned if `retall` is True.

    See Also
    --------
    minimize : common interface to all `scipy.optimize` algorithms for
               unconstrained and constrained minimization of multivariate
               functions. It provides an alternative way to call
               ``fmin_cg``, by specifying ``method='CG'``.

    Notes
    -----
    This conjugate gradient algorithm is based on that of Polak and Ribiere
    [1]_.

    Conjugate gradient methods tend to work better when:

    1. `f` has a unique global minimizing point, and no local minima or
       other stationary points,
    2. `f` is, at least locally, reasonably well approximated by a
       quadratic function of the variables,
    3. `f` is continuous and has a continuous gradient,
    4. `fprime` is not too large, e.g., has a norm less than 1000,
    5. The initial guess, `x0`, is reasonably close to `f` 's global
       minimizing point, `xopt`.

    Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``.

    References
    ----------
    .. [1] Wright & Nocedal, "Numerical Optimization", 1999, pp. 120-122.

    Examples
    --------
    Example 1: seek the minimum value of the expression
    ``a*u**2 + b*u*v + c*v**2 + d*u + e*v + f`` for given values
    of the parameters and an initial guess ``(u, v) = (0, 0)``.

    >>> import numpy as np
    >>> args = (2, 3, 7, 8, 9, 10)  # parameter values
    >>> def f(x, *args):
    ...     u, v = x
    ...     a, b, c, d, e, f = args
    ...     return a*u**2 + b*u*v + c*v**2 + d*u + e*v + f
    >>> def gradf(x, *args):
    ...     u, v = x
    ...     a, b, c, d, e, f = args
    ...     gu = 2*a*u + b*v + d     # u-component of the gradient
    ...     gv = b*u + 2*c*v + e     # v-component of the gradient
    ...     return np.asarray((gu, gv))
    >>> x0 = np.asarray((0, 0))  # Initial guess.
    >>> from scipy import optimize
    >>> res1 = optimize.fmin_cg(f, x0, fprime=gradf, args=args)
    Optimization terminated successfully.
             Current function value: 1.617021
             Iterations: 4
             Function evaluations: 8
             Gradient evaluations: 8
    >>> res1
    array([-1.80851064, -0.25531915])

    Example 2: solve the same problem using the `minimize` function.
    (This `myopts` dictionary shows all of the available options,
    although in practice only non-default values would be needed.
    The returned value will be a dictionary.)

    >>> opts = {'maxiter' : None,    # default value.
    ...         'disp' : True,    # non-default value.
    ...         'gtol' : 1e-5,    # default value.
    ...         'norm' : np.inf,  # default value.
    ...         'eps' : 1.4901161193847656e-08}  # default value.
    >>> res2 = optimize.minimize(f, x0, jac=gradf, args=args,
    ...                          method='CG', options=opts)
    Optimization terminated successfully.
            Current function value: 1.617021
            Iterations: 4
            Function evaluations: 8
            Gradient evaluations: 8
    >>> res2.x  # minimum found
    array([-1.80851064, -0.25531915])

    """
    # Delegate to the `minimize`-style implementation, translating the
    # legacy keyword names to the corresponding option names.
    res = _minimize_cg(f, x0, args, fprime, callback=_wrap_callback(callback),
                       c1=c1, c2=c2, gtol=gtol, norm=norm, eps=epsilon,
                       disp=disp, maxiter=maxiter, return_all=retall)

    # Unpack the OptimizeResult into the legacy tuple / array return shapes.
    if full_output:
        retlist = [res['x'], res['fun'], res['nfev'], res['njev'],
                   res['status']]
        if retall:
            retlist.append(res['allvecs'])
        return tuple(retlist)
    if retall:
        return res['x'], res['allvecs']
    return res['x']
def _minimize_cg(fun, x0, args=(), jac=None, callback=None,
                 gtol=1e-5, norm=np.inf, eps=_epsilon, maxiter=None,
                 disp=False, return_all=False, finite_diff_rel_step=None,
                 c1=1e-4, c2=0.4, **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    conjugate gradient algorithm.

    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    maxiter : int
        Maximum number of iterations to perform.
    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.
    norm : float
        Order of norm (Inf is max, -Inf is min).
    eps : float or ndarray
        If `jac is None` the absolute step size used for numerical
        approximation of the jacobian via forward differences.
    return_all : bool, optional
        Set to True to return a list of the best solution at each of the
        iterations.
    finite_diff_rel_step : None or array_like, optional
        If `jac in ['2-point', '3-point', 'cs']` the relative step size to
        use for numerical approximation of the jacobian. The absolute step
        size is computed as ``h = rel_step * sign(x) * max(1, abs(x))``,
        possibly adjusted to fit into the bounds. For ``jac='3-point'``
        the sign of `h` is ignored. If None (default) then step is selected
        automatically.
    c1 : float, default: 1e-4
        Parameter for Armijo condition rule.
    c2 : float, default: 0.4
        Parameter for curvature condition rule.

    Notes
    -----
    Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``.
    """
    _check_unknown_options(unknown_options)

    retall = return_all

    x0 = asarray(x0).flatten()
    if maxiter is None:
        maxiter = len(x0) * 200

    # ScalarFunction wraps fun/jac and counts evaluations (sf.nfev, sf.ngev).
    sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps,
                                  finite_diff_rel_step=finite_diff_rel_step)

    f = sf.fun
    myfprime = sf.grad

    old_fval = f(x0)
    gfk = myfprime(x0)

    k = 0
    xk = x0
    # Sets the initial step guess to dx ~ 1
    old_old_fval = old_fval + np.linalg.norm(gfk) / 2

    if retall:
        allvecs = [xk]
    warnflag = 0
    # First search direction is steepest descent.
    pk = -gfk
    gnorm = vecnorm(gfk, ord=norm)

    # Coefficient of the sufficient-descent test used in descent_condition.
    sigma_3 = 0.01

    while (gnorm > gtol) and (k < maxiter):
        deltak = np.dot(gfk, gfk)

        # Holds the (alpha, x, p, g, gnorm) tuple computed inside the line
        # search so it can be reused afterwards without recomputation.
        cached_step = [None]

        def polak_ribiere_powell_step(alpha, gfkp1=None):
            # One PR+ update: new point, new gradient, restarted beta
            # (clamped at 0), and the next search direction.
            xkp1 = xk + alpha * pk
            if gfkp1 is None:
                gfkp1 = myfprime(xkp1)
            yk = gfkp1 - gfk
            beta_k = max(0, np.dot(yk, gfkp1) / deltak)
            pkp1 = -gfkp1 + beta_k * pk
            gnorm = vecnorm(gfkp1, ord=norm)
            return (alpha, xkp1, pkp1, gfkp1, gnorm)

        def descent_condition(alpha, xkp1, fp1, gfkp1):
            # Polak-Ribiere+ needs an explicit check of a sufficient
            # descent condition, which is not guaranteed by strong Wolfe.
            #
            # See Gilbert & Nocedal, "Global convergence properties of
            # conjugate gradient methods for optimization",
            # SIAM J. Optimization 2, 21 (1992).
            cached_step[:] = polak_ribiere_powell_step(alpha, gfkp1)
            alpha, xk, pk, gfk, gnorm = cached_step

            # Accept step if it leads to convergence.
            if gnorm <= gtol:
                return True

            # Accept step if sufficient descent condition applies.
            return np.dot(pk, gfk) <= -sigma_3 * np.dot(gfk, gfk)

        try:
            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
                     _line_search_wolfe12(f, myfprime, xk, pk, gfk, old_fval,
                                          old_old_fval, c1=c1, c2=c2, amin=1e-100,
                                          amax=1e100, extra_condition=descent_condition)
        except _LineSearchError:
            # Line search failed to find a better solution.
            warnflag = 2
            break

        # Reuse already computed results if possible
        if alpha_k == cached_step[0]:
            alpha_k, xk, pk, gfk, gnorm = cached_step
        else:
            alpha_k, xk, pk, gfk, gnorm = polak_ribiere_powell_step(alpha_k, gfkp1)

        if retall:
            allvecs.append(xk)
        k += 1
        intermediate_result = OptimizeResult(x=xk, fun=old_fval)
        if _call_callback_maybe_halt(callback, intermediate_result):
            break

    fval = old_fval
    # Map the warnflag to a status message; flag 1 (maxiter) and 3 (NaN)
    # are only detected here, after the loop exits.
    if warnflag == 2:
        msg = _status_message['pr_loss']
    elif k >= maxiter:
        warnflag = 1
        msg = _status_message['maxiter']
    elif np.isnan(gnorm) or np.isnan(fval) or np.isnan(xk).any():
        warnflag = 3
        msg = _status_message['nan']
    else:
        msg = _status_message['success']

    if disp:
        _print_success_message_or_warn(warnflag, msg)
        print("         Current function value: %f" % fval)
        print("         Iterations: %d" % k)
        print("         Function evaluations: %d" % sf.nfev)
        print("         Gradient evaluations: %d" % sf.ngev)

    result = OptimizeResult(fun=fval, jac=gfk, nfev=sf.nfev,
                            njev=sf.ngev, status=warnflag,
                            success=(warnflag == 0), message=msg, x=xk,
                            nit=k)
    if retall:
        result['allvecs'] = allvecs
    return result
def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
             epsilon=_epsilon, maxiter=None, full_output=0, disp=1, retall=0,
             callback=None, c1=1e-4, c2=0.9):
    """
    Unconstrained minimization of a function using the Newton-CG method.

    Parameters
    ----------
    f : callable ``f(x, *args)``
        Objective function to be minimized.
    x0 : ndarray
        Initial guess.
    fprime : callable ``f'(x, *args)``
        Gradient of f.
    fhess_p : callable ``fhess_p(x, p, *args)``, optional
        Function which computes the Hessian of f times an
        arbitrary vector, p.
    fhess : callable ``fhess(x, *args)``, optional
        Function to compute the Hessian matrix of f.
    args : tuple, optional
        Extra arguments passed to f, fprime, fhess_p, and fhess
        (the same set of extra arguments is supplied to all of
        these functions).
    epsilon : float or ndarray, optional
        If fhess is approximated, use this value for the step size.
    callback : callable, optional
        An optional user-supplied function which is called after
        each iteration. Called as callback(xk), where xk is the
        current parameter vector.
    avextol : float, optional
        Convergence is assumed when the average relative error in
        the minimizer falls below this amount.
    maxiter : int, optional
        Maximum number of iterations to perform.
    full_output : bool, optional
        If True, return the optional outputs.
    disp : bool, optional
        If True, print convergence message.
    retall : bool, optional
        If True, return a list of results at each iteration.
    c1 : float, default: 1e-4
        Parameter for Armijo condition rule.
    c2 : float, default: 0.9
        Parameter for curvature condition rule

    Returns
    -------
    xopt : ndarray
        Parameters which minimize f, i.e., ``f(xopt) == fopt``.
    fopt : float
        Value of the function at xopt, i.e., ``fopt = f(xopt)``.
    fcalls : int
        Number of function calls made.
    gcalls : int
        Number of gradient calls made.
    hcalls : int
        Number of Hessian calls made.
    warnflag : int
        Warnings generated by the algorithm.
        1 : Maximum number of iterations exceeded.
        2 : Line search failure (precision loss).
        3 : NaN result encountered.
    allvecs : list
        The result at each iteration, if retall is True (see below).

    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'Newton-CG' `method` in particular.

    Notes
    -----
    Only one of `fhess_p` or `fhess` need to be given.  If `fhess`
    is provided, then `fhess_p` will be ignored. If neither `fhess`
    nor `fhess_p` is provided, then the hessian product will be
    approximated using finite differences on `fprime`. `fhess_p`
    must compute the hessian times an arbitrary vector. If it is not
    given, finite-differences on `fprime` are used to compute
    it.

    Newton-CG methods are also called truncated Newton methods. This
    function differs from scipy.optimize.fmin_tnc because

    1. scipy.optimize.fmin_ncg is written purely in Python using NumPy
        and scipy while scipy.optimize.fmin_tnc calls a C function.
    2. scipy.optimize.fmin_ncg is only for unconstrained minimization
        while scipy.optimize.fmin_tnc is for unconstrained minimization
        or box constrained minimization. (Box constraints give
        lower and upper bounds for each variable separately.)

    Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``.

    References
    ----------
    Wright & Nocedal, 'Numerical Optimization', 1999, p. 140.

    """
    # Hand off to the `minimize`-style implementation; legacy names map to
    # option names (avextol -> xtol, epsilon -> eps).
    res = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p,
                             callback=_wrap_callback(callback), c1=c1, c2=c2,
                             xtol=avextol, eps=epsilon, maxiter=maxiter,
                             disp=disp, return_all=retall)

    # Translate the OptimizeResult back into the legacy return shapes.
    if not full_output:
        return (res['x'], res['allvecs']) if retall else res['x']

    retlist = [res['x'], res['fun'], res['nfev'], res['njev'],
               res['nhev'], res['status']]
    if retall:
        retlist.append(res['allvecs'])
    return tuple(retlist)
def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
                       callback=None, xtol=1e-5, eps=_epsilon, maxiter=None,
                       disp=False, return_all=False, c1=1e-4, c2=0.9,
                       **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    Newton-CG algorithm.

    Note that the `jac` parameter (Jacobian) is required.

    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    xtol : float
        Average relative error in solution `xopt` acceptable for
        convergence.
    maxiter : int
        Maximum number of iterations to perform.
    eps : float or ndarray
        If `hessp` is approximated, use this value for the step size.
    return_all : bool, optional
        Set to True to return a list of the best solution at each of the
        iterations.
    c1 : float, default: 1e-4
        Parameter for Armijo condition rule.
    c2 : float, default: 0.9
        Parameter for curvature condition rule.

    Notes
    -----
    Parameters `c1` and `c2` must satisfy ``0 < c1 < c2 < 1``.
    """
    _check_unknown_options(unknown_options)
    if jac is None:
        raise ValueError('Jacobian is required for Newton-CG method')
    fhess_p = hessp
    fhess = hess
    avextol = xtol
    epsilon = eps
    retall = return_all

    x0 = asarray(x0).flatten()
    # TODO: add hessp (callable or FD) to ScalarFunction?
    sf = _prepare_scalar_function(
        fun, x0, jac, args=args, epsilon=eps, hess=hess
    )
    f = sf.fun
    fprime = sf.grad
    # Evaluate the Hessian once at x0 to discover whether it yields a
    # LinearOperator (which only supports matvec products).
    _h = sf.hess(x0)

    # Logic for hess/hessp
    # - If a callable(hess) is provided, then use that
    # - If hess is a FD_METHOD, or the output from hess(x) is a LinearOperator
    #   then create a hessp function using those.
    # - If hess is None but you have callable(hessp) then use the hessp.
    # - If hess and hessp are None then approximate hessp using the grad/jac.

    if (hess in FD_METHODS or isinstance(_h, LinearOperator)):
        fhess = None

        def _hessp(x, p, *args):
            return sf.hess(x).dot(p)

        fhess_p = _hessp

    def terminate(warnflag, msg):
        # Build the final OptimizeResult from the enclosing-scope state
        # (old_fval, gfk, xk, k, hcalls) at the moment of termination.
        if disp:
            _print_success_message_or_warn(warnflag, msg)
            print("         Current function value: %f" % old_fval)
            print("         Iterations: %d" % k)
            print("         Function evaluations: %d" % sf.nfev)
            print("         Gradient evaluations: %d" % sf.ngev)
            print("         Hessian evaluations: %d" % hcalls)
        fval = old_fval
        result = OptimizeResult(fun=fval, jac=gfk, nfev=sf.nfev,
                                njev=sf.ngev, nhev=hcalls, status=warnflag,
                                success=(warnflag == 0), message=msg, x=xk,
                                nit=k)
        if retall:
            result['allvecs'] = allvecs
        return result

    hcalls = 0
    if maxiter is None:
        maxiter = len(x0)*200
    # Cap on inner CG iterations per outer Newton step.
    cg_maxiter = 20*len(x0)

    xtol = len(x0) * avextol
    # Make sure we enter the while loop.
    update_l1norm = np.finfo(float).max
    xk = np.copy(x0)
    if retall:
        allvecs = [xk]
    k = 0
    gfk = None
    old_fval = f(x0)
    old_old_fval = None
    float64eps = np.finfo(np.float64).eps
    while update_l1norm > xtol:
        if k >= maxiter:
            msg = "Warning: " + _status_message['maxiter']
            return terminate(1, msg)
        # Compute a search direction pk by applying the CG method to
        #  del2 f(xk) p = - grad f(xk) starting from 0.
        b = -fprime(xk)
        maggrad = np.linalg.norm(b, ord=1)
        # Forcing sequence: inner CG solves only as accurately as the
        # current gradient magnitude warrants (truncated Newton).
        eta = min(0.5, math.sqrt(maggrad))
        termcond = eta * maggrad
        xsupi = zeros(len(x0), dtype=x0.dtype)
        ri = -b
        psupi = -ri
        i = 0
        dri0 = np.dot(ri, ri)

        if fhess is not None:  # you want to compute hessian once.
            A = sf.hess(xk)
            hcalls += 1

        for k2 in range(cg_maxiter):
            if np.add.reduce(np.abs(ri)) <= termcond:
                break
            if fhess is None:
                if fhess_p is None:
                    Ap = approx_fhess_p(xk, psupi, fprime, epsilon)
                else:
                    Ap = fhess_p(xk, psupi, *args)
                    hcalls += 1
            else:
                # hess was supplied as a callable or hessian update strategy, so
                # A is a dense numpy array or sparse matrix
                Ap = A.dot(psupi)
            # check curvature
            Ap = asarray(Ap).squeeze()  # get rid of matrices...
            curv = np.dot(psupi, Ap)
            if 0 <= curv <= 3 * float64eps:
                # Essentially-zero curvature: stop with the current iterate.
                break
            elif curv < 0:
                if (i > 0):
                    break
                else:
                    # fall back to steepest descent direction
                    xsupi = dri0 / (-curv) * b
                    break
            alphai = dri0 / curv
            xsupi += alphai * psupi
            ri += alphai * Ap
            dri1 = np.dot(ri, ri)
            betai = dri1 / dri0
            psupi = -ri + betai * psupi
            i += 1
            dri0 = dri1  # update np.dot(ri,ri) for next time.
        else:
            # curvature keeps increasing, bail out
            msg = ("Warning: CG iterations didn't converge. The Hessian is not "
                   "positive definite.")
            return terminate(3, msg)

        pk = xsupi  # search direction is solution to system.
        gfk = -b  # gradient at xk

        try:
            alphak, fc, gc, old_fval, old_old_fval, gfkp1 = \
                     _line_search_wolfe12(f, fprime, xk, pk, gfk,
                                          old_fval, old_old_fval, c1=c1, c2=c2)
        except _LineSearchError:
            # Line search failed to find a better solution.
            msg = "Warning: " + _status_message['pr_loss']
            return terminate(2, msg)

        update = alphak * pk
        xk += update  # upcast if necessary
        if retall:
            allvecs.append(xk)
        k += 1
        intermediate_result = OptimizeResult(x=xk, fun=old_fval)
        if _call_callback_maybe_halt(callback, intermediate_result):
            # Status 5: halted by the callback.
            return terminate(5, "")
        update_l1norm = np.linalg.norm(update, ord=1)

    else:
        # while/else: reached only on normal loop exit (update small enough),
        # never after a `return terminate(...)` inside the loop.
        if np.isnan(old_fval) or np.isnan(update_l1norm):
            return terminate(3, _status_message['nan'])

        msg = _status_message['success']
        return terminate(0, msg)
def fminbound(func, x1, x2, args=(), xtol=1e-5, maxfun=500,
              full_output=0, disp=1):
    """Bounded minimization for scalar functions.

    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to be minimized (must accept and return scalars).
    x1, x2 : float or array scalar
        Finite optimization bounds.
    args : tuple, optional
        Extra arguments passed to function.
    xtol : float, optional
        The convergence tolerance.
    maxfun : int, optional
        Maximum number of function evaluations allowed.
    full_output : bool, optional
        If True, return optional outputs.
    disp : int, optional
        If non-zero, print messages.
            0 : no message printing.
            1 : non-convergence notification messages only.
            2 : print a message on convergence too.
            3 : print iteration results.


    Returns
    -------
    xopt : ndarray
        Parameters (over given interval) which minimize the
        objective function.
    fval : number
        (Optional output) The function value evaluated at the minimizer.
    ierr : int
        (Optional output) An error flag (0 if converged, 1 if maximum number of
        function calls reached).
    numfunc : int
        (Optional output) The number of function calls made.

    See also
    --------
    minimize_scalar: Interface to minimization algorithms for scalar
        univariate functions. See the 'Bounded' `method` in particular.

    Notes
    -----
    Finds a local minimizer of the scalar function `func` in the
    interval x1 < xopt < x2 using Brent's method. (See `brent`
    for auto-bracketing.)

    References
    ----------
    .. [1] Forsythe, G.E., M. A. Malcolm, and C. B. Moler. "Computer Methods
           for Mathematical Computations." Prentice-Hall Series in Automatic
           Computation 259 (1977).
    .. [2] Brent, Richard P. Algorithms for Minimization Without Derivatives.
           Courier Corporation, 2013.

    Examples
    --------
    `fminbound` finds the minimizer of the function in the given range.
    The following examples illustrate this.

    >>> from scipy import optimize
    >>> def f(x):
    ...     return (x-1)**2
    >>> minimizer = optimize.fminbound(f, -4, 4)
    >>> minimizer
    1.0
    >>> minimum = f(minimizer)
    >>> minimum
    0.0
    >>> res = optimize.fminbound(f, 3, 4, full_output=True)
    >>> minimizer, fval, ierr, numfunc = res
    >>> minimizer
    3.000005960860986
    >>> minimum = f(minimizer)
    >>> minimum, fval
    (4.000023843479476, 4.000023843479476)
    """
    # Delegate to the option-based implementation; legacy names map to
    # option names (xtol -> xatol, maxfun -> maxiter).
    res = _minimize_scalar_bounded(func, (x1, x2), args,
                                   xatol=xtol, maxiter=maxfun, disp=disp)
    if not full_output:
        return res['x']
    return res['x'], res['fun'], res['status'], res['nfev']
+ + """ + _check_unknown_options(unknown_options) + maxfun = maxiter + # Test bounds are of correct form + if len(bounds) != 2: + raise ValueError('bounds must have two elements.') + x1, x2 = bounds + + if not (is_finite_scalar(x1) and is_finite_scalar(x2)): + raise ValueError("Optimization bounds must be finite scalars.") + + if x1 > x2: + raise ValueError("The lower bound exceeds the upper bound.") + + flag = 0 + header = ' Func-count x f(x) Procedure' + step = ' initial' + + sqrt_eps = sqrt(2.2e-16) + golden_mean = 0.5 * (3.0 - sqrt(5.0)) + a, b = x1, x2 + fulc = a + golden_mean * (b - a) + nfc, xf = fulc, fulc + rat = e = 0.0 + x = xf + fx = func(x, *args) + num = 1 + fmin_data = (1, xf, fx) + fu = np.inf + + ffulc = fnfc = fx + xm = 0.5 * (a + b) + tol1 = sqrt_eps * np.abs(xf) + xatol / 3.0 + tol2 = 2.0 * tol1 + + if disp > 2: + print(" ") + print(header) + print("%5.0f %12.6g %12.6g %s" % (fmin_data + (step,))) + + while (np.abs(xf - xm) > (tol2 - 0.5 * (b - a))): + golden = 1 + # Check for parabolic fit + if np.abs(e) > tol1: + golden = 0 + r = (xf - nfc) * (fx - ffulc) + q = (xf - fulc) * (fx - fnfc) + p = (xf - fulc) * q - (xf - nfc) * r + q = 2.0 * (q - r) + if q > 0.0: + p = -p + q = np.abs(q) + r = e + e = rat + + # Check for acceptability of parabola + if ((np.abs(p) < np.abs(0.5*q*r)) and (p > q*(a - xf)) and + (p < q * (b - xf))): + rat = (p + 0.0) / q + x = xf + rat + step = ' parabolic' + + if ((x - a) < tol2) or ((b - x) < tol2): + si = np.sign(xm - xf) + ((xm - xf) == 0) + rat = tol1 * si + else: # do a golden-section step + golden = 1 + + if golden: # do a golden-section step + if xf >= xm: + e = a - xf + else: + e = b - xf + rat = golden_mean*e + step = ' golden' + + si = np.sign(rat) + (rat == 0) + x = xf + si * np.maximum(np.abs(rat), tol1) + fu = func(x, *args) + num += 1 + fmin_data = (num, x, fu) + if disp > 2: + print("%5.0f %12.6g %12.6g %s" % (fmin_data + (step,))) + + if fu <= fx: + if x >= xf: + a = xf + else: + b = xf + fulc, ffulc = 
nfc, fnfc + nfc, fnfc = xf, fx + xf, fx = x, fu + else: + if x < xf: + a = x + else: + b = x + if (fu <= fnfc) or (nfc == xf): + fulc, ffulc = nfc, fnfc + nfc, fnfc = x, fu + elif (fu <= ffulc) or (fulc == xf) or (fulc == nfc): + fulc, ffulc = x, fu + + xm = 0.5 * (a + b) + tol1 = sqrt_eps * np.abs(xf) + xatol / 3.0 + tol2 = 2.0 * tol1 + + if num >= maxfun: + flag = 1 + break + + if np.isnan(xf) or np.isnan(fx) or np.isnan(fu): + flag = 2 + + fval = fx + if disp > 0: + _endprint(x, flag, fval, maxfun, xatol, disp) + + result = OptimizeResult(fun=fval, status=flag, success=(flag == 0), + message={0: 'Solution found.', + 1: 'Maximum number of function calls ' + 'reached.', + 2: _status_message['nan']}.get(flag, ''), + x=xf, nfev=num, nit=num) + + return result + + +class Brent: + #need to rethink design of __init__ + def __init__(self, func, args=(), tol=1.48e-8, maxiter=500, + full_output=0, disp=0): + self.func = func + self.args = args + self.tol = tol + self.maxiter = maxiter + self._mintol = 1.0e-11 + self._cg = 0.3819660 + self.xmin = None + self.fval = None + self.iter = 0 + self.funcalls = 0 + self.disp = disp + + # need to rethink design of set_bracket (new options, etc.) 
class Brent:
    """Minimize a scalar function of one variable using Brent's method.

    Combines golden-section steps with inverse parabolic interpolation.
    Usage: construct, `set_bracket`, `optimize`, then `get_result`.
    """
    #need to rethink design of __init__
    def __init__(self, func, args=(), tol=1.48e-8, maxiter=500,
                 full_output=0, disp=0):
        self.func = func
        self.args = args
        self.tol = tol
        self.maxiter = maxiter
        self._mintol = 1.0e-11      # floor added to the relative tolerance
        self._cg = 0.3819660        # golden ratio complement, (3 - sqrt(5))/2
        self.xmin = None
        self.fval = None
        self.iter = 0
        self.funcalls = 0
        self.disp = disp

    # need to rethink design of set_bracket (new options, etc.)
    def set_bracket(self, brack=None):
        # Store the bracket; validated/expanded later by get_bracket_info.
        self.brack = brack

    def get_bracket_info(self):
        """Return a validated bracket (xa, xb, xc, fa, fb, fc, funcalls).

        A 2-sequence bracket is expanded via `bracket`; a 3-sequence one is
        validated (xa < xb < xc and f(xb) below both endpoints); None
        triggers a fully automatic bracket search.
        """
        #set up
        func = self.func
        args = self.args
        brack = self.brack
        ### BEGIN core bracket_info code ###
        ### carefully DOCUMENT any CHANGES in core ##
        if brack is None:
            xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args)
        elif len(brack) == 2:
            xa, xb, xc, fa, fb, fc, funcalls = bracket(func, xa=brack[0],
                                                       xb=brack[1], args=args)
        elif len(brack) == 3:
            xa, xb, xc = brack
            if (xa > xc):  # swap so xa < xc can be assumed
                xc, xa = xa, xc
            if not ((xa < xb) and (xb < xc)):
                raise ValueError(
                    "Bracketing values (xa, xb, xc) do not"
                    " fulfill this requirement: (xa < xb) and (xb < xc)"
                )
            fa = func(*((xa,) + args))
            fb = func(*((xb,) + args))
            fc = func(*((xc,) + args))
            if not ((fb < fa) and (fb < fc)):
                raise ValueError(
                    "Bracketing values (xa, xb, xc) do not fulfill"
                    " this requirement: (f(xb) < f(xa)) and (f(xb) < f(xc))"
                )

            funcalls = 3
        else:
            raise ValueError("Bracketing interval must be "
                             "length 2 or 3 sequence.")
        ### END core bracket_info code ###

        return xa, xb, xc, fa, fb, fc, funcalls

    def optimize(self):
        """Run Brent's method; results are stored on the instance."""
        # set up for optimization
        func = self.func
        xa, xb, xc, fa, fb, fc, funcalls = self.get_bracket_info()
        _mintol = self._mintol
        _cg = self._cg
        #################################
        #BEGIN CORE ALGORITHM
        #################################
        # x: best point; w: second best; v: previous value of w.
        x = w = v = xb
        fw = fv = fx = fb
        if (xa < xc):
            a = xa
            b = xc
        else:
            a = xc
            b = xa
        deltax = 0.0
        iter = 0

        if self.disp > 2:
            print(" ")
            print(f"{'Func-count':^12} {'x':^12} {'f(x)': ^12}")
            print(f"{funcalls:^12g} {x:^12.6g} {fx:^12.6g}")

        while (iter < self.maxiter):
            tol1 = self.tol * np.abs(x) + _mintol
            tol2 = 2.0 * tol1
            xmid = 0.5 * (a + b)
            # check for convergence
            if np.abs(x - xmid) < (tol2 - 0.5 * (b - a)):
                break
            # XXX In the first iteration, rat is only bound in the true case
            # of this conditional. This used to cause an UnboundLocalError
            # (gh-4140). It should be set before the if (but to what?).
            if (np.abs(deltax) <= tol1):
                if (x >= xmid):
                    deltax = a - x  # do a golden section step
                else:
                    deltax = b - x
                rat = _cg * deltax
            else:  # do a parabolic step
                tmp1 = (x - w) * (fx - fv)
                tmp2 = (x - v) * (fx - fw)
                p = (x - v) * tmp2 - (x - w) * tmp1
                tmp2 = 2.0 * (tmp2 - tmp1)
                if (tmp2 > 0.0):
                    p = -p
                tmp2 = np.abs(tmp2)
                dx_temp = deltax
                deltax = rat
                # check parabolic fit
                if ((p > tmp2 * (a - x)) and (p < tmp2 * (b - x)) and
                        (np.abs(p) < np.abs(0.5 * tmp2 * dx_temp))):
                    rat = p * 1.0 / tmp2  # if parabolic step is useful.
                    u = x + rat
                    # Keep the trial at least tol1 away from the bounds.
                    if ((u - a) < tol2 or (b - u) < tol2):
                        if xmid - x >= 0:
                            rat = tol1
                        else:
                            rat = -tol1
                else:
                    if (x >= xmid):
                        deltax = a - x  # if it's not do a golden section step
                    else:
                        deltax = b - x
                    rat = _cg * deltax

            if (np.abs(rat) < tol1):  # update by at least tol1
                if rat >= 0:
                    u = x + tol1
                else:
                    u = x - tol1
            else:
                u = x + rat
            fu = func(*((u,) + self.args))  # calculate new output value
            funcalls += 1

            if (fu > fx):  # if it's bigger than current
                # Trial is worse: shrink the bracket on the trial side and
                # update the second/third-best history points.
                if (u < x):
                    a = u
                else:
                    b = u
                if (fu <= fw) or (w == x):
                    v = w
                    w = u
                    fv = fw
                    fw = fu
                elif (fu <= fv) or (v == x) or (v == w):
                    v = u
                    fv = fu
            else:
                # Trial is at least as good: it becomes the new best point.
                if (u >= x):
                    a = x
                else:
                    b = x
                v = w
                w = x
                x = u
                fv = fw
                fw = fx
                fx = fu

            if self.disp > 2:
                print(f"{funcalls:^12g} {x:^12.6g} {fx:^12.6g}")

            iter += 1
        #################################
        #END CORE ALGORITHM
        #################################

        self.xmin = x
        self.fval = fx
        self.iter = iter
        self.funcalls = funcalls

    def get_result(self, full_output=False):
        # Return the minimizer alone, or the full (x, f, nit, nfev) tuple.
        if full_output:
            return self.xmin, self.fval, self.iter, self.funcalls
        else:
            return self.xmin
def brent(func, args=(), brack=None, tol=1.48e-8, full_output=0, maxiter=500):
    """
    Given a function of one variable and a possible bracket, return
    a local minimizer of the function isolated to a fractional precision
    of tol.

    Parameters
    ----------
    func : callable f(x,*args)
        Objective function.
    args : tuple, optional
        Additional arguments (if present).
    brack : tuple, optional
        Either a triple ``(xa, xb, xc)`` satisfying ``xa < xb < xc`` and
        ``func(xb) < func(xa) and  func(xb) < func(xc)``, or a pair
        ``(xa, xb)`` to be used as initial points for a downhill bracket search
        (see `scipy.optimize.bracket`).
        The minimizer ``x`` will not necessarily satisfy ``xa <= x <= xb``.
    tol : float, optional
        Relative error in solution `xopt` acceptable for convergence.
    full_output : bool, optional
        If True, return all output args (xmin, fval, iter,
        funcalls).
    maxiter : int, optional
        Maximum number of iterations in solution.

    Returns
    -------
    xmin : ndarray
        Optimum point.
    fval : float
        (Optional output) Optimum function value.
    iter : int
        (Optional output) Number of iterations.
    funcalls : int
        (Optional output) Number of objective function evaluations made.

    See also
    --------
    minimize_scalar: Interface to minimization algorithms for scalar
        univariate functions. See the 'Brent' `method` in particular.

    Notes
    -----
    Uses inverse parabolic interpolation when possible to speed up
    convergence of golden section method.

    Does not ensure that the minimum lies in the range specified by
    `brack`. See `scipy.optimize.fminbound`.

    Examples
    --------
    We illustrate the behaviour of the function when `brack` is of
    size 2 and 3 respectively. In the case where `brack` is of the
    form ``(xa, xb)``, we can see for the given values, the output does
    not necessarily lie in the range ``(xa, xb)``.

    >>> def f(x):
    ...     return (x-1)**2

    >>> from scipy import optimize

    >>> minimizer = optimize.brent(f, brack=(1, 2))
    >>> minimizer
    1
    >>> res = optimize.brent(f, brack=(-1, 0.5, 2), full_output=True)
    >>> xmin, fval, iter, funcalls = res
    >>> f(xmin), fval
    (0.0, 0.0)

    """
    # Delegate to the option-based implementation (tol maps to xtol).
    res = _minimize_scalar_brent(func, brack, args,
                                 xtol=tol, maxiter=maxiter)
    if not full_output:
        return res['x']
    return res['x'], res['fun'], res['nit'], res['nfev']
+ + """ + _check_unknown_options(unknown_options) + tol = xtol + if tol < 0: + raise ValueError('tolerance should be >= 0, got %r' % tol) + + brent = Brent(func=func, args=args, tol=tol, + full_output=True, maxiter=maxiter, disp=disp) + brent.set_bracket(brack) + brent.optimize() + x, fval, nit, nfev = brent.get_result(full_output=True) + + success = nit < maxiter and not (np.isnan(x) or np.isnan(fval)) + + if success: + message = ("\nOptimization terminated successfully;\n" + "The returned value satisfies the termination criteria\n" + f"(using xtol = {xtol} )") + else: + if nit >= maxiter: + message = "\nMaximum number of iterations exceeded" + if np.isnan(x) or np.isnan(fval): + message = f"{_status_message['nan']}" + + if disp: + _print_success_message_or_warn(not success, message) + + return OptimizeResult(fun=fval, x=x, nit=nit, nfev=nfev, + success=success, message=message) + + +def golden(func, args=(), brack=None, tol=_epsilon, + full_output=0, maxiter=5000): + """ + Return the minimizer of a function of one variable using the golden section + method. + + Given a function of one variable and a possible bracketing interval, + return a minimizer of the function isolated to a fractional precision of + tol. + + Parameters + ---------- + func : callable func(x,*args) + Objective function to minimize. + args : tuple, optional + Additional arguments (if present), passed to func. + brack : tuple, optional + Either a triple ``(xa, xb, xc)`` where ``xa < xb < xc`` and + ``func(xb) < func(xa) and func(xb) < func(xc)``, or a pair (xa, xb) + to be used as initial points for a downhill bracket search (see + `scipy.optimize.bracket`). + The minimizer ``x`` will not necessarily satisfy ``xa <= x <= xb``. + tol : float, optional + x tolerance stop criterion + full_output : bool, optional + If True, return optional outputs. + maxiter : int + Maximum number of iterations to perform. + + Returns + ------- + xmin : ndarray + Optimum point. 
+ fval : float + (Optional output) Optimum function value. + funcalls : int + (Optional output) Number of objective function evaluations made. + + See also + -------- + minimize_scalar: Interface to minimization algorithms for scalar + univariate functions. See the 'Golden' `method` in particular. + + Notes + ----- + Uses analog of bisection method to decrease the bracketed + interval. + + Examples + -------- + We illustrate the behaviour of the function when `brack` is of + size 2 and 3, respectively. In the case where `brack` is of the + form (xa,xb), we can see for the given values, the output need + not necessarily lie in the range ``(xa, xb)``. + + >>> def f(x): + ... return (x-1)**2 + + >>> from scipy import optimize + + >>> minimizer = optimize.golden(f, brack=(1, 2)) + >>> minimizer + 1 + >>> res = optimize.golden(f, brack=(-1, 0.5, 2), full_output=True) + >>> xmin, fval, funcalls = res + >>> f(xmin), fval + (9.925165290385052e-18, 9.925165290385052e-18) + + """ + options = {'xtol': tol, 'maxiter': maxiter} + res = _minimize_scalar_golden(func, brack, args, **options) + if full_output: + return res['x'], res['fun'], res['nfev'] + else: + return res['x'] + + +def _minimize_scalar_golden(func, brack=None, args=(), + xtol=_epsilon, maxiter=5000, disp=0, + **unknown_options): + """ + Options + ------- + xtol : float + Relative error in solution `xopt` acceptable for convergence. + maxiter : int + Maximum number of iterations to perform. + disp: int, optional + If non-zero, print messages. + 0 : no message printing. + 1 : non-convergence notification messages only. + 2 : print a message on convergence too. + 3 : print iteration results. 
+ """ + _check_unknown_options(unknown_options) + tol = xtol + if brack is None: + xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args) + elif len(brack) == 2: + xa, xb, xc, fa, fb, fc, funcalls = bracket(func, xa=brack[0], + xb=brack[1], args=args) + elif len(brack) == 3: + xa, xb, xc = brack + if (xa > xc): # swap so xa < xc can be assumed + xc, xa = xa, xc + if not ((xa < xb) and (xb < xc)): + raise ValueError( + "Bracketing values (xa, xb, xc) do not" + " fulfill this requirement: (xa < xb) and (xb < xc)" + ) + fa = func(*((xa,) + args)) + fb = func(*((xb,) + args)) + fc = func(*((xc,) + args)) + if not ((fb < fa) and (fb < fc)): + raise ValueError( + "Bracketing values (xa, xb, xc) do not fulfill" + " this requirement: (f(xb) < f(xa)) and (f(xb) < f(xc))" + ) + funcalls = 3 + else: + raise ValueError("Bracketing interval must be length 2 or 3 sequence.") + + _gR = 0.61803399 # golden ratio conjugate: 2.0/(1.0+sqrt(5.0)) + _gC = 1.0 - _gR + x3 = xc + x0 = xa + if (np.abs(xc - xb) > np.abs(xb - xa)): + x1 = xb + x2 = xb + _gC * (xc - xb) + else: + x2 = xb + x1 = xb - _gC * (xb - xa) + f1 = func(*((x1,) + args)) + f2 = func(*((x2,) + args)) + funcalls += 2 + nit = 0 + + if disp > 2: + print(" ") + print(f"{'Func-count':^12} {'x':^12} {'f(x)': ^12}") + + for i in range(maxiter): + if np.abs(x3 - x0) <= tol * (np.abs(x1) + np.abs(x2)): + break + if (f2 < f1): + x0 = x1 + x1 = x2 + x2 = _gR * x1 + _gC * x3 + f1 = f2 + f2 = func(*((x2,) + args)) + else: + x3 = x2 + x2 = x1 + x1 = _gR * x2 + _gC * x0 + f2 = f1 + f1 = func(*((x1,) + args)) + funcalls += 1 + if disp > 2: + if (f1 < f2): + xmin, fval = x1, f1 + else: + xmin, fval = x2, f2 + print(f"{funcalls:^12g} {xmin:^12.6g} {fval:^12.6g}") + + nit += 1 + # end of iteration loop + + if (f1 < f2): + xmin = x1 + fval = f1 + else: + xmin = x2 + fval = f2 + + success = nit < maxiter and not (np.isnan(fval) or np.isnan(xmin)) + + if success: + message = ("\nOptimization terminated successfully;\n" + "The returned 
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0, maxiter=1000):
    """
    Bracket the minimum of a function.

    Given a function and distinct initial points, search in the
    downhill direction (as defined by the initial points) and return
    three points that bracket the minimum of the function.

    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to minimize.
    xa, xb : float, optional
        Initial points. Defaults `xa` to 0.0, and `xb` to 1.0.
        A local minimum need not be contained within this interval.
    args : tuple, optional
        Additional arguments (if present), passed to `func`.
    grow_limit : float, optional
        Maximum grow limit.  Defaults to 110.0
    maxiter : int, optional
        Maximum number of iterations to perform. Defaults to 1000.

    Returns
    -------
    xa, xb, xc : float
        Final points of the bracket.
    fa, fb, fc : float
        Objective function values at the bracket points.
    funcalls : int
        Number of function evaluations made.

    Raises
    ------
    BracketError
        If no valid bracket is found before the algorithm terminates.
        See notes for conditions of a valid bracket.

    Notes
    -----
    The algorithm attempts to find three strictly ordered points (i.e.
    :math:`x_a < x_b < x_c` or :math:`x_c < x_b < x_a`) satisfying
    :math:`f(x_b) ≤ f(x_a)` and :math:`f(x_b) ≤ f(x_c)`, where one of the
    inequalities must be satisfied strictly and all :math:`x_i` must be
    finite.

    Examples
    --------
    This function can find a downward convex region of a function:

    >>> import numpy as np
    >>> import matplotlib.pyplot as plt
    >>> from scipy.optimize import bracket
    >>> def f(x):
    ...     return 10*x**2 + 3*x + 5
    >>> x = np.linspace(-2, 2)
    >>> y = f(x)
    >>> init_xa, init_xb = 0.1, 1
    >>> xa, xb, xc, fa, fb, fc, funcalls = bracket(f, xa=init_xa, xb=init_xb)
    >>> plt.axvline(x=init_xa, color="k", linestyle="--")
    >>> plt.axvline(x=init_xb, color="k", linestyle="--")
    >>> plt.plot(x, y, "-k")
    >>> plt.plot(xa, fa, "bx")
    >>> plt.plot(xb, fb, "rx")
    >>> plt.plot(xc, fc, "bx")
    >>> plt.show()

    Note that both initial points were to the right of the minimum, and the
    third point was found in the "downhill" direction: the direction
    in which the function appeared to be decreasing (to the left).
    The final points are strictly ordered, and the function value
    at the middle point is less than the function values at the endpoints;
    it follows that a minimum must lie within the bracket.

    """
    _gold = 1.618034  # golden ratio: (1.0+sqrt(5.0))/2.0
    _verysmall_num = 1e-21
    # convert to numpy floats if not already
    xa, xb = np.asarray([xa, xb])
    fa = func(*(xa,) + args)
    fb = func(*(xb,) + args)
    if (fa < fb):                      # Switch so fa > fb
        xa, xb = xb, xa
        fa, fb = fb, fa
    # First trial point: a golden-ratio step downhill past xb.
    xc = xb + _gold * (xb - xa)
    fc = func(*((xc,) + args))
    funcalls = 3
    iter = 0
    # March downhill until f stops decreasing at the leading point xc.
    while (fc < fb):
        # Parabolic extrapolation through (xa, fa), (xb, fb), (xc, fc);
        # `denom` is guarded against division by (near-)zero.
        tmp1 = (xb - xa) * (fb - fc)
        tmp2 = (xb - xc) * (fb - fa)
        val = tmp2 - tmp1
        if np.abs(val) < _verysmall_num:
            denom = 2.0 * _verysmall_num
        else:
            denom = 2.0 * val
        w = xb - ((xb - xc) * tmp2 - (xb - xa) * tmp1) / denom
        # Farthest point the parabolic step is allowed to reach.
        wlim = xb + grow_limit * (xc - xb)
        msg = ("No valid bracket was found before the iteration limit was "
               "reached. Consider trying different initial points or "
               "increasing `maxiter`.")
        if iter > maxiter:
            raise RuntimeError(msg)
        iter += 1
        if (w - xc) * (xb - w) > 0.0:
            # Parabolic candidate w lies between xb and xc: try it.
            fw = func(*((w,) + args))
            funcalls += 1
            if (fw < fc):
                # Minimum bracketed between xb and xc.
                xa = xb
                xb = w
                fa = fb
                fb = fw
                break
            elif (fw > fb):
                # Minimum bracketed between xa and w.
                xc = w
                fc = fw
                break
            # Parabolic fit did not help; fall back to a golden step.
            w = xc + _gold * (xc - xb)
            fw = func(*((w,) + args))
            funcalls += 1
        elif (w - wlim)*(wlim - xc) >= 0.0:
            # Candidate beyond the growth limit: clamp to wlim.
            w = wlim
            fw = func(*((w,) + args))
            funcalls += 1
        elif (w - wlim)*(xc - w) > 0.0:
            # Candidate between xc and wlim: try it.
            fw = func(*((w,) + args))
            funcalls += 1
            if (fw < fc):
                # Still going downhill; shift and take another golden step.
                xb = xc
                xc = w
                w = xc + _gold * (xc - xb)
                fb = fc
                fc = fw
                fw = func(*((w,) + args))
                funcalls += 1
        else:
            # Reject the parabolic candidate; default golden-ratio step.
            w = xc + _gold * (xc - xb)
            fw = func(*((w,) + args))
            funcalls += 1
        # Slide the three-point window downhill and continue.
        xa = xb
        xb = xc
        xc = w
        fa = fb
        fb = fc
        fc = fw

    # three conditions for a valid bracket
    cond1 = (fb < fc and fb <= fa) or (fb < fa and fb <= fc)
    cond2 = (xa < xb < xc or xc < xb < xa)
    cond3 = np.isfinite(xa) and np.isfinite(xb) and np.isfinite(xc)
    msg = ("The algorithm terminated without finding a valid bracket. "
           "Consider trying different initial points.")
    if not (cond1 and cond2 and cond3):
        e = BracketError(msg)
        # Attach the last bracket state so callers (e.g.
        # _recover_from_bracket_error) can build a failed OptimizeResult.
        e.data = (xa, xb, xc, fa, fb, fc, funcalls)
        raise e

    return xa, xb, xc, fa, fb, fc, funcalls
Consider trying different initial points or " + "increasing `maxiter`.") + if iter > maxiter: + raise RuntimeError(msg) + iter += 1 + if (w - xc) * (xb - w) > 0.0: + fw = func(*((w,) + args)) + funcalls += 1 + if (fw < fc): + xa = xb + xb = w + fa = fb + fb = fw + break + elif (fw > fb): + xc = w + fc = fw + break + w = xc + _gold * (xc - xb) + fw = func(*((w,) + args)) + funcalls += 1 + elif (w - wlim)*(wlim - xc) >= 0.0: + w = wlim + fw = func(*((w,) + args)) + funcalls += 1 + elif (w - wlim)*(xc - w) > 0.0: + fw = func(*((w,) + args)) + funcalls += 1 + if (fw < fc): + xb = xc + xc = w + w = xc + _gold * (xc - xb) + fb = fc + fc = fw + fw = func(*((w,) + args)) + funcalls += 1 + else: + w = xc + _gold * (xc - xb) + fw = func(*((w,) + args)) + funcalls += 1 + xa = xb + xb = xc + xc = w + fa = fb + fb = fc + fc = fw + + # three conditions for a valid bracket + cond1 = (fb < fc and fb <= fa) or (fb < fa and fb <= fc) + cond2 = (xa < xb < xc or xc < xb < xa) + cond3 = np.isfinite(xa) and np.isfinite(xb) and np.isfinite(xc) + msg = ("The algorithm terminated without finding a valid bracket. " + "Consider trying different initial points.") + if not (cond1 and cond2 and cond3): + e = BracketError(msg) + e.data = (xa, xb, xc, fa, fb, fc, funcalls) + raise e + + return xa, xb, xc, fa, fb, fc, funcalls + + +class BracketError(RuntimeError): + pass + + +def _recover_from_bracket_error(solver, fun, bracket, args, **options): + # `bracket` was originally written without checking whether the resulting + # bracket is valid. `brent` and `golden` built on top of it without + # checking the returned bracket for validity, and their output can be + # incorrect without warning/error if the original bracket is invalid. 
+ # gh-14858 noticed the problem, and the following is the desired + # behavior: + # - `scipy.optimize.bracket`, `scipy.optimize.brent`, and + # `scipy.optimize.golden` should raise an error if the bracket is + # invalid, as opposed to silently returning garbage + # - `scipy.optimize.minimize_scalar` should return with `success=False` + # and other information + # The changes that would be required to achieve this the traditional + # way (`return`ing all the required information from bracket all the way + # up to `minimizer_scalar`) are extensive and invasive. (See a6aa40d.) + # We can achieve the same thing by raising the error in `bracket`, but + # storing the information needed by `minimize_scalar` in the error object, + # and intercepting it here. + try: + res = solver(fun, bracket, args, **options) + except BracketError as e: + msg = str(e) + xa, xb, xc, fa, fb, fc, funcalls = e.data + xs, fs = [xa, xb, xc], [fa, fb, fc] + if np.any(np.isnan([xs, fs])): + x, fun = np.nan, np.nan + else: + imin = np.argmin(fs) + x, fun = xs[imin], fs[imin] + return OptimizeResult(fun=fun, nfev=funcalls, x=x, + nit=0, success=False, message=msg) + return res + + +def _line_for_search(x0, alpha, lower_bound, upper_bound): + """ + Given a parameter vector ``x0`` with length ``n`` and a direction + vector ``alpha`` with length ``n``, and lower and upper bounds on + each of the ``n`` parameters, what are the bounds on a scalar + ``l`` such that ``lower_bound <= x0 + alpha * l <= upper_bound``. + + + Parameters + ---------- + x0 : np.array. + The vector representing the current location. + Note ``np.shape(x0) == (n,)``. + alpha : np.array. + The vector representing the direction. + Note ``np.shape(alpha) == (n,)``. + lower_bound : np.array. + The lower bounds for each parameter in ``x0``. If the ``i``th + parameter in ``x0`` is unbounded below, then ``lower_bound[i]`` + should be ``-np.inf``. + Note ``np.shape(lower_bound) == (n,)``. + upper_bound : np.array. 
+ The upper bounds for each parameter in ``x0``. If the ``i``th + parameter in ``x0`` is unbounded above, then ``upper_bound[i]`` + should be ``np.inf``. + Note ``np.shape(upper_bound) == (n,)``. + + Returns + ------- + res : tuple ``(lmin, lmax)`` + The bounds for ``l`` such that + ``lower_bound[i] <= x0[i] + alpha[i] * l <= upper_bound[i]`` + for all ``i``. + + """ + # get nonzero indices of alpha so we don't get any zero division errors. + # alpha will not be all zero, since it is called from _linesearch_powell + # where we have a check for this. + nonzero, = alpha.nonzero() + lower_bound, upper_bound = lower_bound[nonzero], upper_bound[nonzero] + x0, alpha = x0[nonzero], alpha[nonzero] + low = (lower_bound - x0) / alpha + high = (upper_bound - x0) / alpha + + # positive and negative indices + pos = alpha > 0 + + lmin_pos = np.where(pos, low, 0) + lmin_neg = np.where(pos, 0, high) + lmax_pos = np.where(pos, high, 0) + lmax_neg = np.where(pos, 0, low) + + lmin = np.max(lmin_pos + lmin_neg) + lmax = np.min(lmax_pos + lmax_neg) + + # if x0 is outside the bounds, then it is possible that there is + # no way to get back in the bounds for the parameters being updated + # with the current direction alpha. + # when this happens, lmax < lmin. + # If this is the case, then we can just return (0, 0) + return (lmin, lmax) if lmax >= lmin else (0, 0) + + +def _linesearch_powell(func, p, xi, tol=1e-3, + lower_bound=None, upper_bound=None, fval=None): + """Line-search algorithm using fminbound. + + Find the minimum of the function ``func(x0 + alpha*direc)``. + + lower_bound : np.array. + The lower bounds for each parameter in ``x0``. If the ``i``th + parameter in ``x0`` is unbounded below, then ``lower_bound[i]`` + should be ``-np.inf``. + Note ``np.shape(lower_bound) == (n,)``. + upper_bound : np.array. + The upper bounds for each parameter in ``x0``. If the ``i``th + parameter in ``x0`` is unbounded above, then ``upper_bound[i]`` + should be ``np.inf``. 
def fmin_powell(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None,
                maxfun=None, full_output=0, disp=1, retall=0, callback=None,
                direc=None):
    """
    Minimize a function using modified Powell's method.

    This method only uses function values, not derivatives.

    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to be minimized.
    x0 : ndarray
        Initial guess.
    args : tuple, optional
        Extra arguments passed to func.
    xtol : float, optional
        Line-search error tolerance.
    ftol : float, optional
        Relative error in ``func(xopt)`` acceptable for convergence.
    maxiter : int, optional
        Maximum number of iterations to perform.
    maxfun : int, optional
        Maximum number of function evaluations to make.
    full_output : bool, optional
        If True, ``fopt``, ``xi``, ``direc``, ``iter``, ``funcalls``, and
        ``warnflag`` are returned.
    disp : bool, optional
        If True, print convergence messages.
    retall : bool, optional
        If True, return a list of the solution at each iteration.
    callback : callable, optional
        An optional user-supplied function, called after each
        iteration.  Called as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    direc : ndarray, optional
        Initial fitting step and parameter order set as an (N, N) array,
        where N is the number of fitting parameters in `x0`. Defaults to
        step size 1.0 fitting all parameters simultaneously
        (``np.eye((N, N))``). To prevent initial consideration of values
        in a step or to change initial step size, set to 0 or desired
        step size in the Jth position in the Mth block, where J is the
        position in `x0` and M is the desired evaluation step, with
        steps being evaluated in index order. Step size and ordering
        will change freely as minimization proceeds.

    Returns
    -------
    xopt : ndarray
        Parameter which minimizes `func`.
    fopt : number
        Value of function at minimum: ``fopt = func(xopt)``.
    direc : ndarray
        Current direction set.
    iter : int
        Number of iterations.
    funcalls : int
        Number of function calls made.
    warnflag : int
        Integer warning flag:
            1 : Maximum number of function evaluations.
            2 : Maximum number of iterations.
            3 : NaN result encountered.
            4 : The result is out of the provided bounds.
    allvecs : list
        List of solutions at each iteration.

    See also
    --------
    minimize: Interface to unconstrained minimization algorithms for
        multivariate functions. See the 'Powell' method in particular.

    Notes
    -----
    Uses a modification of Powell's method to find the minimum of
    a function of N variables. Powell's method is a conjugate
    direction method.

    The algorithm has two loops. The outer loop merely iterates over the
    inner loop. The inner loop minimizes over each current direction in the
    direction set. At the end of the inner loop, if certain conditions are
    met, the direction that gave the largest decrease is dropped and
    replaced with the difference between the current estimated x and the
    estimated x from the beginning of the inner-loop.

    The technical conditions for replacing the direction of greatest
    increase amount to checking that

    1. No further gain can be made along the direction of greatest increase
       from that iteration.
    2. The direction of greatest increase accounted for a large sufficient
       fraction of the decrease in the function value from that iteration of
       the inner loop.

    References
    ----------
    Powell M.J.D. (1964) An efficient method for finding the minimum of a
    function of several variables without calculating derivatives,
    Computer Journal, 7 (2):155-162.

    Press W., Teukolsky S.A., Vetterling W.T., and Flannery B.P.:
    Numerical Recipes (any edition), Cambridge University Press

    Examples
    --------
    >>> def f(x):
    ...     return x**2

    >>> from scipy import optimize

    >>> minimum = optimize.fmin_powell(f, -1)
    Optimization terminated successfully.
             Current function value: 0.000000
             Iterations: 2
             Function evaluations: 16
    >>> minimum
    array(0.0)

    """
    callback = _wrap_callback(callback)
    # Delegate to the solver backend used by minimize(method='powell').
    res = _minimize_powell(func, x0, args, callback=callback,
                           xtol=xtol, ftol=ftol, maxiter=maxiter,
                           maxfev=maxfun, disp=disp, direc=direc,
                           return_all=retall)

    if full_output:
        retlist = (res['x'], res['fun'], res['direc'], res['nit'],
                   res['nfev'], res['status'])
        return retlist + (res['allvecs'],) if retall else retlist
    return (res['x'], res['allvecs']) if retall else res['x']
def _minimize_powell(func, x0, args=(), callback=None, bounds=None,
                     xtol=1e-4, ftol=1e-4, maxiter=None, maxfev=None,
                     disp=False, direc=None, return_all=False,
                     **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    modified Powell algorithm.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized.

            ``fun(x, *args) -> float``

        where ``x`` is a 1-D array with shape (n,) and ``args``
        is a tuple of the fixed parameters needed to completely
        specify the function.
    x0 : ndarray, shape (n,)
        Initial guess. Array of real elements of size (n,),
        where ``n`` is the number of independent variables.
    args : tuple, optional
        Extra arguments passed to the objective function and its
        derivatives (`fun`, `jac` and `hess` functions).
    method : str or callable, optional
        The present documentation is specific to ``method='powell'``, but
        other options are available. See documentation for
        `scipy.optimize.minimize`.
    bounds : sequence or `Bounds`, optional
        Bounds on decision variables. There are two ways to specify the
        bounds:

            1. Instance of `Bounds` class.
            2. Sequence of ``(min, max)`` pairs for each element in `x`.
               None is used to specify no bound.

        If bounds are not provided, then an unbounded line search will be
        used. If bounds are provided and the initial guess is within the
        bounds, then every function evaluation throughout the minimization
        procedure will be within the bounds. If bounds are provided, the
        initial guess is outside the bounds, and `direc` is full rank
        (or left to default), then some function evaluations during the
        first iteration may be outside the bounds, but every function
        evaluation after the first iteration will be within the bounds.
        If `direc` is not full rank, then some parameters may not be
        optimized and the solution is not guaranteed to be within the
        bounds.
    options : dict, optional
        A dictionary of solver options. All methods accept the following
        generic options:

            maxiter : int
                Maximum number of iterations to perform. Depending on the
                method each iteration may use several function evaluations.
            disp : bool
                Set to True to print convergence messages.

        See method-specific options for ``method='powell'`` below.
    callback : callable, optional
        Called after each iteration. The signature is:

            ``callback(xk)``

        where ``xk`` is the current parameter vector.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as a ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``success`` a
        Boolean flag indicating if the optimizer exited successfully and
        ``message`` which describes the cause of the termination. See
        `OptimizeResult` for a description of other attributes.

    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    ftol : float
        Relative error in ``fun(xopt)`` acceptable for convergence.
    maxiter, maxfev : int
        Maximum allowed number of iterations and function evaluations.
        Will default to ``N*1000``, where ``N`` is the number of
        variables, if neither `maxiter` or `maxfev` is set. If both
        `maxiter` and `maxfev` are set, minimization will stop at the
        first reached.
    direc : ndarray
        Initial set of direction vectors for the Powell method.
    return_all : bool, optional
        Set to True to return a list of the best solution at each of the
        iterations.
    """
    _check_unknown_options(unknown_options)
    maxfun = maxfev
    retall = return_all

    x = asarray(x0).flatten()
    if retall:
        allvecs = [x]
    N = len(x)
    # If neither are set, then set both to default
    if maxiter is None and maxfun is None:
        maxiter = N * 1000
        maxfun = N * 1000
    elif maxiter is None:
        # Convert remaining Nones, to np.inf, unless the other is np.inf, in
        # which case use the default to avoid unbounded iteration
        if maxfun == np.inf:
            maxiter = N * 1000
        else:
            maxiter = np.inf
    elif maxfun is None:
        if maxiter == np.inf:
            maxfun = N * 1000
        else:
            maxfun = np.inf

    # we need to use a mutable object here that we can update in the
    # wrapper function
    fcalls, func = _wrap_scalar_function_maxfun_validation(func, args, maxfun)

    if direc is None:
        direc = eye(N, dtype=float)
    else:
        direc = asarray(direc, dtype=float)
        if np.linalg.matrix_rank(direc) != direc.shape[0]:
            warnings.warn("direc input is not full rank, some parameters may "
                          "not be optimized",
                          OptimizeWarning, stacklevel=3)

    if bounds is None:
        # don't make these arrays of all +/- inf. because
        # _linesearch_powell will do an unnecessary check of all the elements.
        # just keep them None, _linesearch_powell will not have to check
        # all the elements.
        lower_bound, upper_bound = None, None
    else:
        # bounds is standardized in _minimize.py.
        lower_bound, upper_bound = bounds.lb, bounds.ub
        if np.any(lower_bound > x0) or np.any(x0 > upper_bound):
            warnings.warn("Initial guess is not within the specified bounds",
                          OptimizeWarning, stacklevel=3)

    fval = squeeze(func(x))
    x1 = x.copy()  # x at the start of the current outer iteration
    iter = 0
    while True:
        try:
            fx = fval
            bigind = 0   # index of the direction giving the largest decrease
            delta = 0.0  # size of that largest decrease
            # Inner loop: line-minimize along each direction in the set.
            for i in range(N):
                direc1 = direc[i]
                fx2 = fval
                fval, x, direc1 = _linesearch_powell(func, x, direc1,
                                                     tol=xtol * 100,
                                                     lower_bound=lower_bound,
                                                     upper_bound=upper_bound,
                                                     fval=fval)
                if (fx2 - fval) > delta:
                    delta = fx2 - fval
                    bigind = i
            iter += 1
            if retall:
                allvecs.append(x)
            intermediate_result = OptimizeResult(x=x, fun=fval)
            if _call_callback_maybe_halt(callback, intermediate_result):
                break
            # Relative decrease over the whole inner loop small enough?
            bnd = ftol * (np.abs(fx) + np.abs(fval)) + 1e-20
            if 2.0 * (fx - fval) <= bnd:
                break
            if fcalls[0] >= maxfun:
                break
            if iter >= maxiter:
                break
            if np.isnan(fx) and np.isnan(fval):
                # Ended up in a nan-region: bail out
                break

            # Construct the extrapolated point
            direc1 = x - x1
            x1 = x.copy()
            # make sure that we don't go outside the bounds when extrapolating
            if lower_bound is None and upper_bound is None:
                lmax = 1
            else:
                _, lmax = _line_for_search(x, direc1, lower_bound, upper_bound)
            x2 = x + min(lmax, 1) * direc1
            fx2 = squeeze(func(x2))

            if (fx > fx2):
                # Acceptance test for replacing a direction in the set
                # (see Powell's method in Numerical Recipes).
                t = 2.0*(fx + fx2 - 2.0*fval)
                temp = (fx - fval - delta)
                t *= temp*temp
                temp = fx - fx2
                t -= delta*temp*temp
                if t < 0.0:
                    fval, x, direc1 = _linesearch_powell(
                        func, x, direc1,
                        tol=xtol * 100,
                        lower_bound=lower_bound,
                        upper_bound=upper_bound,
                        fval=fval
                    )
                    if np.any(direc1):
                        # Drop the direction of largest decrease, append the
                        # new aggregate direction.
                        direc[bigind] = direc[-1]
                        direc[-1] = direc1
        except _MaxFuncCallError:
            break

    warnflag = 0
    msg = _status_message['success']
    # out of bounds is more urgent than exceeding function evals or iters,
    # but I don't want to cause inconsistencies by changing the
    # established warning flags for maxfev and maxiter, so the out of bounds
    # warning flag becomes 4, but is checked for first.
    if bounds and (np.any(lower_bound > x) or np.any(x > upper_bound)):
        warnflag = 4
        msg = _status_message['out_of_bounds']
    elif fcalls[0] >= maxfun:
        warnflag = 1
        msg = _status_message['maxfev']
    elif iter >= maxiter:
        warnflag = 2
        msg = _status_message['maxiter']
    elif np.isnan(fval) or np.isnan(x).any():
        warnflag = 3
        msg = _status_message['nan']

    if disp:
        _print_success_message_or_warn(warnflag, msg, RuntimeWarning)
        print("         Current function value: %f" % fval)
        print("         Iterations: %d" % iter)
        print("         Function evaluations: %d" % fcalls[0])

    result = OptimizeResult(fun=fval, direc=direc, nit=iter, nfev=fcalls[0],
                            status=warnflag, success=(warnflag == 0),
                            message=msg, x=x)
    if retall:
        result['allvecs'] = allvecs
    return result
warning flag becomes 3, but is checked for first. + if bounds and (np.any(lower_bound > x) or np.any(x > upper_bound)): + warnflag = 4 + msg = _status_message['out_of_bounds'] + elif fcalls[0] >= maxfun: + warnflag = 1 + msg = _status_message['maxfev'] + elif iter >= maxiter: + warnflag = 2 + msg = _status_message['maxiter'] + elif np.isnan(fval) or np.isnan(x).any(): + warnflag = 3 + msg = _status_message['nan'] + + if disp: + _print_success_message_or_warn(warnflag, msg, RuntimeWarning) + print(" Current function value: %f" % fval) + print(" Iterations: %d" % iter) + print(" Function evaluations: %d" % fcalls[0]) + + result = OptimizeResult(fun=fval, direc=direc, nit=iter, nfev=fcalls[0], + status=warnflag, success=(warnflag == 0), + message=msg, x=x) + if retall: + result['allvecs'] = allvecs + return result + + +def _endprint(x, flag, fval, maxfun, xtol, disp): + if flag == 0: + if disp > 1: + print("\nOptimization terminated successfully;\n" + "The returned value satisfies the termination criteria\n" + "(using xtol = ", xtol, ")") + return + + if flag == 1: + msg = ("\nMaximum number of function evaluations exceeded --- " + "increase maxfun argument.\n") + elif flag == 2: + msg = "\n{}".format(_status_message['nan']) + + _print_success_message_or_warn(flag, msg) + return + + +def brute(func, ranges, args=(), Ns=20, full_output=0, finish=fmin, + disp=False, workers=1): + """Minimize a function over a given range by brute force. + + Uses the "brute force" method, i.e., computes the function's value + at each point of a multidimensional grid of points, to find the global + minimum of the function. + + The function is evaluated everywhere in the range with the datatype of the + first call to the function, as enforced by the ``vectorize`` NumPy + function. The value and type of the function evaluation returned when + ``full_output=True`` are affected in addition by the ``finish`` argument + (see Notes). 
+ + The brute force approach is inefficient because the number of grid points + increases exponentially - the number of grid points to evaluate is + ``Ns ** len(x)``. Consequently, even with coarse grid spacing, even + moderately sized problems can take a long time to run, and/or run into + memory limitations. + + Parameters + ---------- + func : callable + The objective function to be minimized. Must be in the + form ``f(x, *args)``, where ``x`` is the argument in + the form of a 1-D array and ``args`` is a tuple of any + additional fixed parameters needed to completely specify + the function. + ranges : tuple + Each component of the `ranges` tuple must be either a + "slice object" or a range tuple of the form ``(low, high)``. + The program uses these to create the grid of points on which + the objective function will be computed. See `Note 2` for + more detail. + args : tuple, optional + Any additional fixed parameters needed to completely specify + the function. + Ns : int, optional + Number of grid points along the axes, if not otherwise + specified. See `Note2`. + full_output : bool, optional + If True, return the evaluation grid and the objective function's + values on it. + finish : callable, optional + An optimization function that is called with the result of brute force + minimization as initial guess. `finish` should take `func` and + the initial guess as positional arguments, and take `args` as + keyword arguments. It may additionally take `full_output` + and/or `disp` as keyword arguments. Use None if no "polishing" + function is to be used. See Notes for more details. + disp : bool, optional + Set to True to print convergence messages from the `finish` callable. + workers : int or map-like callable, optional + If `workers` is an int the grid is subdivided into `workers` + sections and evaluated in parallel (uses + `multiprocessing.Pool `). + Supply `-1` to use all cores available to the Process. 
+ Alternatively supply a map-like callable, such as + `multiprocessing.Pool.map` for evaluating the grid in parallel. + This evaluation is carried out as ``workers(func, iterable)``. + Requires that `func` be pickleable. + + .. versionadded:: 1.3.0 + + Returns + ------- + x0 : ndarray + A 1-D array containing the coordinates of a point at which the + objective function had its minimum value. (See `Note 1` for + which point is returned.) + fval : float + Function value at the point `x0`. (Returned when `full_output` is + True.) + grid : tuple + Representation of the evaluation grid. It has the same + length as `x0`. (Returned when `full_output` is True.) + Jout : ndarray + Function values at each point of the evaluation + grid, i.e., ``Jout = func(*grid)``. (Returned + when `full_output` is True.) + + See Also + -------- + basinhopping, differential_evolution + + Notes + ----- + *Note 1*: The program finds the gridpoint at which the lowest value + of the objective function occurs. If `finish` is None, that is the + point returned. When the global minimum occurs within (or not very far + outside) the grid's boundaries, and the grid is fine enough, that + point will be in the neighborhood of the global minimum. + + However, users often employ some other optimization program to + "polish" the gridpoint values, i.e., to seek a more precise + (local) minimum near `brute's` best gridpoint. + The `brute` function's `finish` option provides a convenient way to do + that. Any polishing program used must take `brute's` output as its + initial guess as a positional argument, and take `brute's` input values + for `args` as keyword arguments, otherwise an error will be raised. + It may additionally take `full_output` and/or `disp` as keyword arguments. + + `brute` assumes that the `finish` function returns either an + `OptimizeResult` object or a tuple in the form: + ``(xmin, Jmin, ... 
, statuscode)``, where ``xmin`` is the minimizing + value of the argument, ``Jmin`` is the minimum value of the objective + function, "..." may be some other returned values (which are not used + by `brute`), and ``statuscode`` is the status code of the `finish` program. + + Note that when `finish` is not None, the values returned are those + of the `finish` program, *not* the gridpoint ones. Consequently, + while `brute` confines its search to the input grid points, + the `finish` program's results usually will not coincide with any + gridpoint, and may fall outside the grid's boundary. Thus, if a + minimum only needs to be found over the provided grid points, make + sure to pass in `finish=None`. + + *Note 2*: The grid of points is a `numpy.mgrid` object. + For `brute` the `ranges` and `Ns` inputs have the following effect. + Each component of the `ranges` tuple can be either a slice object or a + two-tuple giving a range of values, such as (0, 5). If the component is a + slice object, `brute` uses it directly. If the component is a two-tuple + range, `brute` internally converts it to a slice object that interpolates + `Ns` points from its low-value to its high-value, inclusive. + + Examples + -------- + We illustrate the use of `brute` to seek the global minimum of a function + of two variables that is given as the sum of a positive-definite + quadratic and two deep "Gaussian-shaped" craters. Specifically, define + the objective function `f` as the sum of three other functions, + ``f = f1 + f2 + f3``. We suppose each of these has a signature + ``(z, *params)``, where ``z = (x, y)``, and ``params`` and the functions + are as defined below. + + >>> import numpy as np + >>> params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5) + >>> def f1(z, *params): + ... x, y = z + ... a, b, c, d, e, f, g, h, i, j, k, l, scale = params + ... return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f) + + >>> def f2(z, *params): + ... x, y = z + ... 
a, b, c, d, e, f, g, h, i, j, k, l, scale = params + ... return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale)) + + >>> def f3(z, *params): + ... x, y = z + ... a, b, c, d, e, f, g, h, i, j, k, l, scale = params + ... return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale)) + + >>> def f(z, *params): + ... return f1(z, *params) + f2(z, *params) + f3(z, *params) + + Thus, the objective function may have local minima near the minimum + of each of the three functions of which it is composed. To + use `fmin` to polish its gridpoint result, we may then continue as + follows: + + >>> rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25)) + >>> from scipy import optimize + >>> resbrute = optimize.brute(f, rranges, args=params, full_output=True, + ... finish=optimize.fmin) + >>> resbrute[0] # global minimum + array([-1.05665192, 1.80834843]) + >>> resbrute[1] # function value at global minimum + -3.4085818767 + + Note that if `finish` had been set to None, we would have gotten the + gridpoint [-1.0 1.75] where the rounded function value is -2.892. 
+ + """ + N = len(ranges) + if N > 40: + raise ValueError("Brute Force not possible with more " + "than 40 variables.") + lrange = list(ranges) + for k in range(N): + if not isinstance(lrange[k], slice): + if len(lrange[k]) < 3: + lrange[k] = tuple(lrange[k]) + (complex(Ns),) + lrange[k] = slice(*lrange[k]) + if (N == 1): + lrange = lrange[0] + + grid = np.mgrid[lrange] + + # obtain an array of parameters that is iterable by a map-like callable + inpt_shape = grid.shape + if (N > 1): + grid = np.reshape(grid, (inpt_shape[0], np.prod(inpt_shape[1:]))).T + + if not np.iterable(args): + args = (args,) + + wrapped_func = _Brute_Wrapper(func, args) + + # iterate over input arrays, possibly in parallel + with MapWrapper(pool=workers) as mapper: + Jout = np.array(list(mapper(wrapped_func, grid))) + if (N == 1): + grid = (grid,) + Jout = np.squeeze(Jout) + elif (N > 1): + Jout = np.reshape(Jout, inpt_shape[1:]) + grid = np.reshape(grid.T, inpt_shape) + + Nshape = shape(Jout) + + indx = argmin(Jout.ravel(), axis=-1) + Nindx = np.empty(N, int) + xmin = np.empty(N, float) + for k in range(N - 1, -1, -1): + thisN = Nshape[k] + Nindx[k] = indx % Nshape[k] + indx = indx // thisN + for k in range(N): + xmin[k] = grid[k][tuple(Nindx)] + + Jmin = Jout[tuple(Nindx)] + if (N == 1): + grid = grid[0] + xmin = xmin[0] + + if callable(finish): + # set up kwargs for `finish` function + finish_args = _getfullargspec(finish).args + finish_kwargs = dict() + if 'full_output' in finish_args: + finish_kwargs['full_output'] = 1 + if 'disp' in finish_args: + finish_kwargs['disp'] = disp + elif 'options' in finish_args: + # pass 'disp' as `options` + # (e.g., if `finish` is `minimize`) + finish_kwargs['options'] = {'disp': disp} + + # run minimizer + res = finish(func, xmin, args=args, **finish_kwargs) + + if isinstance(res, OptimizeResult): + xmin = res.x + Jmin = res.fun + success = res.success + else: + xmin = res[0] + Jmin = res[1] + success = res[-1] == 0 + if not success: + if disp: + 
warnings.warn("Either final optimization did not succeed or `finish` " + "does not return `statuscode` as its last argument.", + RuntimeWarning, stacklevel=2) + + if full_output: + return xmin, Jmin, grid, Jout + else: + return xmin + + +class _Brute_Wrapper: + """ + Object to wrap user cost function for optimize.brute, allowing picklability + """ + + def __init__(self, f, args): + self.f = f + self.args = [] if args is None else args + + def __call__(self, x): + # flatten needed for one dimensional case. + return self.f(np.asarray(x).flatten(), *self.args) + + +def show_options(solver=None, method=None, disp=True): + """ + Show documentation for additional options of optimization solvers. + + These are method-specific options that can be supplied through the + ``options`` dict. + + Parameters + ---------- + solver : str + Type of optimization solver. One of 'minimize', 'minimize_scalar', + 'root', 'root_scalar', 'linprog', or 'quadratic_assignment'. + method : str, optional + If not given, shows all methods of the specified solver. Otherwise, + show only the options for the specified method. Valid values + corresponds to methods' names of respective solver (e.g., 'BFGS' for + 'minimize'). + disp : bool, optional + Whether to print the result rather than returning it. 
+ + Returns + ------- + text + Either None (for disp=True) or the text string (disp=False) + + Notes + ----- + The solver-specific methods are: + + `scipy.optimize.minimize` + + - :ref:`Nelder-Mead ` + - :ref:`Powell ` + - :ref:`CG ` + - :ref:`BFGS ` + - :ref:`Newton-CG ` + - :ref:`L-BFGS-B ` + - :ref:`TNC ` + - :ref:`COBYLA ` + - :ref:`COBYQA ` + - :ref:`SLSQP ` + - :ref:`dogleg ` + - :ref:`trust-ncg ` + + `scipy.optimize.root` + + - :ref:`hybr ` + - :ref:`lm ` + - :ref:`broyden1 ` + - :ref:`broyden2 ` + - :ref:`anderson ` + - :ref:`linearmixing ` + - :ref:`diagbroyden ` + - :ref:`excitingmixing ` + - :ref:`krylov ` + - :ref:`df-sane ` + + `scipy.optimize.minimize_scalar` + + - :ref:`brent ` + - :ref:`golden ` + - :ref:`bounded ` + + `scipy.optimize.root_scalar` + + - :ref:`bisect ` + - :ref:`brentq ` + - :ref:`brenth ` + - :ref:`ridder ` + - :ref:`toms748 ` + - :ref:`newton ` + - :ref:`secant ` + - :ref:`halley ` + + `scipy.optimize.linprog` + + - :ref:`simplex ` + - :ref:`interior-point ` + - :ref:`revised simplex ` + - :ref:`highs ` + - :ref:`highs-ds ` + - :ref:`highs-ipm ` + + `scipy.optimize.quadratic_assignment` + + - :ref:`faq ` + - :ref:`2opt ` + + Examples + -------- + We can print documentations of a solver in stdout: + + >>> from scipy.optimize import show_options + >>> show_options(solver="minimize") + ... + + Specifying a method is possible: + + >>> show_options(solver="minimize", method="Nelder-Mead") + ... + + We can also get the documentations as a string: + + >>> show_options(solver="minimize", method="Nelder-Mead", disp=False) + Minimization of scalar function of one or more variables using the ... 
+ + """ + import textwrap + + doc_routines = { + 'minimize': ( + ('bfgs', 'scipy.optimize._optimize._minimize_bfgs'), + ('cg', 'scipy.optimize._optimize._minimize_cg'), + ('cobyla', 'scipy.optimize._cobyla_py._minimize_cobyla'), + ('cobyqa', 'scipy.optimize._cobyqa_py._minimize_cobyqa'), + ('dogleg', 'scipy.optimize._trustregion_dogleg._minimize_dogleg'), + ('l-bfgs-b', 'scipy.optimize._lbfgsb_py._minimize_lbfgsb'), + ('nelder-mead', 'scipy.optimize._optimize._minimize_neldermead'), + ('newton-cg', 'scipy.optimize._optimize._minimize_newtoncg'), + ('powell', 'scipy.optimize._optimize._minimize_powell'), + ('slsqp', 'scipy.optimize._slsqp_py._minimize_slsqp'), + ('tnc', 'scipy.optimize._tnc._minimize_tnc'), + ('trust-ncg', + 'scipy.optimize._trustregion_ncg._minimize_trust_ncg'), + ('trust-constr', + 'scipy.optimize._trustregion_constr.' + '_minimize_trustregion_constr'), + ('trust-exact', + 'scipy.optimize._trustregion_exact._minimize_trustregion_exact'), + ('trust-krylov', + 'scipy.optimize._trustregion_krylov._minimize_trust_krylov'), + ), + 'root': ( + ('hybr', 'scipy.optimize._minpack_py._root_hybr'), + ('lm', 'scipy.optimize._root._root_leastsq'), + ('broyden1', 'scipy.optimize._root._root_broyden1_doc'), + ('broyden2', 'scipy.optimize._root._root_broyden2_doc'), + ('anderson', 'scipy.optimize._root._root_anderson_doc'), + ('diagbroyden', 'scipy.optimize._root._root_diagbroyden_doc'), + ('excitingmixing', 'scipy.optimize._root._root_excitingmixing_doc'), + ('linearmixing', 'scipy.optimize._root._root_linearmixing_doc'), + ('krylov', 'scipy.optimize._root._root_krylov_doc'), + ('df-sane', 'scipy.optimize._spectral._root_df_sane'), + ), + 'root_scalar': ( + ('bisect', 'scipy.optimize._root_scalar._root_scalar_bisect_doc'), + ('brentq', 'scipy.optimize._root_scalar._root_scalar_brentq_doc'), + ('brenth', 'scipy.optimize._root_scalar._root_scalar_brenth_doc'), + ('ridder', 'scipy.optimize._root_scalar._root_scalar_ridder_doc'), + ('toms748', 
'scipy.optimize._root_scalar._root_scalar_toms748_doc'), + ('secant', 'scipy.optimize._root_scalar._root_scalar_secant_doc'), + ('newton', 'scipy.optimize._root_scalar._root_scalar_newton_doc'), + ('halley', 'scipy.optimize._root_scalar._root_scalar_halley_doc'), + ), + 'linprog': ( + ('simplex', 'scipy.optimize._linprog._linprog_simplex_doc'), + ('interior-point', 'scipy.optimize._linprog._linprog_ip_doc'), + ('revised simplex', 'scipy.optimize._linprog._linprog_rs_doc'), + ('highs-ipm', 'scipy.optimize._linprog._linprog_highs_ipm_doc'), + ('highs-ds', 'scipy.optimize._linprog._linprog_highs_ds_doc'), + ('highs', 'scipy.optimize._linprog._linprog_highs_doc'), + ), + 'quadratic_assignment': ( + ('faq', 'scipy.optimize._qap._quadratic_assignment_faq'), + ('2opt', 'scipy.optimize._qap._quadratic_assignment_2opt'), + ), + 'minimize_scalar': ( + ('brent', 'scipy.optimize._optimize._minimize_scalar_brent'), + ('bounded', 'scipy.optimize._optimize._minimize_scalar_bounded'), + ('golden', 'scipy.optimize._optimize._minimize_scalar_golden'), + ), + } + + if solver is None: + text = ["\n\n\n========\n", "minimize\n", "========\n"] + text.append(show_options('minimize', disp=False)) + text.extend(["\n\n===============\n", "minimize_scalar\n", + "===============\n"]) + text.append(show_options('minimize_scalar', disp=False)) + text.extend(["\n\n\n====\n", "root\n", + "====\n"]) + text.append(show_options('root', disp=False)) + text.extend(['\n\n\n=======\n', 'linprog\n', + '=======\n']) + text.append(show_options('linprog', disp=False)) + text = "".join(text) + else: + solver = solver.lower() + if solver not in doc_routines: + raise ValueError(f'Unknown solver {solver!r}') + + if method is None: + text = [] + for name, _ in doc_routines[solver]: + text.extend(["\n\n" + name, "\n" + "="*len(name) + "\n\n"]) + text.append(show_options(solver, name, disp=False)) + text = "".join(text) + else: + method = method.lower() + methods = dict(doc_routines[solver]) + if method not in 
methods: + raise ValueError(f"Unknown method {method!r}") + name = methods[method] + + # Import function object + parts = name.split('.') + mod_name = ".".join(parts[:-1]) + __import__(mod_name) + obj = getattr(sys.modules[mod_name], parts[-1]) + + # Get doc + doc = obj.__doc__ + if doc is not None: + text = textwrap.dedent(doc).strip() + else: + text = "" + + if disp: + print(text) + return + else: + return text diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_slsqp.cpython-310-x86_64-linux-gnu.so b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_slsqp.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..4ce2e585afa3c7b8ad9e88af2ad2a5dcd73d11fc Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_slsqp.cpython-310-x86_64-linux-gnu.so differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_spectral.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_spectral.py new file mode 100644 index 0000000000000000000000000000000000000000..5ff5bef0283b2d6b6c018c1c8b98cd46a335d7cb --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_spectral.py @@ -0,0 +1,260 @@ +""" +Spectral Algorithm for Nonlinear Equations +""" +import collections + +import numpy as np +from scipy.optimize import OptimizeResult +from scipy.optimize._optimize import _check_unknown_options +from ._linesearch import _nonmonotone_line_search_cruz, _nonmonotone_line_search_cheng + +class _NoConvergence(Exception): + pass + + +def _root_df_sane(func, x0, args=(), ftol=1e-8, fatol=1e-300, maxfev=1000, + fnorm=None, callback=None, disp=False, M=10, eta_strategy=None, + sigma_eps=1e-10, sigma_0=1.0, line_search='cruz', **unknown_options): + r""" + Solve nonlinear equation with the DF-SANE method + + Options + ------- + ftol : float, optional + Relative norm tolerance. + fatol : float, optional + Absolute norm tolerance. 
+ Algorithm terminates when ``||func(x)|| < fatol + ftol ||func(x_0)||``. + fnorm : callable, optional + Norm to use in the convergence check. If None, 2-norm is used. + maxfev : int, optional + Maximum number of function evaluations. + disp : bool, optional + Whether to print convergence process to stdout. + eta_strategy : callable, optional + Choice of the ``eta_k`` parameter, which gives slack for growth + of ``||F||**2``. Called as ``eta_k = eta_strategy(k, x, F)`` with + `k` the iteration number, `x` the current iterate and `F` the current + residual. Should satisfy ``eta_k > 0`` and ``sum(eta, k=0..inf) < inf``. + Default: ``||F||**2 / (1 + k)**2``. + sigma_eps : float, optional + The spectral coefficient is constrained to ``sigma_eps < sigma < 1/sigma_eps``. + Default: 1e-10 + sigma_0 : float, optional + Initial spectral coefficient. + Default: 1.0 + M : int, optional + Number of iterates to include in the nonmonotonic line search. + Default: 10 + line_search : {'cruz', 'cheng'} + Type of line search to employ. 'cruz' is the original one defined in + [Martinez & Raydan. Math. Comp. 75, 1429 (2006)], 'cheng' is + a modified search defined in [Cheng & Li. IMA J. Numer. Anal. 29, 814 (2009)]. + Default: 'cruz' + + References + ---------- + .. [1] "Spectral residual method without gradient information for solving + large-scale nonlinear systems of equations." W. La Cruz, + J.M. Martinez, M. Raydan. Math. Comp. **75**, 1429 (2006). + .. [2] W. La Cruz, Opt. Meth. Software, 29, 24 (2014). + .. [3] W. Cheng, D.-H. Li. IMA J. Numer. Anal. **29**, 814 (2009). + + """ + _check_unknown_options(unknown_options) + + if line_search not in ('cheng', 'cruz'): + raise ValueError(f"Invalid value {line_search!r} for 'line_search'") + + nexp = 2 + + if eta_strategy is None: + # Different choice from [1], as their eta is not invariant + # vs. scaling of F. 
+ def eta_strategy(k, x, F): + # Obtain squared 2-norm of the initial residual from the outer scope + return f_0 / (1 + k)**2 + + if fnorm is None: + def fnorm(F): + # Obtain squared 2-norm of the current residual from the outer scope + return f_k**(1.0/nexp) + + def fmerit(F): + return np.linalg.norm(F)**nexp + + nfev = [0] + f, x_k, x_shape, f_k, F_k, is_complex = _wrap_func(func, x0, fmerit, + nfev, maxfev, args) + + k = 0 + f_0 = f_k + sigma_k = sigma_0 + + F_0_norm = fnorm(F_k) + + # For the 'cruz' line search + prev_fs = collections.deque([f_k], M) + + # For the 'cheng' line search + Q = 1.0 + C = f_0 + + converged = False + message = "too many function evaluations required" + + while True: + F_k_norm = fnorm(F_k) + + if disp: + print("iter %d: ||F|| = %g, sigma = %g" % (k, F_k_norm, sigma_k)) + + if callback is not None: + callback(x_k, F_k) + + if F_k_norm < ftol * F_0_norm + fatol: + # Converged! + message = "successful convergence" + converged = True + break + + # Control spectral parameter, from [2] + if abs(sigma_k) > 1/sigma_eps: + sigma_k = 1/sigma_eps * np.sign(sigma_k) + elif abs(sigma_k) < sigma_eps: + sigma_k = sigma_eps + + # Line search direction + d = -sigma_k * F_k + + # Nonmonotone line search + eta = eta_strategy(k, x_k, F_k) + try: + if line_search == 'cruz': + alpha, xp, fp, Fp = _nonmonotone_line_search_cruz(f, x_k, d, prev_fs, + eta=eta) + elif line_search == 'cheng': + alpha, xp, fp, Fp, C, Q = _nonmonotone_line_search_cheng(f, x_k, d, f_k, + C, Q, eta=eta) + except _NoConvergence: + break + + # Update spectral parameter + s_k = xp - x_k + y_k = Fp - F_k + sigma_k = np.vdot(s_k, s_k) / np.vdot(s_k, y_k) + + # Take step + x_k = xp + F_k = Fp + f_k = fp + + # Store function value + if line_search == 'cruz': + prev_fs.append(fp) + + k += 1 + + x = _wrap_result(x_k, is_complex, shape=x_shape) + F = _wrap_result(F_k, is_complex) + + result = OptimizeResult(x=x, success=converged, + message=message, + fun=F, nfev=nfev[0], nit=k, 
method="df-sane") + + return result + + +def _wrap_func(func, x0, fmerit, nfev_list, maxfev, args=()): + """ + Wrap a function and an initial value so that (i) complex values + are wrapped to reals, and (ii) value for a merit function + fmerit(x, f) is computed at the same time, (iii) iteration count + is maintained and an exception is raised if it is exceeded. + + Parameters + ---------- + func : callable + Function to wrap + x0 : ndarray + Initial value + fmerit : callable + Merit function fmerit(f) for computing merit value from residual. + nfev_list : list + List to store number of evaluations in. Should be [0] in the beginning. + maxfev : int + Maximum number of evaluations before _NoConvergence is raised. + args : tuple + Extra arguments to func + + Returns + ------- + wrap_func : callable + Wrapped function, to be called as + ``F, fp = wrap_func(x0)`` + x0_wrap : ndarray of float + Wrapped initial value; raveled to 1-D and complex + values mapped to reals. + x0_shape : tuple + Shape of the initial value array + f : float + Merit function at F + F : ndarray of float + Residual at x0_wrap + is_complex : bool + Whether complex values were mapped to reals + + """ + x0 = np.asarray(x0) + x0_shape = x0.shape + F = np.asarray(func(x0, *args)).ravel() + is_complex = np.iscomplexobj(x0) or np.iscomplexobj(F) + x0 = x0.ravel() + + nfev_list[0] = 1 + + if is_complex: + def wrap_func(x): + if nfev_list[0] >= maxfev: + raise _NoConvergence() + nfev_list[0] += 1 + z = _real2complex(x).reshape(x0_shape) + v = np.asarray(func(z, *args)).ravel() + F = _complex2real(v) + f = fmerit(F) + return f, F + + x0 = _complex2real(x0) + F = _complex2real(F) + else: + def wrap_func(x): + if nfev_list[0] >= maxfev: + raise _NoConvergence() + nfev_list[0] += 1 + x = x.reshape(x0_shape) + F = np.asarray(func(x, *args)).ravel() + f = fmerit(F) + return f, F + + return wrap_func, x0, x0_shape, fmerit(F), F, is_complex + + +def _wrap_result(result, is_complex, shape=None): + """ + Convert 
from real to complex and reshape result arrays. + """ + if is_complex: + z = _real2complex(result) + else: + z = result + if shape is not None: + z = z.reshape(shape) + return z + + +def _real2complex(x): + return np.ascontiguousarray(x, dtype=float).view(np.complex128) + + +def _complex2real(z): + return np.ascontiguousarray(z, dtype=complex).view(np.float64) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tnc.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tnc.py new file mode 100644 index 0000000000000000000000000000000000000000..0f0b3be740368eb759d608b541930dbb88ec042b --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tnc.py @@ -0,0 +1,430 @@ +# TNC Python interface +# @(#) $Jeannot: tnc.py,v 1.11 2005/01/28 18:27:31 js Exp $ + +# Copyright (c) 2004-2005, Jean-Sebastien Roy (js@jeannot.org) + +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: + +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +""" +TNC: A Python interface to the TNC non-linear optimizer + +TNC is a non-linear optimizer. To use it, you must provide a function to +minimize. The function must take one argument: the list of coordinates where to +evaluate the function; and it must return either a tuple, whose first element is the +value of the function, and whose second argument is the gradient of the function +(as a list of values); or None, to abort the minimization. +""" + +from scipy.optimize import _moduleTNC as moduleTNC +from ._optimize import (MemoizeJac, OptimizeResult, _check_unknown_options, + _prepare_scalar_function) +from ._constraints import old_bound_to_new +from scipy._lib._array_api import atleast_nd, array_namespace + +from numpy import inf, array, zeros + +__all__ = ['fmin_tnc'] + + +MSG_NONE = 0 # No messages +MSG_ITER = 1 # One line per iteration +MSG_INFO = 2 # Informational messages +MSG_VERS = 4 # Version info +MSG_EXIT = 8 # Exit reasons +MSG_ALL = MSG_ITER + MSG_INFO + MSG_VERS + MSG_EXIT + +MSGS = { + MSG_NONE: "No messages", + MSG_ITER: "One line per iteration", + MSG_INFO: "Informational messages", + MSG_VERS: "Version info", + MSG_EXIT: "Exit reasons", + MSG_ALL: "All messages" +} + +INFEASIBLE = -1 # Infeasible (lower bound > upper bound) +LOCALMINIMUM = 0 # Local minimum reached (|pg| ~= 0) +FCONVERGED = 1 # Converged (|f_n-f_(n-1)| ~= 0) +XCONVERGED = 2 # Converged (|x_n-x_(n-1)| ~= 0) +MAXFUN = 3 # Max. number of function evaluations reached +LSFAIL = 4 # Linear search failed +CONSTANT = 5 # All lower bounds are equal to the upper bounds +NOPROGRESS = 6 # Unable to progress +USERABORT = 7 # User requested end of minimization + +RCSTRINGS = { + INFEASIBLE: "Infeasible (lower bound > upper bound)", + LOCALMINIMUM: "Local minimum reached (|pg| ~= 0)", + FCONVERGED: "Converged (|f_n-f_(n-1)| ~= 0)", + XCONVERGED: "Converged (|x_n-x_(n-1)| ~= 0)", + MAXFUN: "Max. 
number of function evaluations reached", + LSFAIL: "Linear search failed", + CONSTANT: "All lower bounds are equal to the upper bounds", + NOPROGRESS: "Unable to progress", + USERABORT: "User requested end of minimization" +} + +# Changes to interface made by Travis Oliphant, Apr. 2004 for inclusion in +# SciPy + + +def fmin_tnc(func, x0, fprime=None, args=(), approx_grad=0, + bounds=None, epsilon=1e-8, scale=None, offset=None, + messages=MSG_ALL, maxCGit=-1, maxfun=None, eta=-1, + stepmx=0, accuracy=0, fmin=0, ftol=-1, xtol=-1, pgtol=-1, + rescale=-1, disp=None, callback=None): + """ + Minimize a function with variables subject to bounds, using + gradient information in a truncated Newton algorithm. This + method wraps a C implementation of the algorithm. + + Parameters + ---------- + func : callable ``func(x, *args)`` + Function to minimize. Must do one of: + + 1. Return f and g, where f is the value of the function and g its + gradient (a list of floats). + + 2. Return the function value but supply gradient function + separately as `fprime`. + + 3. Return the function value and set ``approx_grad=True``. + + If the function returns None, the minimization + is aborted. + x0 : array_like + Initial estimate of minimum. + fprime : callable ``fprime(x, *args)``, optional + Gradient of `func`. If None, then either `func` must return the + function value and the gradient (``f,g = func(x, *args)``) + or `approx_grad` must be True. + args : tuple, optional + Arguments to pass to function. + approx_grad : bool, optional + If true, approximate the gradient numerically. + bounds : list, optional + (min, max) pairs for each element in x0, defining the + bounds on that parameter. Use None or +/-inf for one of + min or max when there is no bound in that direction. + epsilon : float, optional + Used if approx_grad is True. The stepsize in a finite + difference approximation for fprime. + scale : array_like, optional + Scaling factors to apply to each variable. 
If None, the + factors are up-low for interval bounded variables and + 1+|x| for the others. Defaults to None. + offset : array_like, optional + Value to subtract from each variable. If None, the + offsets are (up+low)/2 for interval bounded variables + and x for the others. + messages : int, optional + Bit mask used to select messages display during + minimization values defined in the MSGS dict. Defaults to + MGS_ALL. + disp : int, optional + Integer interface to messages. 0 = no message, 5 = all messages + maxCGit : int, optional + Maximum number of hessian*vector evaluations per main + iteration. If maxCGit == 0, the direction chosen is + -gradient if maxCGit < 0, maxCGit is set to + max(1,min(50,n/2)). Defaults to -1. + maxfun : int, optional + Maximum number of function evaluation. If None, maxfun is + set to max(100, 10*len(x0)). Defaults to None. Note that this function + may violate the limit because of evaluating gradients by numerical + differentiation. + eta : float, optional + Severity of the line search. If < 0 or > 1, set to 0.25. + Defaults to -1. + stepmx : float, optional + Maximum step for the line search. May be increased during + call. If too small, it will be set to 10.0. Defaults to 0. + accuracy : float, optional + Relative precision for finite difference calculations. If + <= machine_precision, set to sqrt(machine_precision). + Defaults to 0. + fmin : float, optional + Minimum function value estimate. Defaults to 0. + ftol : float, optional + Precision goal for the value of f in the stopping criterion. + If ftol < 0.0, ftol is set to 0.0 defaults to -1. + xtol : float, optional + Precision goal for the value of x in the stopping + criterion (after applying x scaling factors). If xtol < + 0.0, xtol is set to sqrt(machine_precision). Defaults to + -1. + pgtol : float, optional + Precision goal for the value of the projected gradient in + the stopping criterion (after applying x scaling factors). 
+ If pgtol < 0.0, pgtol is set to 1e-2 * sqrt(accuracy). + Setting it to 0.0 is not recommended. Defaults to -1. + rescale : float, optional + Scaling factor (in log10) used to trigger f value + rescaling. If 0, rescale at each iteration. If a large + value, never rescale. If < 0, rescale is set to 1.3. + callback : callable, optional + Called after each iteration, as callback(xk), where xk is the + current parameter vector. + + Returns + ------- + x : ndarray + The solution. + nfeval : int + The number of function evaluations. + rc : int + Return code, see below + + See also + -------- + minimize: Interface to minimization algorithms for multivariate + functions. See the 'TNC' `method` in particular. + + Notes + ----- + The underlying algorithm is truncated Newton, also called + Newton Conjugate-Gradient. This method differs from + scipy.optimize.fmin_ncg in that + + 1. it wraps a C implementation of the algorithm + 2. it allows each variable to be given an upper and lower bound. + + The algorithm incorporates the bound constraints by determining + the descent direction as in an unconstrained truncated Newton, + but never taking a step-size large enough to leave the space + of feasible x's. The algorithm keeps track of a set of + currently active constraints, and ignores them when computing + the minimum allowable step size. (The x's associated with the + active constraint are kept fixed.) If the maximum allowable + step size is zero then a new constraint is added. At the end + of each iteration one of the constraints may be deemed no + longer active and removed. A constraint is considered + no longer active is if it is currently active + but the gradient for that variable points inward from the + constraint. The specific constraint removed is the one + associated with the variable of largest index whose + constraint is no longer active. 
+ + Return codes are defined as follows:: + + -1 : Infeasible (lower bound > upper bound) + 0 : Local minimum reached (|pg| ~= 0) + 1 : Converged (|f_n-f_(n-1)| ~= 0) + 2 : Converged (|x_n-x_(n-1)| ~= 0) + 3 : Max. number of function evaluations reached + 4 : Linear search failed + 5 : All lower bounds are equal to the upper bounds + 6 : Unable to progress + 7 : User requested end of minimization + + References + ---------- + Wright S., Nocedal J. (2006), 'Numerical Optimization' + + Nash S.G. (1984), "Newton-Type Minimization Via the Lanczos Method", + SIAM Journal of Numerical Analysis 21, pp. 770-778 + + """ + # handle fprime/approx_grad + if approx_grad: + fun = func + jac = None + elif fprime is None: + fun = MemoizeJac(func) + jac = fun.derivative + else: + fun = func + jac = fprime + + if disp is not None: # disp takes precedence over messages + mesg_num = disp + else: + mesg_num = {0:MSG_NONE, 1:MSG_ITER, 2:MSG_INFO, 3:MSG_VERS, + 4:MSG_EXIT, 5:MSG_ALL}.get(messages, MSG_ALL) + # build options + opts = {'eps': epsilon, + 'scale': scale, + 'offset': offset, + 'mesg_num': mesg_num, + 'maxCGit': maxCGit, + 'maxfun': maxfun, + 'eta': eta, + 'stepmx': stepmx, + 'accuracy': accuracy, + 'minfev': fmin, + 'ftol': ftol, + 'xtol': xtol, + 'gtol': pgtol, + 'rescale': rescale, + 'disp': False} + + res = _minimize_tnc(fun, x0, args, jac, bounds, callback=callback, **opts) + + return res['x'], res['nfev'], res['status'] + + +def _minimize_tnc(fun, x0, args=(), jac=None, bounds=None, + eps=1e-8, scale=None, offset=None, mesg_num=None, + maxCGit=-1, eta=-1, stepmx=0, accuracy=0, + minfev=0, ftol=-1, xtol=-1, gtol=-1, rescale=-1, disp=False, + callback=None, finite_diff_rel_step=None, maxfun=None, + **unknown_options): + """ + Minimize a scalar function of one or more variables using a truncated + Newton (TNC) algorithm. 
+ + Options + ------- + eps : float or ndarray + If `jac is None` the absolute step size used for numerical + approximation of the jacobian via forward differences. + scale : list of floats + Scaling factors to apply to each variable. If None, the + factors are up-low for interval bounded variables and + 1+|x] for the others. Defaults to None. + offset : float + Value to subtract from each variable. If None, the + offsets are (up+low)/2 for interval bounded variables + and x for the others. + disp : bool + Set to True to print convergence messages. + maxCGit : int + Maximum number of hessian*vector evaluations per main + iteration. If maxCGit == 0, the direction chosen is + -gradient if maxCGit < 0, maxCGit is set to + max(1,min(50,n/2)). Defaults to -1. + eta : float + Severity of the line search. If < 0 or > 1, set to 0.25. + Defaults to -1. + stepmx : float + Maximum step for the line search. May be increased during + call. If too small, it will be set to 10.0. Defaults to 0. + accuracy : float + Relative precision for finite difference calculations. If + <= machine_precision, set to sqrt(machine_precision). + Defaults to 0. + minfev : float + Minimum function value estimate. Defaults to 0. + ftol : float + Precision goal for the value of f in the stopping criterion. + If ftol < 0.0, ftol is set to 0.0 defaults to -1. + xtol : float + Precision goal for the value of x in the stopping + criterion (after applying x scaling factors). If xtol < + 0.0, xtol is set to sqrt(machine_precision). Defaults to + -1. + gtol : float + Precision goal for the value of the projected gradient in + the stopping criterion (after applying x scaling factors). + If gtol < 0.0, gtol is set to 1e-2 * sqrt(accuracy). + Setting it to 0.0 is not recommended. Defaults to -1. + rescale : float + Scaling factor (in log10) used to trigger f value + rescaling. If 0, rescale at each iteration. If a large + value, never rescale. If < 0, rescale is set to 1.3. 
+ finite_diff_rel_step : None or array_like, optional + If `jac in ['2-point', '3-point', 'cs']` the relative step size to + use for numerical approximation of the jacobian. The absolute step + size is computed as ``h = rel_step * sign(x) * max(1, abs(x))``, + possibly adjusted to fit into the bounds. For ``method='3-point'`` + the sign of `h` is ignored. If None (default) then step is selected + automatically. + maxfun : int + Maximum number of function evaluations. If None, `maxfun` is + set to max(100, 10*len(x0)). Defaults to None. + """ + _check_unknown_options(unknown_options) + fmin = minfev + pgtol = gtol + + xp = array_namespace(x0) + x0 = atleast_nd(x0, ndim=1, xp=xp) + dtype = xp.float64 + if xp.isdtype(x0.dtype, "real floating"): + dtype = x0.dtype + x0 = xp.reshape(xp.astype(x0, dtype), -1) + + n = len(x0) + + if bounds is None: + bounds = [(None,None)] * n + if len(bounds) != n: + raise ValueError('length of x0 != length of bounds') + new_bounds = old_bound_to_new(bounds) + + if mesg_num is not None: + messages = {0:MSG_NONE, 1:MSG_ITER, 2:MSG_INFO, 3:MSG_VERS, + 4:MSG_EXIT, 5:MSG_ALL}.get(mesg_num, MSG_ALL) + elif disp: + messages = MSG_ALL + else: + messages = MSG_NONE + + sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps, + finite_diff_rel_step=finite_diff_rel_step, + bounds=new_bounds) + func_and_grad = sf.fun_and_grad + + """ + low, up : the bounds (lists of floats) + if low is None, the lower bounds are removed. + if up is None, the upper bounds are removed. 
+ low and up defaults to None + """ + low = zeros(n) + up = zeros(n) + for i in range(n): + if bounds[i] is None: + l, u = -inf, inf + else: + l,u = bounds[i] + if l is None: + low[i] = -inf + else: + low[i] = l + if u is None: + up[i] = inf + else: + up[i] = u + + if scale is None: + scale = array([]) + + if offset is None: + offset = array([]) + + if maxfun is None: + maxfun = max(100, 10*len(x0)) + + rc, nf, nit, x, funv, jacv = moduleTNC.tnc_minimize( + func_and_grad, x0, low, up, scale, + offset, messages, maxCGit, maxfun, + eta, stepmx, accuracy, fmin, ftol, + xtol, pgtol, rescale, callback + ) + # the TNC documentation states: "On output, x, f and g may be very + # slightly out of sync because of scaling". Therefore re-evaluate + # func_and_grad so they are synced. + funv, jacv = func_and_grad(x) + + return OptimizeResult(x=x, fun=funv, jac=jacv, nfev=sf.nfev, + nit=nit, status=rc, message=RCSTRINGS[rc], + success=(-1 < rc < 3)) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..549cfb9760dda474cb858b7b36d236af48111067 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/__init__.py @@ -0,0 +1,6 @@ +"""This module contains the equality constrained SQP solver.""" + + +from .minimize_trustregion_constr import _minimize_trustregion_constr + +__all__ = ['_minimize_trustregion_constr'] diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/canonical_constraint.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/canonical_constraint.py new file mode 100644 index 0000000000000000000000000000000000000000..e1ad583bb8eee524d35c2e5bb16934f78629cd69 --- /dev/null +++ 
b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/canonical_constraint.py @@ -0,0 +1,390 @@ +import numpy as np +import scipy.sparse as sps + + +class CanonicalConstraint: + """Canonical constraint to use with trust-constr algorithm. + + It represents the set of constraints of the form:: + + f_eq(x) = 0 + f_ineq(x) <= 0 + + where ``f_eq`` and ``f_ineq`` are evaluated by a single function, see + below. + + The class is supposed to be instantiated by factory methods, which + should prepare the parameters listed below. + + Parameters + ---------- + n_eq, n_ineq : int + Number of equality and inequality constraints respectively. + fun : callable + Function defining the constraints. The signature is + ``fun(x) -> c_eq, c_ineq``, where ``c_eq`` is ndarray with `n_eq` + components and ``c_ineq`` is ndarray with `n_ineq` components. + jac : callable + Function to evaluate the Jacobian of the constraint. The signature + is ``jac(x) -> J_eq, J_ineq``, where ``J_eq`` and ``J_ineq`` are + either ndarray of csr_matrix of shapes (n_eq, n) and (n_ineq, n), + respectively. + hess : callable + Function to evaluate the Hessian of the constraints multiplied + by Lagrange multipliers, that is + ``dot(f_eq, v_eq) + dot(f_ineq, v_ineq)``. The signature is + ``hess(x, v_eq, v_ineq) -> H``, where ``H`` has an implied + shape (n, n) and provide a matrix-vector product operation + ``H.dot(p)``. + keep_feasible : ndarray, shape (n_ineq,) + Mask indicating which inequality constraints should be kept feasible. 
+ """ + def __init__(self, n_eq, n_ineq, fun, jac, hess, keep_feasible): + self.n_eq = n_eq + self.n_ineq = n_ineq + self.fun = fun + self.jac = jac + self.hess = hess + self.keep_feasible = keep_feasible + + @classmethod + def from_PreparedConstraint(cls, constraint): + """Create an instance from `PreparedConstrained` object.""" + lb, ub = constraint.bounds + cfun = constraint.fun + keep_feasible = constraint.keep_feasible + + if np.all(lb == -np.inf) and np.all(ub == np.inf): + return cls.empty(cfun.n) + + if np.all(lb == -np.inf) and np.all(ub == np.inf): + return cls.empty(cfun.n) + elif np.all(lb == ub): + return cls._equal_to_canonical(cfun, lb) + elif np.all(lb == -np.inf): + return cls._less_to_canonical(cfun, ub, keep_feasible) + elif np.all(ub == np.inf): + return cls._greater_to_canonical(cfun, lb, keep_feasible) + else: + return cls._interval_to_canonical(cfun, lb, ub, keep_feasible) + + @classmethod + def empty(cls, n): + """Create an "empty" instance. + + This "empty" instance is required to allow working with unconstrained + problems as if they have some constraints. + """ + empty_fun = np.empty(0) + empty_jac = np.empty((0, n)) + empty_hess = sps.csr_matrix((n, n)) + + def fun(x): + return empty_fun, empty_fun + + def jac(x): + return empty_jac, empty_jac + + def hess(x, v_eq, v_ineq): + return empty_hess + + return cls(0, 0, fun, jac, hess, np.empty(0, dtype=np.bool_)) + + @classmethod + def concatenate(cls, canonical_constraints, sparse_jacobian): + """Concatenate multiple `CanonicalConstraint` into one. + + `sparse_jacobian` (bool) determines the Jacobian format of the + concatenated constraint. Note that items in `canonical_constraints` + must have their Jacobians in the same format. 
+ """ + def fun(x): + if canonical_constraints: + eq_all, ineq_all = zip( + *[c.fun(x) for c in canonical_constraints]) + else: + eq_all, ineq_all = [], [] + + return np.hstack(eq_all), np.hstack(ineq_all) + + if sparse_jacobian: + vstack = sps.vstack + else: + vstack = np.vstack + + def jac(x): + if canonical_constraints: + eq_all, ineq_all = zip( + *[c.jac(x) for c in canonical_constraints]) + else: + eq_all, ineq_all = [], [] + + return vstack(eq_all), vstack(ineq_all) + + def hess(x, v_eq, v_ineq): + hess_all = [] + index_eq = 0 + index_ineq = 0 + for c in canonical_constraints: + vc_eq = v_eq[index_eq:index_eq + c.n_eq] + vc_ineq = v_ineq[index_ineq:index_ineq + c.n_ineq] + hess_all.append(c.hess(x, vc_eq, vc_ineq)) + index_eq += c.n_eq + index_ineq += c.n_ineq + + def matvec(p): + result = np.zeros_like(p) + for h in hess_all: + result += h.dot(p) + return result + + n = x.shape[0] + return sps.linalg.LinearOperator((n, n), matvec, dtype=float) + + n_eq = sum(c.n_eq for c in canonical_constraints) + n_ineq = sum(c.n_ineq for c in canonical_constraints) + keep_feasible = np.hstack([c.keep_feasible for c in + canonical_constraints]) + + return cls(n_eq, n_ineq, fun, jac, hess, keep_feasible) + + @classmethod + def _equal_to_canonical(cls, cfun, value): + empty_fun = np.empty(0) + n = cfun.n + + n_eq = value.shape[0] + n_ineq = 0 + keep_feasible = np.empty(0, dtype=bool) + + if cfun.sparse_jacobian: + empty_jac = sps.csr_matrix((0, n)) + else: + empty_jac = np.empty((0, n)) + + def fun(x): + return cfun.fun(x) - value, empty_fun + + def jac(x): + return cfun.jac(x), empty_jac + + def hess(x, v_eq, v_ineq): + return cfun.hess(x, v_eq) + + empty_fun = np.empty(0) + n = cfun.n + if cfun.sparse_jacobian: + empty_jac = sps.csr_matrix((0, n)) + else: + empty_jac = np.empty((0, n)) + + return cls(n_eq, n_ineq, fun, jac, hess, keep_feasible) + + @classmethod + def _less_to_canonical(cls, cfun, ub, keep_feasible): + empty_fun = np.empty(0) + n = cfun.n + if 
cfun.sparse_jacobian: + empty_jac = sps.csr_matrix((0, n)) + else: + empty_jac = np.empty((0, n)) + + finite_ub = ub < np.inf + n_eq = 0 + n_ineq = np.sum(finite_ub) + + if np.all(finite_ub): + def fun(x): + return empty_fun, cfun.fun(x) - ub + + def jac(x): + return empty_jac, cfun.jac(x) + + def hess(x, v_eq, v_ineq): + return cfun.hess(x, v_ineq) + else: + finite_ub = np.nonzero(finite_ub)[0] + keep_feasible = keep_feasible[finite_ub] + ub = ub[finite_ub] + + def fun(x): + return empty_fun, cfun.fun(x)[finite_ub] - ub + + def jac(x): + return empty_jac, cfun.jac(x)[finite_ub] + + def hess(x, v_eq, v_ineq): + v = np.zeros(cfun.m) + v[finite_ub] = v_ineq + return cfun.hess(x, v) + + return cls(n_eq, n_ineq, fun, jac, hess, keep_feasible) + + @classmethod + def _greater_to_canonical(cls, cfun, lb, keep_feasible): + empty_fun = np.empty(0) + n = cfun.n + if cfun.sparse_jacobian: + empty_jac = sps.csr_matrix((0, n)) + else: + empty_jac = np.empty((0, n)) + + finite_lb = lb > -np.inf + n_eq = 0 + n_ineq = np.sum(finite_lb) + + if np.all(finite_lb): + def fun(x): + return empty_fun, lb - cfun.fun(x) + + def jac(x): + return empty_jac, -cfun.jac(x) + + def hess(x, v_eq, v_ineq): + return cfun.hess(x, -v_ineq) + else: + finite_lb = np.nonzero(finite_lb)[0] + keep_feasible = keep_feasible[finite_lb] + lb = lb[finite_lb] + + def fun(x): + return empty_fun, lb - cfun.fun(x)[finite_lb] + + def jac(x): + return empty_jac, -cfun.jac(x)[finite_lb] + + def hess(x, v_eq, v_ineq): + v = np.zeros(cfun.m) + v[finite_lb] = -v_ineq + return cfun.hess(x, v) + + return cls(n_eq, n_ineq, fun, jac, hess, keep_feasible) + + @classmethod + def _interval_to_canonical(cls, cfun, lb, ub, keep_feasible): + lb_inf = lb == -np.inf + ub_inf = ub == np.inf + equal = lb == ub + less = lb_inf & ~ub_inf + greater = ub_inf & ~lb_inf + interval = ~equal & ~lb_inf & ~ub_inf + + equal = np.nonzero(equal)[0] + less = np.nonzero(less)[0] + greater = np.nonzero(greater)[0] + interval = 
np.nonzero(interval)[0] + n_less = less.shape[0] + n_greater = greater.shape[0] + n_interval = interval.shape[0] + n_ineq = n_less + n_greater + 2 * n_interval + n_eq = equal.shape[0] + + keep_feasible = np.hstack((keep_feasible[less], + keep_feasible[greater], + keep_feasible[interval], + keep_feasible[interval])) + + def fun(x): + f = cfun.fun(x) + eq = f[equal] - lb[equal] + le = f[less] - ub[less] + ge = lb[greater] - f[greater] + il = f[interval] - ub[interval] + ig = lb[interval] - f[interval] + return eq, np.hstack((le, ge, il, ig)) + + def jac(x): + J = cfun.jac(x) + eq = J[equal] + le = J[less] + ge = -J[greater] + il = J[interval] + ig = -il + if sps.issparse(J): + ineq = sps.vstack((le, ge, il, ig)) + else: + ineq = np.vstack((le, ge, il, ig)) + return eq, ineq + + def hess(x, v_eq, v_ineq): + n_start = 0 + v_l = v_ineq[n_start:n_start + n_less] + n_start += n_less + v_g = v_ineq[n_start:n_start + n_greater] + n_start += n_greater + v_il = v_ineq[n_start:n_start + n_interval] + n_start += n_interval + v_ig = v_ineq[n_start:n_start + n_interval] + + v = np.zeros_like(lb) + v[equal] = v_eq + v[less] = v_l + v[greater] = -v_g + v[interval] = v_il - v_ig + + return cfun.hess(x, v) + + return cls(n_eq, n_ineq, fun, jac, hess, keep_feasible) + + +def initial_constraints_as_canonical(n, prepared_constraints, sparse_jacobian): + """Convert initial values of the constraints to the canonical format. + + The purpose to avoid one additional call to the constraints at the initial + point. It takes saved values in `PreparedConstraint`, modififies and + concatenates them to the canonical constraint format. 
+ """ + c_eq = [] + c_ineq = [] + J_eq = [] + J_ineq = [] + + for c in prepared_constraints: + f = c.fun.f + J = c.fun.J + lb, ub = c.bounds + if np.all(lb == ub): + c_eq.append(f - lb) + J_eq.append(J) + elif np.all(lb == -np.inf): + finite_ub = ub < np.inf + c_ineq.append(f[finite_ub] - ub[finite_ub]) + J_ineq.append(J[finite_ub]) + elif np.all(ub == np.inf): + finite_lb = lb > -np.inf + c_ineq.append(lb[finite_lb] - f[finite_lb]) + J_ineq.append(-J[finite_lb]) + else: + lb_inf = lb == -np.inf + ub_inf = ub == np.inf + equal = lb == ub + less = lb_inf & ~ub_inf + greater = ub_inf & ~lb_inf + interval = ~equal & ~lb_inf & ~ub_inf + + c_eq.append(f[equal] - lb[equal]) + c_ineq.append(f[less] - ub[less]) + c_ineq.append(lb[greater] - f[greater]) + c_ineq.append(f[interval] - ub[interval]) + c_ineq.append(lb[interval] - f[interval]) + + J_eq.append(J[equal]) + J_ineq.append(J[less]) + J_ineq.append(-J[greater]) + J_ineq.append(J[interval]) + J_ineq.append(-J[interval]) + + c_eq = np.hstack(c_eq) if c_eq else np.empty(0) + c_ineq = np.hstack(c_ineq) if c_ineq else np.empty(0) + + if sparse_jacobian: + vstack = sps.vstack + empty = sps.csr_matrix((0, n)) + else: + vstack = np.vstack + empty = np.empty((0, n)) + + J_eq = vstack(J_eq) if J_eq else empty + J_ineq = vstack(J_ineq) if J_ineq else empty + + return c_eq, c_ineq, J_eq, J_ineq diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/equality_constrained_sqp.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/equality_constrained_sqp.py new file mode 100644 index 0000000000000000000000000000000000000000..fb4c05dcdd03fb990d3418220a398e249ef581ee --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/equality_constrained_sqp.py @@ -0,0 +1,231 @@ +"""Byrd-Omojokun Trust-Region SQP method.""" + +from scipy.sparse import eye as speye +from .projections import projections +from .qp_subproblem import 
modified_dogleg, projected_cg, box_intersections +import numpy as np +from numpy.linalg import norm + +__all__ = ['equality_constrained_sqp'] + + +def default_scaling(x): + n, = np.shape(x) + return speye(n) + + +def equality_constrained_sqp(fun_and_constr, grad_and_jac, lagr_hess, + x0, fun0, grad0, constr0, + jac0, stop_criteria, + state, + initial_penalty, + initial_trust_radius, + factorization_method, + trust_lb=None, + trust_ub=None, + scaling=default_scaling): + """Solve nonlinear equality-constrained problem using trust-region SQP. + + Solve optimization problem: + + minimize fun(x) + subject to: constr(x) = 0 + + using Byrd-Omojokun Trust-Region SQP method described in [1]_. Several + implementation details are based on [2]_ and [3]_, p. 549. + + References + ---------- + .. [1] Lalee, Marucha, Jorge Nocedal, and Todd Plantenga. "On the + implementation of an algorithm for large-scale equality + constrained optimization." SIAM Journal on + Optimization 8.3 (1998): 682-706. + .. [2] Byrd, Richard H., Mary E. Hribar, and Jorge Nocedal. + "An interior point algorithm for large-scale nonlinear + programming." SIAM Journal on Optimization 9.4 (1999): 877-900. + .. [3] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). + """ + PENALTY_FACTOR = 0.3 # Rho from formula (3.51), reference [2]_, p.891. + LARGE_REDUCTION_RATIO = 0.9 + INTERMEDIARY_REDUCTION_RATIO = 0.3 + SUFFICIENT_REDUCTION_RATIO = 1e-8 # Eta from reference [2]_, p.892. + TRUST_ENLARGEMENT_FACTOR_L = 7.0 + TRUST_ENLARGEMENT_FACTOR_S = 2.0 + MAX_TRUST_REDUCTION = 0.5 + MIN_TRUST_REDUCTION = 0.1 + SOC_THRESHOLD = 0.1 + TR_FACTOR = 0.8 # Zeta from formula (3.21), reference [2]_, p.885. + BOX_FACTOR = 0.5 + + n, = np.shape(x0) # Number of parameters + + # Set default lower and upper bounds. 
+ if trust_lb is None: + trust_lb = np.full(n, -np.inf) + if trust_ub is None: + trust_ub = np.full(n, np.inf) + + # Initial values + x = np.copy(x0) + trust_radius = initial_trust_radius + penalty = initial_penalty + # Compute Values + f = fun0 + c = grad0 + b = constr0 + A = jac0 + S = scaling(x) + # Get projections + try: + Z, LS, Y = projections(A, factorization_method) + except ValueError as e: + if str(e) == "expected square matrix": + # can be the case if there are more equality + # constraints than independent variables + raise ValueError( + "The 'expected square matrix' error can occur if there are" + " more equality constraints than independent variables." + " Consider how your constraints are set up, or use" + " factorization_method='SVDFactorization'." + ) from e + else: + raise e + + # Compute least-square lagrange multipliers + v = -LS.dot(c) + # Compute Hessian + H = lagr_hess(x, v) + + # Update state parameters + optimality = norm(c + A.T.dot(v), np.inf) + constr_violation = norm(b, np.inf) if len(b) > 0 else 0 + cg_info = {'niter': 0, 'stop_cond': 0, + 'hits_boundary': False} + + last_iteration_failed = False + while not stop_criteria(state, x, last_iteration_failed, + optimality, constr_violation, + trust_radius, penalty, cg_info): + # Normal Step - `dn` + # minimize 1/2*||A dn + b||^2 + # subject to: + # ||dn|| <= TR_FACTOR * trust_radius + # BOX_FACTOR * lb <= dn <= BOX_FACTOR * ub. 
+ dn = modified_dogleg(A, Y, b, + TR_FACTOR*trust_radius, + BOX_FACTOR*trust_lb, + BOX_FACTOR*trust_ub) + + # Tangential Step - `dt` + # Solve the QP problem: + # minimize 1/2 dt.T H dt + dt.T (H dn + c) + # subject to: + # A dt = 0 + # ||dt|| <= sqrt(trust_radius**2 - ||dn||**2) + # lb - dn <= dt <= ub - dn + c_t = H.dot(dn) + c + b_t = np.zeros_like(b) + trust_radius_t = np.sqrt(trust_radius**2 - np.linalg.norm(dn)**2) + lb_t = trust_lb - dn + ub_t = trust_ub - dn + dt, cg_info = projected_cg(H, c_t, Z, Y, b_t, + trust_radius_t, + lb_t, ub_t) + + # Compute update (normal + tangential steps). + d = dn + dt + + # Compute second order model: 1/2 d H d + c.T d + f. + quadratic_model = 1/2*(H.dot(d)).dot(d) + c.T.dot(d) + # Compute linearized constraint: l = A d + b. + linearized_constr = A.dot(d)+b + # Compute new penalty parameter according to formula (3.52), + # reference [2]_, p.891. + vpred = norm(b) - norm(linearized_constr) + # Guarantee `vpred` always positive, + # regardless of roundoff errors. + vpred = max(1e-16, vpred) + previous_penalty = penalty + if quadratic_model > 0: + new_penalty = quadratic_model / ((1-PENALTY_FACTOR)*vpred) + penalty = max(penalty, new_penalty) + # Compute predicted reduction according to formula (3.52), + # reference [2]_, p.891. + predicted_reduction = -quadratic_model + penalty*vpred + + # Compute merit function at current point + merit_function = f + penalty*norm(b) + # Evaluate function and constraints at trial point + x_next = x + S.dot(d) + f_next, b_next = fun_and_constr(x_next) + # Compute merit function at trial point + merit_function_next = f_next + penalty*norm(b_next) + # Compute actual reduction according to formula (3.54), + # reference [2]_, p.892. + actual_reduction = merit_function - merit_function_next + # Compute reduction ratio + reduction_ratio = actual_reduction / predicted_reduction + + # Second order correction (SOC), reference [2]_, p.892. 
+ if reduction_ratio < SUFFICIENT_REDUCTION_RATIO and \ + norm(dn) <= SOC_THRESHOLD * norm(dt): + # Compute second order correction + y = -Y.dot(b_next) + # Make sure increment is inside box constraints + _, t, intersect = box_intersections(d, y, trust_lb, trust_ub) + # Compute tentative point + x_soc = x + S.dot(d + t*y) + f_soc, b_soc = fun_and_constr(x_soc) + # Recompute actual reduction + merit_function_soc = f_soc + penalty*norm(b_soc) + actual_reduction_soc = merit_function - merit_function_soc + # Recompute reduction ratio + reduction_ratio_soc = actual_reduction_soc / predicted_reduction + if intersect and reduction_ratio_soc >= SUFFICIENT_REDUCTION_RATIO: + x_next = x_soc + f_next = f_soc + b_next = b_soc + reduction_ratio = reduction_ratio_soc + + # Readjust trust region step, formula (3.55), reference [2]_, p.892. + if reduction_ratio >= LARGE_REDUCTION_RATIO: + trust_radius = max(TRUST_ENLARGEMENT_FACTOR_L * norm(d), + trust_radius) + elif reduction_ratio >= INTERMEDIARY_REDUCTION_RATIO: + trust_radius = max(TRUST_ENLARGEMENT_FACTOR_S * norm(d), + trust_radius) + # Reduce trust region step, according to reference [3]_, p.696. 
+ elif reduction_ratio < SUFFICIENT_REDUCTION_RATIO: + trust_reduction = ((1-SUFFICIENT_REDUCTION_RATIO) / + (1-reduction_ratio)) + new_trust_radius = trust_reduction * norm(d) + if new_trust_radius >= MAX_TRUST_REDUCTION * trust_radius: + trust_radius *= MAX_TRUST_REDUCTION + elif new_trust_radius >= MIN_TRUST_REDUCTION * trust_radius: + trust_radius = new_trust_radius + else: + trust_radius *= MIN_TRUST_REDUCTION + + # Update iteration + if reduction_ratio >= SUFFICIENT_REDUCTION_RATIO: + x = x_next + f, b = f_next, b_next + c, A = grad_and_jac(x) + S = scaling(x) + # Get projections + Z, LS, Y = projections(A, factorization_method) + # Compute least-square lagrange multipliers + v = -LS.dot(c) + # Compute Hessian + H = lagr_hess(x, v) + # Set Flag + last_iteration_failed = False + # Otimality values + optimality = norm(c + A.T.dot(v), np.inf) + constr_violation = norm(b, np.inf) if len(b) > 0 else 0 + else: + penalty = previous_penalty + last_iteration_failed = True + + return x, state diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/minimize_trustregion_constr.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/minimize_trustregion_constr.py new file mode 100644 index 0000000000000000000000000000000000000000..2835ea5445c0eafc303f0cb1ab8543f48b7e3bb9 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/minimize_trustregion_constr.py @@ -0,0 +1,564 @@ +import time +import numpy as np +from scipy.sparse.linalg import LinearOperator +from .._differentiable_functions import VectorFunction +from .._constraints import ( + NonlinearConstraint, LinearConstraint, PreparedConstraint, Bounds, strict_bounds) +from .._hessian_update_strategy import BFGS +from .._optimize import OptimizeResult +from .._differentiable_functions import ScalarFunction +from .equality_constrained_sqp import equality_constrained_sqp +from .canonical_constraint import 
(CanonicalConstraint, + initial_constraints_as_canonical) +from .tr_interior_point import tr_interior_point +from .report import BasicReport, SQPReport, IPReport + + +TERMINATION_MESSAGES = { + 0: "The maximum number of function evaluations is exceeded.", + 1: "`gtol` termination condition is satisfied.", + 2: "`xtol` termination condition is satisfied.", + 3: "`callback` function requested termination." +} + + +class HessianLinearOperator: + """Build LinearOperator from hessp""" + def __init__(self, hessp, n): + self.hessp = hessp + self.n = n + + def __call__(self, x, *args): + def matvec(p): + return self.hessp(x, p, *args) + + return LinearOperator((self.n, self.n), matvec=matvec) + + +class LagrangianHessian: + """The Hessian of the Lagrangian as LinearOperator. + + The Lagrangian is computed as the objective function plus all the + constraints multiplied with some numbers (Lagrange multipliers). + """ + def __init__(self, n, objective_hess, constraints_hess): + self.n = n + self.objective_hess = objective_hess + self.constraints_hess = constraints_hess + + def __call__(self, x, v_eq=np.empty(0), v_ineq=np.empty(0)): + H_objective = self.objective_hess(x) + H_constraints = self.constraints_hess(x, v_eq, v_ineq) + + def matvec(p): + return H_objective.dot(p) + H_constraints.dot(p) + + return LinearOperator((self.n, self.n), matvec) + + +def update_state_sqp(state, x, last_iteration_failed, objective, prepared_constraints, + start_time, tr_radius, constr_penalty, cg_info): + state.nit += 1 + state.nfev = objective.nfev + state.njev = objective.ngev + state.nhev = objective.nhev + state.constr_nfev = [c.fun.nfev if isinstance(c.fun, VectorFunction) else 0 + for c in prepared_constraints] + state.constr_njev = [c.fun.njev if isinstance(c.fun, VectorFunction) else 0 + for c in prepared_constraints] + state.constr_nhev = [c.fun.nhev if isinstance(c.fun, VectorFunction) else 0 + for c in prepared_constraints] + + if not last_iteration_failed: + state.x = x + 
state.fun = objective.f + state.grad = objective.g + state.v = [c.fun.v for c in prepared_constraints] + state.constr = [c.fun.f for c in prepared_constraints] + state.jac = [c.fun.J for c in prepared_constraints] + # Compute Lagrangian Gradient + state.lagrangian_grad = np.copy(state.grad) + for c in prepared_constraints: + state.lagrangian_grad += c.fun.J.T.dot(c.fun.v) + state.optimality = np.linalg.norm(state.lagrangian_grad, np.inf) + # Compute maximum constraint violation + state.constr_violation = 0 + for i in range(len(prepared_constraints)): + lb, ub = prepared_constraints[i].bounds + c = state.constr[i] + state.constr_violation = np.max([state.constr_violation, + np.max(lb - c), + np.max(c - ub)]) + + state.execution_time = time.time() - start_time + state.tr_radius = tr_radius + state.constr_penalty = constr_penalty + state.cg_niter += cg_info["niter"] + state.cg_stop_cond = cg_info["stop_cond"] + + return state + + +def update_state_ip(state, x, last_iteration_failed, objective, + prepared_constraints, start_time, + tr_radius, constr_penalty, cg_info, + barrier_parameter, barrier_tolerance): + state = update_state_sqp(state, x, last_iteration_failed, objective, + prepared_constraints, start_time, tr_radius, + constr_penalty, cg_info) + state.barrier_parameter = barrier_parameter + state.barrier_tolerance = barrier_tolerance + return state + + +def _minimize_trustregion_constr(fun, x0, args, grad, + hess, hessp, bounds, constraints, + xtol=1e-8, gtol=1e-8, + barrier_tol=1e-8, + sparse_jacobian=None, + callback=None, maxiter=1000, + verbose=0, finite_diff_rel_step=None, + initial_constr_penalty=1.0, initial_tr_radius=1.0, + initial_barrier_parameter=0.1, + initial_barrier_tolerance=0.1, + factorization_method=None, + disp=False): + """Minimize a scalar function subject to constraints. + + Parameters + ---------- + gtol : float, optional + Tolerance for termination by the norm of the Lagrangian gradient. 
+ The algorithm will terminate when both the infinity norm (i.e., max + abs value) of the Lagrangian gradient and the constraint violation + are smaller than ``gtol``. Default is 1e-8. + xtol : float, optional + Tolerance for termination by the change of the independent variable. + The algorithm will terminate when ``tr_radius < xtol``, where + ``tr_radius`` is the radius of the trust region used in the algorithm. + Default is 1e-8. + barrier_tol : float, optional + Threshold on the barrier parameter for the algorithm termination. + When inequality constraints are present, the algorithm will terminate + only when the barrier parameter is less than `barrier_tol`. + Default is 1e-8. + sparse_jacobian : {bool, None}, optional + Determines how to represent Jacobians of the constraints. If bool, + then Jacobians of all the constraints will be converted to the + corresponding format. If None (default), then Jacobians won't be + converted, but the algorithm can proceed only if they all have the + same format. + initial_tr_radius: float, optional + Initial trust radius. The trust radius gives the maximum distance + between solution points in consecutive iterations. It reflects the + trust the algorithm puts in the local approximation of the optimization + problem. For an accurate local approximation the trust-region should be + large and for an approximation valid only close to the current point it + should be a small one. The trust radius is automatically updated throughout + the optimization process, with ``initial_tr_radius`` being its initial value. + Default is 1 (recommended in [1]_, p. 19). + initial_constr_penalty : float, optional + Initial constraints penalty parameter. The penalty parameter is used for + balancing the requirements of decreasing the objective function + and satisfying the constraints. 
It is used for defining the merit function: + ``merit_function(x) = fun(x) + constr_penalty * constr_norm_l2(x)``, + where ``constr_norm_l2(x)`` is the l2 norm of a vector containing all + the constraints. The merit function is used for accepting or rejecting + trial points and ``constr_penalty`` weights the two conflicting goals + of reducing objective function and constraints. The penalty is automatically + updated throughout the optimization process, with + ``initial_constr_penalty`` being its initial value. Default is 1 + (recommended in [1]_, p 19). + initial_barrier_parameter, initial_barrier_tolerance: float, optional + Initial barrier parameter and initial tolerance for the barrier subproblem. + Both are used only when inequality constraints are present. For dealing with + optimization problems ``min_x f(x)`` subject to inequality constraints + ``c(x) <= 0`` the algorithm introduces slack variables, solving the problem + ``min_(x,s) f(x) + barrier_parameter*sum(ln(s))`` subject to the equality + constraints ``c(x) + s = 0`` instead of the original problem. This subproblem + is solved for decreasing values of ``barrier_parameter`` and with decreasing + tolerances for the termination, starting with ``initial_barrier_parameter`` + for the barrier parameter and ``initial_barrier_tolerance`` for the + barrier tolerance. Default is 0.1 for both values (recommended in [1]_ p. 19). + Also note that ``barrier_parameter`` and ``barrier_tolerance`` are updated + with the same prefactor. + factorization_method : string or None, optional + Method to factorize the Jacobian of the constraints. Use None (default) + for the auto selection or one of: + + - 'NormalEquation' (requires scikit-sparse) + - 'AugmentedSystem' + - 'QRFactorization' + - 'SVDFactorization' + + The methods 'NormalEquation' and 'AugmentedSystem' can be used only + with sparse constraints. 
The projections required by the algorithm + will be computed using, respectively, the normal equation and the + augmented system approaches explained in [1]_. 'NormalEquation' + computes the Cholesky factorization of ``A A.T`` and 'AugmentedSystem' + performs the LU factorization of an augmented system. They usually + provide similar results. 'AugmentedSystem' is used by default for + sparse matrices. + + The methods 'QRFactorization' and 'SVDFactorization' can be used + only with dense constraints. They compute the required projections + using, respectively, QR and SVD factorizations. The 'SVDFactorization' + method can cope with Jacobian matrices with deficient row rank and will + be used whenever other factorization methods fail (which may imply the + conversion of sparse matrices to a dense format when required). + By default, 'QRFactorization' is used for dense matrices. + finite_diff_rel_step : None or array_like, optional + Relative step size for the finite difference approximation. + maxiter : int, optional + Maximum number of algorithm iterations. Default is 1000. + verbose : {0, 1, 2}, optional + Level of algorithm's verbosity: + + * 0 (default) : work silently. + * 1 : display a termination report. + * 2 : display progress during iterations. + * 3 : display progress during iterations (more complete report). + + disp : bool, optional + If True (default), then `verbose` will be set to 1 if it was 0. + + Returns + ------- + `OptimizeResult` with the fields documented below. Note the following: + + 1. All values corresponding to the constraints are ordered as they + were passed to the solver. And values corresponding to `bounds` + constraints are put *after* other constraints. + 2. All numbers of function, Jacobian or Hessian evaluations correspond + to numbers of actual Python function calls. 
It means, for example, + that if a Jacobian is estimated by finite differences, then the + number of Jacobian evaluations will be zero and the number of + function evaluations will be incremented by all calls during the + finite difference estimation. + + x : ndarray, shape (n,) + Solution found. + optimality : float + Infinity norm of the Lagrangian gradient at the solution. + constr_violation : float + Maximum constraint violation at the solution. + fun : float + Objective function at the solution. + grad : ndarray, shape (n,) + Gradient of the objective function at the solution. + lagrangian_grad : ndarray, shape (n,) + Gradient of the Lagrangian function at the solution. + nit : int + Total number of iterations. + nfev : integer + Number of the objective function evaluations. + njev : integer + Number of the objective function gradient evaluations. + nhev : integer + Number of the objective function Hessian evaluations. + cg_niter : int + Total number of the conjugate gradient method iterations. + method : {'equality_constrained_sqp', 'tr_interior_point'} + Optimization method used. + constr : list of ndarray + List of constraint values at the solution. + jac : list of {ndarray, sparse matrix} + List of the Jacobian matrices of the constraints at the solution. + v : list of ndarray + List of the Lagrange multipliers for the constraints at the solution. + For an inequality constraint a positive multiplier means that the upper + bound is active, a negative multiplier means that the lower bound is + active and if a multiplier is zero it means the constraint is not + active. + constr_nfev : list of int + Number of constraint evaluations for each of the constraints. + constr_njev : list of int + Number of Jacobian matrix evaluations for each of the constraints. + constr_nhev : list of int + Number of Hessian evaluations for each of the constraints. + tr_radius : float + Radius of the trust region at the last iteration. 
+ constr_penalty : float + Penalty parameter at the last iteration, see `initial_constr_penalty`. + barrier_tolerance : float + Tolerance for the barrier subproblem at the last iteration. + Only for problems with inequality constraints. + barrier_parameter : float + Barrier parameter at the last iteration. Only for problems + with inequality constraints. + execution_time : float + Total execution time. + message : str + Termination message. + status : {0, 1, 2, 3} + Termination status: + + * 0 : The maximum number of function evaluations is exceeded. + * 1 : `gtol` termination condition is satisfied. + * 2 : `xtol` termination condition is satisfied. + * 3 : `callback` function requested termination. + + cg_stop_cond : int + Reason for CG subproblem termination at the last iteration: + + * 0 : CG subproblem not evaluated. + * 1 : Iteration limit was reached. + * 2 : Reached the trust-region boundary. + * 3 : Negative curvature detected. + * 4 : Tolerance was satisfied. + + References + ---------- + .. [1] Conn, A. R., Gould, N. I., & Toint, P. L. + Trust region methods. 2000. Siam. pp. 19. 
+ """ + x0 = np.atleast_1d(x0).astype(float) + n_vars = np.size(x0) + if hess is None: + if callable(hessp): + hess = HessianLinearOperator(hessp, n_vars) + else: + hess = BFGS() + if disp and verbose == 0: + verbose = 1 + + if bounds is not None: + modified_lb = np.nextafter(bounds.lb, -np.inf, where=bounds.lb > -np.inf) + modified_ub = np.nextafter(bounds.ub, np.inf, where=bounds.ub < np.inf) + modified_lb = np.where(np.isfinite(bounds.lb), modified_lb, bounds.lb) + modified_ub = np.where(np.isfinite(bounds.ub), modified_ub, bounds.ub) + bounds = Bounds(modified_lb, modified_ub, keep_feasible=bounds.keep_feasible) + finite_diff_bounds = strict_bounds(bounds.lb, bounds.ub, + bounds.keep_feasible, n_vars) + else: + finite_diff_bounds = (-np.inf, np.inf) + + # Define Objective Function + objective = ScalarFunction(fun, x0, args, grad, hess, + finite_diff_rel_step, finite_diff_bounds) + + # Put constraints in list format when needed. + if isinstance(constraints, (NonlinearConstraint, LinearConstraint)): + constraints = [constraints] + + # Prepare constraints. + prepared_constraints = [ + PreparedConstraint(c, x0, sparse_jacobian, finite_diff_bounds) + for c in constraints] + + # Check that all constraints are either sparse or dense. + n_sparse = sum(c.fun.sparse_jacobian for c in prepared_constraints) + if 0 < n_sparse < len(prepared_constraints): + raise ValueError("All constraints must have the same kind of the " + "Jacobian --- either all sparse or all dense. " + "You can set the sparsity globally by setting " + "`sparse_jacobian` to either True of False.") + if prepared_constraints: + sparse_jacobian = n_sparse > 0 + + if bounds is not None: + if sparse_jacobian is None: + sparse_jacobian = True + prepared_constraints.append(PreparedConstraint(bounds, x0, + sparse_jacobian)) + + # Concatenate initial constraints to the canonical form. 
+ c_eq0, c_ineq0, J_eq0, J_ineq0 = initial_constraints_as_canonical( + n_vars, prepared_constraints, sparse_jacobian) + + # Prepare all canonical constraints and concatenate it into one. + canonical_all = [CanonicalConstraint.from_PreparedConstraint(c) + for c in prepared_constraints] + + if len(canonical_all) == 0: + canonical = CanonicalConstraint.empty(n_vars) + elif len(canonical_all) == 1: + canonical = canonical_all[0] + else: + canonical = CanonicalConstraint.concatenate(canonical_all, + sparse_jacobian) + + # Generate the Hessian of the Lagrangian. + lagrangian_hess = LagrangianHessian(n_vars, objective.hess, canonical.hess) + + # Choose appropriate method + if canonical.n_ineq == 0: + method = 'equality_constrained_sqp' + else: + method = 'tr_interior_point' + + # Construct OptimizeResult + state = OptimizeResult( + nit=0, nfev=0, njev=0, nhev=0, + cg_niter=0, cg_stop_cond=0, + fun=objective.f, grad=objective.g, + lagrangian_grad=np.copy(objective.g), + constr=[c.fun.f for c in prepared_constraints], + jac=[c.fun.J for c in prepared_constraints], + constr_nfev=[0 for c in prepared_constraints], + constr_njev=[0 for c in prepared_constraints], + constr_nhev=[0 for c in prepared_constraints], + v=[c.fun.v for c in prepared_constraints], + method=method) + + # Start counting + start_time = time.time() + + # Define stop criteria + if method == 'equality_constrained_sqp': + def stop_criteria(state, x, last_iteration_failed, + optimality, constr_violation, + tr_radius, constr_penalty, cg_info): + state = update_state_sqp(state, x, last_iteration_failed, + objective, prepared_constraints, + start_time, tr_radius, constr_penalty, + cg_info) + if verbose == 2: + BasicReport.print_iteration(state.nit, + state.nfev, + state.cg_niter, + state.fun, + state.tr_radius, + state.optimality, + state.constr_violation) + elif verbose > 2: + SQPReport.print_iteration(state.nit, + state.nfev, + state.cg_niter, + state.fun, + state.tr_radius, + state.optimality, + 
state.constr_violation, + state.constr_penalty, + state.cg_stop_cond) + state.status = None + state.niter = state.nit # Alias for callback (backward-compatibility) + if callback is not None: + callback_stop = False + try: + callback_stop = callback(state) + except StopIteration: + callback_stop = True + if callback_stop: + state.status = 3 + return True + if state.optimality < gtol and state.constr_violation < gtol: + state.status = 1 + elif state.tr_radius < xtol: + state.status = 2 + elif state.nit >= maxiter: + state.status = 0 + return state.status in (0, 1, 2, 3) + elif method == 'tr_interior_point': + def stop_criteria(state, x, last_iteration_failed, tr_radius, + constr_penalty, cg_info, barrier_parameter, + barrier_tolerance): + state = update_state_ip(state, x, last_iteration_failed, + objective, prepared_constraints, + start_time, tr_radius, constr_penalty, + cg_info, barrier_parameter, barrier_tolerance) + if verbose == 2: + BasicReport.print_iteration(state.nit, + state.nfev, + state.cg_niter, + state.fun, + state.tr_radius, + state.optimality, + state.constr_violation) + elif verbose > 2: + IPReport.print_iteration(state.nit, + state.nfev, + state.cg_niter, + state.fun, + state.tr_radius, + state.optimality, + state.constr_violation, + state.constr_penalty, + state.barrier_parameter, + state.cg_stop_cond) + state.status = None + state.niter = state.nit # Alias for callback (backward compatibility) + if callback is not None: + callback_stop = False + try: + callback_stop = callback(state) + except StopIteration: + callback_stop = True + if callback_stop: + state.status = 3 + return True + if state.optimality < gtol and state.constr_violation < gtol: + state.status = 1 + elif (state.tr_radius < xtol + and state.barrier_parameter < barrier_tol): + state.status = 2 + elif state.nit >= maxiter: + state.status = 0 + return state.status in (0, 1, 2, 3) + + if verbose == 2: + BasicReport.print_header() + elif verbose > 2: + if method == 
'equality_constrained_sqp': + SQPReport.print_header() + elif method == 'tr_interior_point': + IPReport.print_header() + + # Call inferior function to do the optimization + if method == 'equality_constrained_sqp': + def fun_and_constr(x): + f = objective.fun(x) + c_eq, _ = canonical.fun(x) + return f, c_eq + + def grad_and_jac(x): + g = objective.grad(x) + J_eq, _ = canonical.jac(x) + return g, J_eq + + _, result = equality_constrained_sqp( + fun_and_constr, grad_and_jac, lagrangian_hess, + x0, objective.f, objective.g, + c_eq0, J_eq0, + stop_criteria, state, + initial_constr_penalty, initial_tr_radius, + factorization_method) + + elif method == 'tr_interior_point': + _, result = tr_interior_point( + objective.fun, objective.grad, lagrangian_hess, + n_vars, canonical.n_ineq, canonical.n_eq, + canonical.fun, canonical.jac, + x0, objective.f, objective.g, + c_ineq0, J_ineq0, c_eq0, J_eq0, + stop_criteria, + canonical.keep_feasible, + xtol, state, initial_barrier_parameter, + initial_barrier_tolerance, + initial_constr_penalty, initial_tr_radius, + factorization_method) + + # Status 3 occurs when the callback function requests termination, + # this is assumed to not be a success. + result.success = True if result.status in (1, 2) else False + result.message = TERMINATION_MESSAGES[result.status] + + # Alias (for backward compatibility with 1.1.0) + result.niter = result.nit + + if verbose == 2: + BasicReport.print_footer() + elif verbose > 2: + if method == 'equality_constrained_sqp': + SQPReport.print_footer() + elif method == 'tr_interior_point': + IPReport.print_footer() + if verbose >= 1: + print(result.message) + print("Number of iterations: {}, function evaluations: {}, " + "CG iterations: {}, optimality: {:.2e}, " + "constraint violation: {:.2e}, execution time: {:4.2} s." 
+ .format(result.nit, result.nfev, result.cg_niter, + result.optimality, result.constr_violation, + result.execution_time)) + return result diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/projections.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/projections.py new file mode 100644 index 0000000000000000000000000000000000000000..a07b836bdbad688a265ae34ce91a361fd5050eb1 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/projections.py @@ -0,0 +1,407 @@ +"""Basic linear factorizations needed by the solver.""" + +from scipy.sparse import (bmat, csc_matrix, eye, issparse) +from scipy.sparse.linalg import LinearOperator +import scipy.linalg +import scipy.sparse.linalg +try: + from sksparse.cholmod import cholesky_AAt + sksparse_available = True +except ImportError: + import warnings + sksparse_available = False +import numpy as np +from warnings import warn + +__all__ = [ + 'orthogonality', + 'projections', +] + + +def orthogonality(A, g): + """Measure orthogonality between a vector and the null space of a matrix. + + Compute a measure of orthogonality between the null space + of the (possibly sparse) matrix ``A`` and a given vector ``g``. + + The formula is a simplified (and cheaper) version of formula (3.13) + from [1]_. + ``orth = norm(A g, ord=2)/(norm(A, ord='fro')*norm(g, ord=2))``. + + References + ---------- + .. [1] Gould, Nicholas IM, Mary E. Hribar, and Jorge Nocedal. + "On the solution of equality constrained quadratic + programming problems arising in optimization." + SIAM Journal on Scientific Computing 23.4 (2001): 1376-1395. 
+ """ + # Compute vector norms + norm_g = np.linalg.norm(g) + # Compute Froebnius norm of the matrix A + if issparse(A): + norm_A = scipy.sparse.linalg.norm(A, ord='fro') + else: + norm_A = np.linalg.norm(A, ord='fro') + + # Check if norms are zero + if norm_g == 0 or norm_A == 0: + return 0 + + norm_A_g = np.linalg.norm(A.dot(g)) + # Orthogonality measure + orth = norm_A_g / (norm_A*norm_g) + return orth + + +def normal_equation_projections(A, m, n, orth_tol, max_refin, tol): + """Return linear operators for matrix A using ``NormalEquation`` approach. + """ + # Cholesky factorization + factor = cholesky_AAt(A) + + # z = x - A.T inv(A A.T) A x + def null_space(x): + v = factor(A.dot(x)) + z = x - A.T.dot(v) + + # Iterative refinement to improve roundoff + # errors described in [2]_, algorithm 5.1. + k = 0 + while orthogonality(A, z) > orth_tol: + if k >= max_refin: + break + # z_next = z - A.T inv(A A.T) A z + v = factor(A.dot(z)) + z = z - A.T.dot(v) + k += 1 + + return z + + # z = inv(A A.T) A x + def least_squares(x): + return factor(A.dot(x)) + + # z = A.T inv(A A.T) x + def row_space(x): + return A.T.dot(factor(x)) + + return null_space, least_squares, row_space + + +def augmented_system_projections(A, m, n, orth_tol, max_refin, tol): + """Return linear operators for matrix A - ``AugmentedSystem``.""" + # Form augmented system + K = csc_matrix(bmat([[eye(n), A.T], [A, None]])) + # LU factorization + # TODO: Use a symmetric indefinite factorization + # to solve the system twice as fast (because + # of the symmetry). + try: + solve = scipy.sparse.linalg.factorized(K) + except RuntimeError: + warn("Singular Jacobian matrix. 
Using dense SVD decomposition to " + "perform the factorizations.", + stacklevel=3) + return svd_factorization_projections(A.toarray(), + m, n, orth_tol, + max_refin, tol) + + # z = x - A.T inv(A A.T) A x + # is computed solving the extended system: + # [I A.T] * [ z ] = [x] + # [A O ] [aux] [0] + def null_space(x): + # v = [x] + # [0] + v = np.hstack([x, np.zeros(m)]) + # lu_sol = [ z ] + # [aux] + lu_sol = solve(v) + z = lu_sol[:n] + + # Iterative refinement to improve roundoff + # errors described in [2]_, algorithm 5.2. + k = 0 + while orthogonality(A, z) > orth_tol: + if k >= max_refin: + break + # new_v = [x] - [I A.T] * [ z ] + # [0] [A O ] [aux] + new_v = v - K.dot(lu_sol) + # [I A.T] * [delta z ] = new_v + # [A O ] [delta aux] + lu_update = solve(new_v) + # [ z ] += [delta z ] + # [aux] [delta aux] + lu_sol += lu_update + z = lu_sol[:n] + k += 1 + + # return z = x - A.T inv(A A.T) A x + return z + + # z = inv(A A.T) A x + # is computed solving the extended system: + # [I A.T] * [aux] = [x] + # [A O ] [ z ] [0] + def least_squares(x): + # v = [x] + # [0] + v = np.hstack([x, np.zeros(m)]) + # lu_sol = [aux] + # [ z ] + lu_sol = solve(v) + # return z = inv(A A.T) A x + return lu_sol[n:m+n] + + # z = A.T inv(A A.T) x + # is computed solving the extended system: + # [I A.T] * [ z ] = [0] + # [A O ] [aux] [x] + def row_space(x): + # v = [0] + # [x] + v = np.hstack([np.zeros(n), x]) + # lu_sol = [ z ] + # [aux] + lu_sol = solve(v) + # return z = A.T inv(A A.T) x + return lu_sol[:n] + + return null_space, least_squares, row_space + + +def qr_factorization_projections(A, m, n, orth_tol, max_refin, tol): + """Return linear operators for matrix A using ``QRFactorization`` approach. + """ + # QRFactorization + Q, R, P = scipy.linalg.qr(A.T, pivoting=True, mode='economic') + + if np.linalg.norm(R[-1, :], np.inf) < tol: + warn('Singular Jacobian matrix. 
Using SVD decomposition to ' + + 'perform the factorizations.', + stacklevel=3) + return svd_factorization_projections(A, m, n, + orth_tol, + max_refin, + tol) + + # z = x - A.T inv(A A.T) A x + def null_space(x): + # v = P inv(R) Q.T x + aux1 = Q.T.dot(x) + aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False) + v = np.zeros(m) + v[P] = aux2 + z = x - A.T.dot(v) + + # Iterative refinement to improve roundoff + # errors described in [2]_, algorithm 5.1. + k = 0 + while orthogonality(A, z) > orth_tol: + if k >= max_refin: + break + # v = P inv(R) Q.T x + aux1 = Q.T.dot(z) + aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False) + v[P] = aux2 + # z_next = z - A.T v + z = z - A.T.dot(v) + k += 1 + + return z + + # z = inv(A A.T) A x + def least_squares(x): + # z = P inv(R) Q.T x + aux1 = Q.T.dot(x) + aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False) + z = np.zeros(m) + z[P] = aux2 + return z + + # z = A.T inv(A A.T) x + def row_space(x): + # z = Q inv(R.T) P.T x + aux1 = x[P] + aux2 = scipy.linalg.solve_triangular(R, aux1, + lower=False, + trans='T') + z = Q.dot(aux2) + return z + + return null_space, least_squares, row_space + + +def svd_factorization_projections(A, m, n, orth_tol, max_refin, tol): + """Return linear operators for matrix A using ``SVDFactorization`` approach. + """ + # SVD Factorization + U, s, Vt = scipy.linalg.svd(A, full_matrices=False) + + # Remove dimensions related with very small singular values + U = U[:, s > tol] + Vt = Vt[s > tol, :] + s = s[s > tol] + + # z = x - A.T inv(A A.T) A x + def null_space(x): + # v = U 1/s V.T x = inv(A A.T) A x + aux1 = Vt.dot(x) + aux2 = 1/s*aux1 + v = U.dot(aux2) + z = x - A.T.dot(v) + + # Iterative refinement to improve roundoff + # errors described in [2]_, algorithm 5.1. 
+ k = 0 + while orthogonality(A, z) > orth_tol: + if k >= max_refin: + break + # v = U 1/s V.T x = inv(A A.T) A x + aux1 = Vt.dot(z) + aux2 = 1/s*aux1 + v = U.dot(aux2) + # z_next = z - A.T v + z = z - A.T.dot(v) + k += 1 + + return z + + # z = inv(A A.T) A x + def least_squares(x): + # z = U 1/s V.T x = inv(A A.T) A x + aux1 = Vt.dot(x) + aux2 = 1/s*aux1 + z = U.dot(aux2) + return z + + # z = A.T inv(A A.T) x + def row_space(x): + # z = V 1/s U.T x + aux1 = U.T.dot(x) + aux2 = 1/s*aux1 + z = Vt.T.dot(aux2) + return z + + return null_space, least_squares, row_space + + +def projections(A, method=None, orth_tol=1e-12, max_refin=3, tol=1e-15): + """Return three linear operators related with a given matrix A. + + Parameters + ---------- + A : sparse matrix (or ndarray), shape (m, n) + Matrix ``A`` used in the projection. + method : string, optional + Method used for compute the given linear + operators. Should be one of: + + - 'NormalEquation': The operators + will be computed using the + so-called normal equation approach + explained in [1]_. In order to do + so the Cholesky factorization of + ``(A A.T)`` is computed. Exclusive + for sparse matrices. + - 'AugmentedSystem': The operators + will be computed using the + so-called augmented system approach + explained in [1]_. Exclusive + for sparse matrices. + - 'QRFactorization': Compute projections + using QR factorization. Exclusive for + dense matrices. + - 'SVDFactorization': Compute projections + using SVD factorization. Exclusive for + dense matrices. + + orth_tol : float, optional + Tolerance for iterative refinements. + max_refin : int, optional + Maximum number of iterative refinements. + tol : float, optional + Tolerance for singular values. + + Returns + ------- + Z : LinearOperator, shape (n, n) + Null-space operator. For a given vector ``x``, + the null space operator is equivalent to apply + a projection matrix ``P = I - A.T inv(A A.T) A`` + to the vector. 
It can be shown that this is + equivalent to project ``x`` into the null space + of A. + LS : LinearOperator, shape (m, n) + Least-squares operator. For a given vector ``x``, + the least-squares operator is equivalent to apply a + pseudoinverse matrix ``pinv(A.T) = inv(A A.T) A`` + to the vector. It can be shown that this vector + ``pinv(A.T) x`` is the least_square solution to + ``A.T y = x``. + Y : LinearOperator, shape (n, m) + Row-space operator. For a given vector ``x``, + the row-space operator is equivalent to apply a + projection matrix ``Q = A.T inv(A A.T)`` + to the vector. It can be shown that this + vector ``y = Q x`` the minimum norm solution + of ``A y = x``. + + Notes + ----- + Uses iterative refinements described in [1] + during the computation of ``Z`` in order to + cope with the possibility of large roundoff errors. + + References + ---------- + .. [1] Gould, Nicholas IM, Mary E. Hribar, and Jorge Nocedal. + "On the solution of equality constrained quadratic + programming problems arising in optimization." + SIAM Journal on Scientific Computing 23.4 (2001): 1376-1395. + """ + m, n = np.shape(A) + + # The factorization of an empty matrix + # only works for the sparse representation. + if m*n == 0: + A = csc_matrix(A) + + # Check Argument + if issparse(A): + if method is None: + method = "AugmentedSystem" + if method not in ("NormalEquation", "AugmentedSystem"): + raise ValueError("Method not allowed for sparse matrix.") + if method == "NormalEquation" and not sksparse_available: + warnings.warn("Only accepts 'NormalEquation' option when " + "scikit-sparse is available. 
Using " + "'AugmentedSystem' option instead.", + ImportWarning, stacklevel=3) + method = 'AugmentedSystem' + else: + if method is None: + method = "QRFactorization" + if method not in ("QRFactorization", "SVDFactorization"): + raise ValueError("Method not allowed for dense array.") + + if method == 'NormalEquation': + null_space, least_squares, row_space \ + = normal_equation_projections(A, m, n, orth_tol, max_refin, tol) + elif method == 'AugmentedSystem': + null_space, least_squares, row_space \ + = augmented_system_projections(A, m, n, orth_tol, max_refin, tol) + elif method == "QRFactorization": + null_space, least_squares, row_space \ + = qr_factorization_projections(A, m, n, orth_tol, max_refin, tol) + elif method == "SVDFactorization": + null_space, least_squares, row_space \ + = svd_factorization_projections(A, m, n, orth_tol, max_refin, tol) + + Z = LinearOperator((n, n), null_space) + LS = LinearOperator((m, n), least_squares) + Y = LinearOperator((n, m), row_space) + + return Z, LS, Y diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/qp_subproblem.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/qp_subproblem.py new file mode 100644 index 0000000000000000000000000000000000000000..a039a7738c283f90f30fd7c4583bf9e1a8f559d5 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/qp_subproblem.py @@ -0,0 +1,637 @@ +"""Equality-constrained quadratic programming solvers.""" + +from scipy.sparse import (linalg, bmat, csc_matrix) +from math import copysign +import numpy as np +from numpy.linalg import norm + +__all__ = [ + 'eqp_kktfact', + 'sphere_intersections', + 'box_intersections', + 'box_sphere_intersections', + 'inside_box_boundaries', + 'modified_dogleg', + 'projected_cg' +] + + +# For comparison with the projected CG +def eqp_kktfact(H, c, A, b): + """Solve equality-constrained quadratic programming (EQP) problem. 
+ + Solve ``min 1/2 x.T H x + x.t c`` subject to ``A x + b = 0`` + using direct factorization of the KKT system. + + Parameters + ---------- + H : sparse matrix, shape (n, n) + Hessian matrix of the EQP problem. + c : array_like, shape (n,) + Gradient of the quadratic objective function. + A : sparse matrix + Jacobian matrix of the EQP problem. + b : array_like, shape (m,) + Right-hand side of the constraint equation. + + Returns + ------- + x : array_like, shape (n,) + Solution of the KKT problem. + lagrange_multipliers : ndarray, shape (m,) + Lagrange multipliers of the KKT problem. + """ + n, = np.shape(c) # Number of parameters + m, = np.shape(b) # Number of constraints + + # Karush-Kuhn-Tucker matrix of coefficients. + # Defined as in Nocedal/Wright "Numerical + # Optimization" p.452 in Eq. (16.4). + kkt_matrix = csc_matrix(bmat([[H, A.T], [A, None]])) + # Vector of coefficients. + kkt_vec = np.hstack([-c, -b]) + + # TODO: Use a symmetric indefinite factorization + # to solve the system twice as fast (because + # of the symmetry). + lu = linalg.splu(kkt_matrix) + kkt_sol = lu.solve(kkt_vec) + x = kkt_sol[:n] + lagrange_multipliers = -kkt_sol[n:n+m] + + return x, lagrange_multipliers + + +def sphere_intersections(z, d, trust_radius, + entire_line=False): + """Find the intersection between segment (or line) and spherical constraints. + + Find the intersection between the segment (or line) defined by the + parametric equation ``x(t) = z + t*d`` and the ball + ``||x|| <= trust_radius``. + + Parameters + ---------- + z : array_like, shape (n,) + Initial point. + d : array_like, shape (n,) + Direction. + trust_radius : float + Ball radius. + entire_line : bool, optional + When ``True``, the function returns the intersection between the line + ``x(t) = z + t*d`` (``t`` can assume any value) and the ball + ``||x|| <= trust_radius``. When ``False``, the function returns the intersection + between the segment ``x(t) = z + t*d``, ``0 <= t <= 1``, and the ball. 
+ + Returns + ------- + ta, tb : float + The line/segment ``x(t) = z + t*d`` is inside the ball for + for ``ta <= t <= tb``. + intersect : bool + When ``True``, there is a intersection between the line/segment + and the sphere. On the other hand, when ``False``, there is no + intersection. + """ + # Special case when d=0 + if norm(d) == 0: + return 0, 0, False + # Check for inf trust_radius + if np.isinf(trust_radius): + if entire_line: + ta = -np.inf + tb = np.inf + else: + ta = 0 + tb = 1 + intersect = True + return ta, tb, intersect + + a = np.dot(d, d) + b = 2 * np.dot(z, d) + c = np.dot(z, z) - trust_radius**2 + discriminant = b*b - 4*a*c + if discriminant < 0: + intersect = False + return 0, 0, intersect + sqrt_discriminant = np.sqrt(discriminant) + + # The following calculation is mathematically + # equivalent to: + # ta = (-b - sqrt_discriminant) / (2*a) + # tb = (-b + sqrt_discriminant) / (2*a) + # but produce smaller round off errors. + # Look at Matrix Computation p.97 + # for a better justification. + aux = b + copysign(sqrt_discriminant, b) + ta = -aux / (2*a) + tb = -2*c / aux + ta, tb = sorted([ta, tb]) + + if entire_line: + intersect = True + else: + # Checks to see if intersection happens + # within vectors length. + if tb < 0 or ta > 1: + intersect = False + ta = 0 + tb = 0 + else: + intersect = True + # Restrict intersection interval + # between 0 and 1. + ta = max(0, ta) + tb = min(1, tb) + + return ta, tb, intersect + + +def box_intersections(z, d, lb, ub, + entire_line=False): + """Find the intersection between segment (or line) and box constraints. + + Find the intersection between the segment (or line) defined by the + parametric equation ``x(t) = z + t*d`` and the rectangular box + ``lb <= x <= ub``. + + Parameters + ---------- + z : array_like, shape (n,) + Initial point. + d : array_like, shape (n,) + Direction. + lb : array_like, shape (n,) + Lower bounds to each one of the components of ``x``. Used + to delimit the rectangular box. 
+ ub : array_like, shape (n, ) + Upper bounds to each one of the components of ``x``. Used + to delimit the rectangular box. + entire_line : bool, optional + When ``True``, the function returns the intersection between the line + ``x(t) = z + t*d`` (``t`` can assume any value) and the rectangular + box. When ``False``, the function returns the intersection between the segment + ``x(t) = z + t*d``, ``0 <= t <= 1``, and the rectangular box. + + Returns + ------- + ta, tb : float + The line/segment ``x(t) = z + t*d`` is inside the box for + for ``ta <= t <= tb``. + intersect : bool + When ``True``, there is a intersection between the line (or segment) + and the rectangular box. On the other hand, when ``False``, there is no + intersection. + """ + # Make sure it is a numpy array + z = np.asarray(z) + d = np.asarray(d) + lb = np.asarray(lb) + ub = np.asarray(ub) + # Special case when d=0 + if norm(d) == 0: + return 0, 0, False + + # Get values for which d==0 + zero_d = (d == 0) + # If the boundaries are not satisfied for some coordinate + # for which "d" is zero, there is no box-line intersection. + if (z[zero_d] < lb[zero_d]).any() or (z[zero_d] > ub[zero_d]).any(): + intersect = False + return 0, 0, intersect + # Remove values for which d is zero + not_zero_d = np.logical_not(zero_d) + z = z[not_zero_d] + d = d[not_zero_d] + lb = lb[not_zero_d] + ub = ub[not_zero_d] + + # Find a series of intervals (t_lb[i], t_ub[i]). + t_lb = (lb-z) / d + t_ub = (ub-z) / d + # Get the intersection of all those intervals. + ta = max(np.minimum(t_lb, t_ub)) + tb = min(np.maximum(t_lb, t_ub)) + + # Check if intersection is feasible + if ta <= tb: + intersect = True + else: + intersect = False + # Checks to see if intersection happens within vectors length. + if not entire_line: + if tb < 0 or ta > 1: + intersect = False + ta = 0 + tb = 0 + else: + # Restrict intersection interval between 0 and 1. 
+ ta = max(0, ta) + tb = min(1, tb) + + return ta, tb, intersect + + +def box_sphere_intersections(z, d, lb, ub, trust_radius, + entire_line=False, + extra_info=False): + """Find the intersection between segment (or line) and box/sphere constraints. + + Find the intersection between the segment (or line) defined by the + parametric equation ``x(t) = z + t*d``, the rectangular box + ``lb <= x <= ub`` and the ball ``||x|| <= trust_radius``. + + Parameters + ---------- + z : array_like, shape (n,) + Initial point. + d : array_like, shape (n,) + Direction. + lb : array_like, shape (n,) + Lower bounds to each one of the components of ``x``. Used + to delimit the rectangular box. + ub : array_like, shape (n, ) + Upper bounds to each one of the components of ``x``. Used + to delimit the rectangular box. + trust_radius : float + Ball radius. + entire_line : bool, optional + When ``True``, the function returns the intersection between the line + ``x(t) = z + t*d`` (``t`` can assume any value) and the constraints. + When ``False``, the function returns the intersection between the segment + ``x(t) = z + t*d``, ``0 <= t <= 1`` and the constraints. + extra_info : bool, optional + When ``True``, the function returns ``intersect_sphere`` and ``intersect_box``. + + Returns + ------- + ta, tb : float + The line/segment ``x(t) = z + t*d`` is inside the rectangular box and + inside the ball for ``ta <= t <= tb``. + intersect : bool + When ``True``, there is a intersection between the line (or segment) + and both constraints. On the other hand, when ``False``, there is no + intersection. + sphere_info : dict, optional + Dictionary ``{ta, tb, intersect}`` containing the interval ``[ta, tb]`` + for which the line intercepts the ball. And a boolean value indicating + whether the sphere is intersected by the line. + box_info : dict, optional + Dictionary ``{ta, tb, intersect}`` containing the interval ``[ta, tb]`` + for which the line intercepts the box. 
And a boolean value indicating + whether the box is intersected by the line. + """ + ta_b, tb_b, intersect_b = box_intersections(z, d, lb, ub, + entire_line) + ta_s, tb_s, intersect_s = sphere_intersections(z, d, + trust_radius, + entire_line) + ta = np.maximum(ta_b, ta_s) + tb = np.minimum(tb_b, tb_s) + if intersect_b and intersect_s and ta <= tb: + intersect = True + else: + intersect = False + + if extra_info: + sphere_info = {'ta': ta_s, 'tb': tb_s, 'intersect': intersect_s} + box_info = {'ta': ta_b, 'tb': tb_b, 'intersect': intersect_b} + return ta, tb, intersect, sphere_info, box_info + else: + return ta, tb, intersect + + +def inside_box_boundaries(x, lb, ub): + """Check if lb <= x <= ub.""" + return (lb <= x).all() and (x <= ub).all() + + +def reinforce_box_boundaries(x, lb, ub): + """Return clipped value of x""" + return np.minimum(np.maximum(x, lb), ub) + + +def modified_dogleg(A, Y, b, trust_radius, lb, ub): + """Approximately minimize ``1/2*|| A x + b ||^2`` inside trust-region. + + Approximately solve the problem of minimizing ``1/2*|| A x + b ||^2`` + subject to ``||x|| < Delta`` and ``lb <= x <= ub`` using a modification + of the classical dogleg approach. + + Parameters + ---------- + A : LinearOperator (or sparse matrix or ndarray), shape (m, n) + Matrix ``A`` in the minimization problem. It should have + dimension ``(m, n)`` such that ``m < n``. + Y : LinearOperator (or sparse matrix or ndarray), shape (n, m) + LinearOperator that apply the projection matrix + ``Q = A.T inv(A A.T)`` to the vector. The obtained vector + ``y = Q x`` being the minimum norm solution of ``A y = x``. + b : array_like, shape (m,) + Vector ``b``in the minimization problem. + trust_radius: float + Trust radius to be considered. Delimits a sphere boundary + to the problem. + lb : array_like, shape (n,) + Lower bounds to each one of the components of ``x``. + It is expected that ``lb <= 0``, otherwise the algorithm + may fail. 
If ``lb[i] = -Inf``, the lower + bound for the ith component is just ignored. + ub : array_like, shape (n, ) + Upper bounds to each one of the components of ``x``. + It is expected that ``ub >= 0``, otherwise the algorithm + may fail. If ``ub[i] = Inf``, the upper bound for the ith + component is just ignored. + + Returns + ------- + x : array_like, shape (n,) + Solution to the problem. + + Notes + ----- + Based on implementations described in pp. 885-886 from [1]_. + + References + ---------- + .. [1] Byrd, Richard H., Mary E. Hribar, and Jorge Nocedal. + "An interior point algorithm for large-scale nonlinear + programming." SIAM Journal on Optimization 9.4 (1999): 877-900. + """ + # Compute minimum norm minimizer of 1/2*|| A x + b ||^2. + newton_point = -Y.dot(b) + # Check for interior point + if inside_box_boundaries(newton_point, lb, ub) \ + and norm(newton_point) <= trust_radius: + x = newton_point + return x + + # Compute gradient vector ``g = A.T b`` + g = A.T.dot(b) + # Compute Cauchy point + # `cauchy_point = g.T g / (g.T A.T A g)``. + A_g = A.dot(g) + cauchy_point = -np.dot(g, g) / np.dot(A_g, A_g) * g + # Origin + origin_point = np.zeros_like(cauchy_point) + + # Check the segment between cauchy_point and newton_point + # for a possible solution. + z = cauchy_point + p = newton_point - cauchy_point + _, alpha, intersect = box_sphere_intersections(z, p, lb, ub, + trust_radius) + if intersect: + x1 = z + alpha*p + else: + # Check the segment between the origin and cauchy_point + # for a possible solution. + z = origin_point + p = cauchy_point + _, alpha, _ = box_sphere_intersections(z, p, lb, ub, + trust_radius) + x1 = z + alpha*p + + # Check the segment between origin and newton_point + # for a possible solution. + z = origin_point + p = newton_point + _, alpha, _ = box_sphere_intersections(z, p, lb, ub, + trust_radius) + x2 = z + alpha*p + + # Return the best solution among x1 and x2. 
+ if norm(A.dot(x1) + b) < norm(A.dot(x2) + b): + return x1 + else: + return x2 + + +def projected_cg(H, c, Z, Y, b, trust_radius=np.inf, + lb=None, ub=None, tol=None, + max_iter=None, max_infeasible_iter=None, + return_all=False): + """Solve EQP problem with projected CG method. + + Solve equality-constrained quadratic programming problem + ``min 1/2 x.T H x + x.t c`` subject to ``A x + b = 0`` and, + possibly, to trust region constraints ``||x|| < trust_radius`` + and box constraints ``lb <= x <= ub``. + + Parameters + ---------- + H : LinearOperator (or sparse matrix or ndarray), shape (n, n) + Operator for computing ``H v``. + c : array_like, shape (n,) + Gradient of the quadratic objective function. + Z : LinearOperator (or sparse matrix or ndarray), shape (n, n) + Operator for projecting ``x`` into the null space of A. + Y : LinearOperator, sparse matrix, ndarray, shape (n, m) + Operator that, for a given a vector ``b``, compute smallest + norm solution of ``A x + b = 0``. + b : array_like, shape (m,) + Right-hand side of the constraint equation. + trust_radius : float, optional + Trust radius to be considered. By default, uses ``trust_radius=inf``, + which means no trust radius at all. + lb : array_like, shape (n,), optional + Lower bounds to each one of the components of ``x``. + If ``lb[i] = -Inf`` the lower bound for the i-th + component is just ignored (default). + ub : array_like, shape (n, ), optional + Upper bounds to each one of the components of ``x``. + If ``ub[i] = Inf`` the upper bound for the i-th + component is just ignored (default). + tol : float, optional + Tolerance used to interrupt the algorithm. + max_iter : int, optional + Maximum algorithm iterations. Where ``max_inter <= n-m``. + By default, uses ``max_iter = n-m``. + max_infeasible_iter : int, optional + Maximum infeasible (regarding box constraints) iterations the + algorithm is allowed to take. + By default, uses ``max_infeasible_iter = n-m``. 
+ return_all : bool, optional + When ``true``, return the list of all vectors through the iterations. + + Returns + ------- + x : array_like, shape (n,) + Solution of the EQP problem. + info : Dict + Dictionary containing the following: + + - niter : Number of iterations. + - stop_cond : Reason for algorithm termination: + 1. Iteration limit was reached; + 2. Reached the trust-region boundary; + 3. Negative curvature detected; + 4. Tolerance was satisfied. + - allvecs : List containing all intermediary vectors (optional). + - hits_boundary : True if the proposed step is on the boundary + of the trust region. + + Notes + ----- + Implementation of Algorithm 6.2 on [1]_. + + In the absence of spherical and box constraints, for sufficient + iterations, the method returns a truly optimal result. + In the presence of those constraints, the value returned is only + a inexpensive approximation of the optimal value. + + References + ---------- + .. [1] Gould, Nicholas IM, Mary E. Hribar, and Jorge Nocedal. + "On the solution of equality constrained quadratic + programming problems arising in optimization." + SIAM Journal on Scientific Computing 23.4 (2001): 1376-1395. + """ + CLOSE_TO_ZERO = 1e-25 + + n, = np.shape(c) # Number of parameters + m, = np.shape(b) # Number of constraints + + # Initial Values + x = Y.dot(-b) + r = Z.dot(H.dot(x) + c) + g = Z.dot(r) + p = -g + + # Store ``x`` value + if return_all: + allvecs = [x] + # Values for the first iteration + H_p = H.dot(p) + rt_g = norm(g)**2 # g.T g = r.T Z g = r.T g (ref [1]_ p.1389) + + # If x > trust-region the problem does not have a solution. + tr_distance = trust_radius - norm(x) + if tr_distance < 0: + raise ValueError("Trust region problem does not have a solution.") + # If x == trust_radius, then x is the solution + # to the optimization problem, since x is the + # minimum norm solution to Ax=b. 
+ elif tr_distance < CLOSE_TO_ZERO: + info = {'niter': 0, 'stop_cond': 2, 'hits_boundary': True} + if return_all: + allvecs.append(x) + info['allvecs'] = allvecs + return x, info + + # Set default tolerance + if tol is None: + tol = max(min(0.01 * np.sqrt(rt_g), 0.1 * rt_g), CLOSE_TO_ZERO) + # Set default lower and upper bounds + if lb is None: + lb = np.full(n, -np.inf) + if ub is None: + ub = np.full(n, np.inf) + # Set maximum iterations + if max_iter is None: + max_iter = n-m + max_iter = min(max_iter, n-m) + # Set maximum infeasible iterations + if max_infeasible_iter is None: + max_infeasible_iter = n-m + + hits_boundary = False + stop_cond = 1 + counter = 0 + last_feasible_x = np.zeros_like(x) + k = 0 + for i in range(max_iter): + # Stop criteria - Tolerance : r.T g < tol + if rt_g < tol: + stop_cond = 4 + break + k += 1 + # Compute curvature + pt_H_p = H_p.dot(p) + # Stop criteria - Negative curvature + if pt_H_p <= 0: + if np.isinf(trust_radius): + raise ValueError("Negative curvature not allowed " + "for unrestricted problems.") + else: + # Find intersection with constraints + _, alpha, intersect = box_sphere_intersections( + x, p, lb, ub, trust_radius, entire_line=True) + # Update solution + if intersect: + x = x + alpha*p + # Reinforce variables are inside box constraints. + # This is only necessary because of roundoff errors. + x = reinforce_box_boundaries(x, lb, ub) + # Attribute information + stop_cond = 3 + hits_boundary = True + break + + # Get next step + alpha = rt_g / pt_H_p + x_next = x + alpha*p + + # Stop criteria - Hits boundary + if np.linalg.norm(x_next) >= trust_radius: + # Find intersection with box constraints + _, theta, intersect = box_sphere_intersections(x, alpha*p, lb, ub, + trust_radius) + # Update solution + if intersect: + x = x + theta*alpha*p + # Reinforce variables are inside box constraints. + # This is only necessary because of roundoff errors. 
+ x = reinforce_box_boundaries(x, lb, ub) + # Attribute information + stop_cond = 2 + hits_boundary = True + break + + # Check if ``x`` is inside the box and start counter if it is not. + if inside_box_boundaries(x_next, lb, ub): + counter = 0 + else: + counter += 1 + # Whenever outside box constraints keep looking for intersections. + if counter > 0: + _, theta, intersect = box_sphere_intersections(x, alpha*p, lb, ub, + trust_radius) + if intersect: + last_feasible_x = x + theta*alpha*p + # Reinforce variables are inside box constraints. + # This is only necessary because of roundoff errors. + last_feasible_x = reinforce_box_boundaries(last_feasible_x, + lb, ub) + counter = 0 + # Stop after too many infeasible (regarding box constraints) iteration. + if counter > max_infeasible_iter: + break + # Store ``x_next`` value + if return_all: + allvecs.append(x_next) + + # Update residual + r_next = r + alpha*H_p + # Project residual g+ = Z r+ + g_next = Z.dot(r_next) + # Compute conjugate direction step d + rt_g_next = norm(g_next)**2 # g.T g = r.T g (ref [1]_ p.1389) + beta = rt_g_next / rt_g + p = - g_next + beta*p + # Prepare for next iteration + x = x_next + g = g_next + r = g_next + rt_g = norm(g)**2 # g.T g = r.T Z g = r.T g (ref [1]_ p.1389) + H_p = H.dot(p) + + if not inside_box_boundaries(x, lb, ub): + x = last_feasible_x + hits_boundary = True + info = {'niter': k, 'stop_cond': stop_cond, + 'hits_boundary': hits_boundary} + if return_all: + info['allvecs'] = allvecs + return x, info diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/report.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/report.py new file mode 100644 index 0000000000000000000000000000000000000000..5c3b2fb4ef5c90da78ae3f181159140e87393dcf --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/report.py @@ -0,0 +1,51 @@ +"""Progress report printers.""" + +from __future__ import 
annotations + +class ReportBase: + COLUMN_NAMES: list[str] = NotImplemented + COLUMN_WIDTHS: list[int] = NotImplemented + ITERATION_FORMATS: list[str] = NotImplemented + + @classmethod + def print_header(cls): + fmt = ("|" + + "|".join([f"{{:^{x}}}" for x in cls.COLUMN_WIDTHS]) + + "|") + separators = ['-' * x for x in cls.COLUMN_WIDTHS] + print(fmt.format(*cls.COLUMN_NAMES)) + print(fmt.format(*separators)) + + @classmethod + def print_iteration(cls, *args): + iteration_format = [f"{{:{x}}}" for x in cls.ITERATION_FORMATS] + fmt = "|" + "|".join(iteration_format) + "|" + print(fmt.format(*args)) + + @classmethod + def print_footer(cls): + print() + + +class BasicReport(ReportBase): + COLUMN_NAMES = ["niter", "f evals", "CG iter", "obj func", "tr radius", + "opt", "c viol"] + COLUMN_WIDTHS = [7, 7, 7, 13, 10, 10, 10] + ITERATION_FORMATS = ["^7", "^7", "^7", "^+13.4e", + "^10.2e", "^10.2e", "^10.2e"] + + +class SQPReport(ReportBase): + COLUMN_NAMES = ["niter", "f evals", "CG iter", "obj func", "tr radius", + "opt", "c viol", "penalty", "CG stop"] + COLUMN_WIDTHS = [7, 7, 7, 13, 10, 10, 10, 10, 7] + ITERATION_FORMATS = ["^7", "^7", "^7", "^+13.4e", "^10.2e", "^10.2e", + "^10.2e", "^10.2e", "^7"] + + +class IPReport(ReportBase): + COLUMN_NAMES = ["niter", "f evals", "CG iter", "obj func", "tr radius", + "opt", "c viol", "penalty", "barrier param", "CG stop"] + COLUMN_WIDTHS = [7, 7, 7, 13, 10, 10, 10, 10, 13, 7] + ITERATION_FORMATS = ["^7", "^7", "^7", "^+13.4e", "^10.2e", "^10.2e", + "^10.2e", "^10.2e", "^13.2e", "^7"] diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/__init__.cpython-310.pyc 
b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2da108691a4fcbc8cacb7878fcf9f28f92d0136c Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_canonical_constraint.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_canonical_constraint.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30d60497e61ce3193dcd2a4117495cc94ce2fd8b Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_canonical_constraint.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_projections.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_projections.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be421468f3819ea7c8de33de38fef8f084505ea8 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_projections.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_qp_subproblem.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_qp_subproblem.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3c51e2b0a7f795d310fee69524082fc5eb2033e Binary files /dev/null and 
b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_qp_subproblem.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_report.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_report.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4f6d2889e97f9cd2822d7146a9ec471aef23e62 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/__pycache__/test_report.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_canonical_constraint.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_canonical_constraint.py new file mode 100644 index 0000000000000000000000000000000000000000..452b327d02da3b3bd3fab9592bdef4d56d6aff57 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_canonical_constraint.py @@ -0,0 +1,296 @@ +import numpy as np +from numpy.testing import assert_array_equal, assert_equal +from scipy.optimize._constraints import (NonlinearConstraint, Bounds, + PreparedConstraint) +from scipy.optimize._trustregion_constr.canonical_constraint \ + import CanonicalConstraint, initial_constraints_as_canonical + + +def create_quadratic_function(n, m, rng): + a = rng.rand(m) + A = rng.rand(m, n) + H = rng.rand(m, n, n) + HT = np.transpose(H, (1, 2, 0)) + + def fun(x): + return a + A.dot(x) + 0.5 * H.dot(x).dot(x) + + def jac(x): + return A + H.dot(x) + + def hess(x, v): + return HT.dot(v) + + return fun, jac, hess + + +def test_bounds_cases(): + # Test 1: no constraints. 
+ user_constraint = Bounds(-np.inf, np.inf) + x0 = np.array([-1, 2]) + prepared_constraint = PreparedConstraint(user_constraint, x0, False) + c = CanonicalConstraint.from_PreparedConstraint(prepared_constraint) + + assert_equal(c.n_eq, 0) + assert_equal(c.n_ineq, 0) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, []) + assert_array_equal(c_ineq, []) + + J_eq, J_ineq = c.jac(x0) + assert_array_equal(J_eq, np.empty((0, 2))) + assert_array_equal(J_ineq, np.empty((0, 2))) + + assert_array_equal(c.keep_feasible, []) + + # Test 2: infinite lower bound. + user_constraint = Bounds(-np.inf, [0, np.inf, 1], [False, True, True]) + x0 = np.array([-1, -2, -3], dtype=float) + prepared_constraint = PreparedConstraint(user_constraint, x0, False) + c = CanonicalConstraint.from_PreparedConstraint(prepared_constraint) + + assert_equal(c.n_eq, 0) + assert_equal(c.n_ineq, 2) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, []) + assert_array_equal(c_ineq, [-1, -4]) + + J_eq, J_ineq = c.jac(x0) + assert_array_equal(J_eq, np.empty((0, 3))) + assert_array_equal(J_ineq, np.array([[1, 0, 0], [0, 0, 1]])) + + assert_array_equal(c.keep_feasible, [False, True]) + + # Test 3: infinite upper bound. + user_constraint = Bounds([0, 1, -np.inf], np.inf, [True, False, True]) + x0 = np.array([1, 2, 3], dtype=float) + prepared_constraint = PreparedConstraint(user_constraint, x0, False) + c = CanonicalConstraint.from_PreparedConstraint(prepared_constraint) + + assert_equal(c.n_eq, 0) + assert_equal(c.n_ineq, 2) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, []) + assert_array_equal(c_ineq, [-1, -1]) + + J_eq, J_ineq = c.jac(x0) + assert_array_equal(J_eq, np.empty((0, 3))) + assert_array_equal(J_ineq, np.array([[-1, 0, 0], [0, -1, 0]])) + + assert_array_equal(c.keep_feasible, [True, False]) + + # Test 4: interval constraint. 
+ user_constraint = Bounds([-1, -np.inf, 2, 3], [1, np.inf, 10, 3], + [False, True, True, True]) + x0 = np.array([0, 10, 8, 5]) + prepared_constraint = PreparedConstraint(user_constraint, x0, False) + c = CanonicalConstraint.from_PreparedConstraint(prepared_constraint) + + assert_equal(c.n_eq, 1) + assert_equal(c.n_ineq, 4) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, [2]) + assert_array_equal(c_ineq, [-1, -2, -1, -6]) + + J_eq, J_ineq = c.jac(x0) + assert_array_equal(J_eq, [[0, 0, 0, 1]]) + assert_array_equal(J_ineq, [[1, 0, 0, 0], + [0, 0, 1, 0], + [-1, 0, 0, 0], + [0, 0, -1, 0]]) + + assert_array_equal(c.keep_feasible, [False, True, False, True]) + + +def test_nonlinear_constraint(): + n = 3 + m = 5 + rng = np.random.RandomState(0) + x0 = rng.rand(n) + + fun, jac, hess = create_quadratic_function(n, m, rng) + f = fun(x0) + J = jac(x0) + + lb = [-10, 3, -np.inf, -np.inf, -5] + ub = [10, 3, np.inf, 3, np.inf] + user_constraint = NonlinearConstraint( + fun, lb, ub, jac, hess, [True, False, False, True, False]) + + for sparse_jacobian in [False, True]: + prepared_constraint = PreparedConstraint(user_constraint, x0, + sparse_jacobian) + c = CanonicalConstraint.from_PreparedConstraint(prepared_constraint) + + assert_array_equal(c.n_eq, 1) + assert_array_equal(c.n_ineq, 4) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, [f[1] - lb[1]]) + assert_array_equal(c_ineq, [f[3] - ub[3], lb[4] - f[4], + f[0] - ub[0], lb[0] - f[0]]) + + J_eq, J_ineq = c.jac(x0) + if sparse_jacobian: + J_eq = J_eq.toarray() + J_ineq = J_ineq.toarray() + + assert_array_equal(J_eq, J[1, None]) + assert_array_equal(J_ineq, np.vstack((J[3], -J[4], J[0], -J[0]))) + + v_eq = rng.rand(c.n_eq) + v_ineq = rng.rand(c.n_ineq) + v = np.zeros(m) + v[1] = v_eq[0] + v[3] = v_ineq[0] + v[4] = -v_ineq[1] + v[0] = v_ineq[2] - v_ineq[3] + assert_array_equal(c.hess(x0, v_eq, v_ineq), hess(x0, v)) + + assert_array_equal(c.keep_feasible, [True, False, True, True]) + + +def test_concatenation(): + 
rng = np.random.RandomState(0) + n = 4 + x0 = rng.rand(n) + + f1 = x0 + J1 = np.eye(n) + lb1 = [-1, -np.inf, -2, 3] + ub1 = [1, np.inf, np.inf, 3] + bounds = Bounds(lb1, ub1, [False, False, True, False]) + + fun, jac, hess = create_quadratic_function(n, 5, rng) + f2 = fun(x0) + J2 = jac(x0) + lb2 = [-10, 3, -np.inf, -np.inf, -5] + ub2 = [10, 3, np.inf, 5, np.inf] + nonlinear = NonlinearConstraint( + fun, lb2, ub2, jac, hess, [True, False, False, True, False]) + + for sparse_jacobian in [False, True]: + bounds_prepared = PreparedConstraint(bounds, x0, sparse_jacobian) + nonlinear_prepared = PreparedConstraint(nonlinear, x0, sparse_jacobian) + + c1 = CanonicalConstraint.from_PreparedConstraint(bounds_prepared) + c2 = CanonicalConstraint.from_PreparedConstraint(nonlinear_prepared) + c = CanonicalConstraint.concatenate([c1, c2], sparse_jacobian) + + assert_equal(c.n_eq, 2) + assert_equal(c.n_ineq, 7) + + c_eq, c_ineq = c.fun(x0) + assert_array_equal(c_eq, [f1[3] - lb1[3], f2[1] - lb2[1]]) + assert_array_equal(c_ineq, [lb1[2] - f1[2], f1[0] - ub1[0], + lb1[0] - f1[0], f2[3] - ub2[3], + lb2[4] - f2[4], f2[0] - ub2[0], + lb2[0] - f2[0]]) + + J_eq, J_ineq = c.jac(x0) + if sparse_jacobian: + J_eq = J_eq.toarray() + J_ineq = J_ineq.toarray() + + assert_array_equal(J_eq, np.vstack((J1[3], J2[1]))) + assert_array_equal(J_ineq, np.vstack((-J1[2], J1[0], -J1[0], J2[3], + -J2[4], J2[0], -J2[0]))) + + v_eq = rng.rand(c.n_eq) + v_ineq = rng.rand(c.n_ineq) + v = np.zeros(5) + v[1] = v_eq[1] + v[3] = v_ineq[3] + v[4] = -v_ineq[4] + v[0] = v_ineq[5] - v_ineq[6] + H = c.hess(x0, v_eq, v_ineq).dot(np.eye(n)) + assert_array_equal(H, hess(x0, v)) + + assert_array_equal(c.keep_feasible, + [True, False, False, True, False, True, True]) + + +def test_empty(): + x = np.array([1, 2, 3]) + c = CanonicalConstraint.empty(3) + assert_equal(c.n_eq, 0) + assert_equal(c.n_ineq, 0) + + c_eq, c_ineq = c.fun(x) + assert_array_equal(c_eq, []) + assert_array_equal(c_ineq, []) + + J_eq, J_ineq = c.jac(x) + 
assert_array_equal(J_eq, np.empty((0, 3))) + assert_array_equal(J_ineq, np.empty((0, 3))) + + H = c.hess(x, None, None).toarray() + assert_array_equal(H, np.zeros((3, 3))) + + +def test_initial_constraints_as_canonical(): + # rng is only used to generate the coefficients of the quadratic + # function that is used by the nonlinear constraint. + rng = np.random.RandomState(0) + + x0 = np.array([0.5, 0.4, 0.3, 0.2]) + n = len(x0) + + lb1 = [-1, -np.inf, -2, 3] + ub1 = [1, np.inf, np.inf, 3] + bounds = Bounds(lb1, ub1, [False, False, True, False]) + + fun, jac, hess = create_quadratic_function(n, 5, rng) + lb2 = [-10, 3, -np.inf, -np.inf, -5] + ub2 = [10, 3, np.inf, 5, np.inf] + nonlinear = NonlinearConstraint( + fun, lb2, ub2, jac, hess, [True, False, False, True, False]) + + for sparse_jacobian in [False, True]: + bounds_prepared = PreparedConstraint(bounds, x0, sparse_jacobian) + nonlinear_prepared = PreparedConstraint(nonlinear, x0, sparse_jacobian) + + f1 = bounds_prepared.fun.f + J1 = bounds_prepared.fun.J + f2 = nonlinear_prepared.fun.f + J2 = nonlinear_prepared.fun.J + + c_eq, c_ineq, J_eq, J_ineq = initial_constraints_as_canonical( + n, [bounds_prepared, nonlinear_prepared], sparse_jacobian) + + assert_array_equal(c_eq, [f1[3] - lb1[3], f2[1] - lb2[1]]) + assert_array_equal(c_ineq, [lb1[2] - f1[2], f1[0] - ub1[0], + lb1[0] - f1[0], f2[3] - ub2[3], + lb2[4] - f2[4], f2[0] - ub2[0], + lb2[0] - f2[0]]) + + if sparse_jacobian: + J1 = J1.toarray() + J2 = J2.toarray() + J_eq = J_eq.toarray() + J_ineq = J_ineq.toarray() + + assert_array_equal(J_eq, np.vstack((J1[3], J2[1]))) + assert_array_equal(J_ineq, np.vstack((-J1[2], J1[0], -J1[0], J2[3], + -J2[4], J2[0], -J2[0]))) + + +def test_initial_constraints_as_canonical_empty(): + n = 3 + for sparse_jacobian in [False, True]: + c_eq, c_ineq, J_eq, J_ineq = initial_constraints_as_canonical( + n, [], sparse_jacobian) + + assert_array_equal(c_eq, []) + assert_array_equal(c_ineq, []) + + if sparse_jacobian: + J_eq = 
J_eq.toarray() + J_ineq = J_ineq.toarray() + + assert_array_equal(J_eq, np.empty((0, n))) + assert_array_equal(J_ineq, np.empty((0, n))) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_projections.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_projections.py new file mode 100644 index 0000000000000000000000000000000000000000..6ff3c39d649d0ac663d9b71bb906f1daac021118 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_projections.py @@ -0,0 +1,214 @@ +import numpy as np +import scipy.linalg +from scipy.sparse import csc_matrix +from scipy.optimize._trustregion_constr.projections \ + import projections, orthogonality +from numpy.testing import (TestCase, assert_array_almost_equal, + assert_equal, assert_allclose) + +try: + from sksparse.cholmod import cholesky_AAt # noqa: F401 + sksparse_available = True + available_sparse_methods = ("NormalEquation", "AugmentedSystem") +except ImportError: + sksparse_available = False + available_sparse_methods = ("AugmentedSystem",) +available_dense_methods = ('QRFactorization', 'SVDFactorization') + + +class TestProjections(TestCase): + + def test_nullspace_and_least_squares_sparse(self): + A_dense = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + At_dense = A_dense.T + A = csc_matrix(A_dense) + test_points = ([1, 2, 3, 4, 5, 6, 7, 8], + [1, 10, 3, 0, 1, 6, 7, 8], + [1.12, 10, 0, 0, 100000, 6, 0.7, 8]) + + for method in available_sparse_methods: + Z, LS, _ = projections(A, method) + for z in test_points: + # Test if x is in the null_space + x = Z.matvec(z) + assert_array_almost_equal(A.dot(x), 0) + # Test orthogonality + assert_array_almost_equal(orthogonality(A, x), 0) + # Test if x is the least square solution + x = LS.matvec(z) + x2 = scipy.linalg.lstsq(At_dense, z)[0] + assert_array_almost_equal(x, x2) + + def 
test_iterative_refinements_sparse(self): + A_dense = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + A = csc_matrix(A_dense) + test_points = ([1, 2, 3, 4, 5, 6, 7, 8], + [1, 10, 3, 0, 1, 6, 7, 8], + [1.12, 10, 0, 0, 100000, 6, 0.7, 8], + [1, 0, 0, 0, 0, 1, 2, 3+1e-10]) + + for method in available_sparse_methods: + Z, LS, _ = projections(A, method, orth_tol=1e-18, max_refin=100) + for z in test_points: + # Test if x is in the null_space + x = Z.matvec(z) + atol = 1e-13 * abs(x).max() + assert_allclose(A.dot(x), 0, atol=atol) + # Test orthogonality + assert_allclose(orthogonality(A, x), 0, atol=1e-13) + + def test_rowspace_sparse(self): + A_dense = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + A = csc_matrix(A_dense) + test_points = ([1, 2, 3], + [1, 10, 3], + [1.12, 10, 0]) + + for method in available_sparse_methods: + _, _, Y = projections(A, method) + for z in test_points: + # Test if x is solution of A x = z + x = Y.matvec(z) + assert_array_almost_equal(A.dot(x), z) + # Test if x is in the return row space of A + A_ext = np.vstack((A_dense, x)) + assert_equal(np.linalg.matrix_rank(A_dense), + np.linalg.matrix_rank(A_ext)) + + def test_nullspace_and_least_squares_dense(self): + A = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + At = A.T + test_points = ([1, 2, 3, 4, 5, 6, 7, 8], + [1, 10, 3, 0, 1, 6, 7, 8], + [1.12, 10, 0, 0, 100000, 6, 0.7, 8]) + + for method in available_dense_methods: + Z, LS, _ = projections(A, method) + for z in test_points: + # Test if x is in the null_space + x = Z.matvec(z) + assert_array_almost_equal(A.dot(x), 0) + # Test orthogonality + assert_array_almost_equal(orthogonality(A, x), 0) + # Test if x is the least square solution + x = LS.matvec(z) + x2 = scipy.linalg.lstsq(At, z)[0] + assert_array_almost_equal(x, x2) + + def test_compare_dense_and_sparse(self): + D = np.diag(range(1, 101)) + A = 
np.hstack([D, D, D, D]) + A_sparse = csc_matrix(A) + np.random.seed(0) + + Z, LS, Y = projections(A) + Z_sparse, LS_sparse, Y_sparse = projections(A_sparse) + for k in range(20): + z = np.random.normal(size=(400,)) + assert_array_almost_equal(Z.dot(z), Z_sparse.dot(z)) + assert_array_almost_equal(LS.dot(z), LS_sparse.dot(z)) + x = np.random.normal(size=(100,)) + assert_array_almost_equal(Y.dot(x), Y_sparse.dot(x)) + + def test_compare_dense_and_sparse2(self): + D1 = np.diag([-1.7, 1, 0.5]) + D2 = np.diag([1, -0.6, -0.3]) + D3 = np.diag([-0.3, -1.5, 2]) + A = np.hstack([D1, D2, D3]) + A_sparse = csc_matrix(A) + np.random.seed(0) + + Z, LS, Y = projections(A) + Z_sparse, LS_sparse, Y_sparse = projections(A_sparse) + for k in range(1): + z = np.random.normal(size=(9,)) + assert_array_almost_equal(Z.dot(z), Z_sparse.dot(z)) + assert_array_almost_equal(LS.dot(z), LS_sparse.dot(z)) + x = np.random.normal(size=(3,)) + assert_array_almost_equal(Y.dot(x), Y_sparse.dot(x)) + + def test_iterative_refinements_dense(self): + A = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + test_points = ([1, 2, 3, 4, 5, 6, 7, 8], + [1, 10, 3, 0, 1, 6, 7, 8], + [1, 0, 0, 0, 0, 1, 2, 3+1e-10]) + + for method in available_dense_methods: + Z, LS, _ = projections(A, method, orth_tol=1e-18, max_refin=10) + for z in test_points: + # Test if x is in the null_space + x = Z.matvec(z) + assert_allclose(A.dot(x), 0, rtol=0, atol=2.5e-14) + # Test orthogonality + assert_allclose(orthogonality(A, x), 0, rtol=0, atol=5e-16) + + def test_rowspace_dense(self): + A = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + test_points = ([1, 2, 3], + [1, 10, 3], + [1.12, 10, 0]) + + for method in available_dense_methods: + _, _, Y = projections(A, method) + for z in test_points: + # Test if x is solution of A x = z + x = Y.matvec(z) + assert_array_almost_equal(A.dot(x), z) + # Test if x is in the return row space of A + A_ext 
= np.vstack((A, x)) + assert_equal(np.linalg.matrix_rank(A), + np.linalg.matrix_rank(A_ext)) + + +class TestOrthogonality(TestCase): + + def test_dense_matrix(self): + A = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + test_vectors = ([-1.98931144, -1.56363389, + -0.84115584, 2.2864762, + 5.599141, 0.09286976, + 1.37040802, -0.28145812], + [697.92794044, -4091.65114008, + -3327.42316335, 836.86906951, + 99434.98929065, -1285.37653682, + -4109.21503806, 2935.29289083]) + test_expected_orth = (0, 0) + + for i in range(len(test_vectors)): + x = test_vectors[i] + orth = test_expected_orth[i] + assert_array_almost_equal(orthogonality(A, x), orth) + + def test_sparse_matrix(self): + A = np.array([[1, 2, 3, 4, 0, 5, 0, 7], + [0, 8, 7, 0, 1, 5, 9, 0], + [1, 0, 0, 0, 0, 1, 2, 3]]) + A = csc_matrix(A) + test_vectors = ([-1.98931144, -1.56363389, + -0.84115584, 2.2864762, + 5.599141, 0.09286976, + 1.37040802, -0.28145812], + [697.92794044, -4091.65114008, + -3327.42316335, 836.86906951, + 99434.98929065, -1285.37653682, + -4109.21503806, 2935.29289083]) + test_expected_orth = (0, 0) + + for i in range(len(test_vectors)): + x = test_vectors[i] + orth = test_expected_orth[i] + assert_array_almost_equal(orthogonality(A, x), orth) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py new file mode 100644 index 0000000000000000000000000000000000000000..e0235caace6c19563efc31fdf4b8e41d9d81819b --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_qp_subproblem.py @@ -0,0 +1,645 @@ +import numpy as np +from scipy.sparse import csc_matrix +from scipy.optimize._trustregion_constr.qp_subproblem \ + import (eqp_kktfact, + projected_cg, + box_intersections, + sphere_intersections, + box_sphere_intersections, + 
modified_dogleg) +from scipy.optimize._trustregion_constr.projections \ + import projections +from numpy.testing import TestCase, assert_array_almost_equal, assert_equal +import pytest + + +class TestEQPDirectFactorization(TestCase): + + # From Example 16.2 Nocedal/Wright "Numerical + # Optimization" p.452. + def test_nocedal_example(self): + H = csc_matrix([[6, 2, 1], + [2, 5, 2], + [1, 2, 4]]) + A = csc_matrix([[1, 0, 1], + [0, 1, 1]]) + c = np.array([-8, -3, -3]) + b = -np.array([3, 0]) + x, lagrange_multipliers = eqp_kktfact(H, c, A, b) + assert_array_almost_equal(x, [2, -1, 1]) + assert_array_almost_equal(lagrange_multipliers, [3, -2]) + + +class TestSphericalBoundariesIntersections(TestCase): + + def test_2d_sphere_constraints(self): + # Interior inicial point + ta, tb, intersect = sphere_intersections([0, 0], + [1, 0], 0.5) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # No intersection between line and circle + ta, tb, intersect = sphere_intersections([2, 0], + [0, 1], 1) + assert_equal(intersect, False) + + # Outside initial point pointing toward outside the circle + ta, tb, intersect = sphere_intersections([2, 0], + [1, 0], 1) + assert_equal(intersect, False) + + # Outside initial point pointing toward inside the circle + ta, tb, intersect = sphere_intersections([2, 0], + [-1, 0], 1.5) + assert_array_almost_equal([ta, tb], [0.5, 1]) + assert_equal(intersect, True) + + # Initial point on the boundary + ta, tb, intersect = sphere_intersections([2, 0], + [1, 0], 2) + assert_array_almost_equal([ta, tb], [0, 0]) + assert_equal(intersect, True) + + def test_2d_sphere_constraints_line_intersections(self): + # Interior initial point + ta, tb, intersect = sphere_intersections([0, 0], + [1, 0], 0.5, + entire_line=True) + assert_array_almost_equal([ta, tb], [-0.5, 0.5]) + assert_equal(intersect, True) + + # No intersection between line and circle + ta, tb, intersect = sphere_intersections([2, 0], + [0, 1], 1, + entire_line=True) 
+ assert_equal(intersect, False) + + # Outside initial point pointing toward outside the circle + ta, tb, intersect = sphere_intersections([2, 0], + [1, 0], 1, + entire_line=True) + assert_array_almost_equal([ta, tb], [-3, -1]) + assert_equal(intersect, True) + + # Outside initial point pointing toward inside the circle + ta, tb, intersect = sphere_intersections([2, 0], + [-1, 0], 1.5, + entire_line=True) + assert_array_almost_equal([ta, tb], [0.5, 3.5]) + assert_equal(intersect, True) + + # Initial point on the boundary + ta, tb, intersect = sphere_intersections([2, 0], + [1, 0], 2, + entire_line=True) + assert_array_almost_equal([ta, tb], [-4, 0]) + assert_equal(intersect, True) + + +class TestBoxBoundariesIntersections(TestCase): + + def test_2d_box_constraints(self): + # Box constraint in the direction of vector d + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [1, 1], [3, 3]) + assert_array_almost_equal([ta, tb], [0.5, 1]) + assert_equal(intersect, True) + + # Negative direction + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [1, -3], [3, -1]) + assert_equal(intersect, False) + + # Some constraints are absent (set to +/- inf) + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-np.inf, 1], + [np.inf, np.inf]) + assert_array_almost_equal([ta, tb], [0.5, 1]) + assert_equal(intersect, True) + + # Intersect on the face of the box + ta, tb, intersect = box_intersections([1, 0], [0, 1], + [1, 1], [3, 3]) + assert_array_almost_equal([ta, tb], [1, 1]) + assert_equal(intersect, True) + + # Interior initial point + ta, tb, intersect = box_intersections([0, 0], [4, 4], + [-2, -3], [3, 2]) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # No intersection between line and box constraints + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-3, -3], [-1, -1]) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-3, 3], [-1, 1]) + assert_equal(intersect, False) + ta, 
tb, intersect = box_intersections([2, 0], [0, 2], + [-3, -np.inf], + [-1, np.inf]) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([0, 0], [1, 100], + [1, 1], [3, 3]) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([0.99, 0], [0, 2], + [1, 1], [3, 3]) + assert_equal(intersect, False) + + # Initial point on the boundary + ta, tb, intersect = box_intersections([2, 2], [0, 1], + [-2, -2], [2, 2]) + assert_array_almost_equal([ta, tb], [0, 0]) + assert_equal(intersect, True) + + def test_2d_box_constraints_entire_line(self): + # Box constraint in the direction of vector d + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [1, 1], [3, 3], + entire_line=True) + assert_array_almost_equal([ta, tb], [0.5, 1.5]) + assert_equal(intersect, True) + + # Negative direction + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [1, -3], [3, -1], + entire_line=True) + assert_array_almost_equal([ta, tb], [-1.5, -0.5]) + assert_equal(intersect, True) + + # Some constraints are absent (set to +/- inf) + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-np.inf, 1], + [np.inf, np.inf], + entire_line=True) + assert_array_almost_equal([ta, tb], [0.5, np.inf]) + assert_equal(intersect, True) + + # Intersect on the face of the box + ta, tb, intersect = box_intersections([1, 0], [0, 1], + [1, 1], [3, 3], + entire_line=True) + assert_array_almost_equal([ta, tb], [1, 3]) + assert_equal(intersect, True) + + # Interior initial pointoint + ta, tb, intersect = box_intersections([0, 0], [4, 4], + [-2, -3], [3, 2], + entire_line=True) + assert_array_almost_equal([ta, tb], [-0.5, 0.5]) + assert_equal(intersect, True) + + # No intersection between line and box constraints + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-3, -3], [-1, -1], + entire_line=True) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([2, 0], [0, 2], + [-3, 3], [-1, 1], + entire_line=True) + assert_equal(intersect, False) + ta, 
tb, intersect = box_intersections([2, 0], [0, 2], + [-3, -np.inf], + [-1, np.inf], + entire_line=True) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([0, 0], [1, 100], + [1, 1], [3, 3], + entire_line=True) + assert_equal(intersect, False) + ta, tb, intersect = box_intersections([0.99, 0], [0, 2], + [1, 1], [3, 3], + entire_line=True) + assert_equal(intersect, False) + + # Initial point on the boundary + ta, tb, intersect = box_intersections([2, 2], [0, 1], + [-2, -2], [2, 2], + entire_line=True) + assert_array_almost_equal([ta, tb], [-4, 0]) + assert_equal(intersect, True) + + def test_3d_box_constraints(self): + # Simple case + ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, 1], + [1, 1, 1], [3, 3, 3]) + assert_array_almost_equal([ta, tb], [1, 1]) + assert_equal(intersect, True) + + # Negative direction + ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, -1], + [1, 1, 1], [3, 3, 3]) + assert_equal(intersect, False) + + # Interior point + ta, tb, intersect = box_intersections([2, 2, 2], [0, -1, 1], + [1, 1, 1], [3, 3, 3]) + assert_array_almost_equal([ta, tb], [0, 1]) + assert_equal(intersect, True) + + def test_3d_box_constraints_entire_line(self): + # Simple case + ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, 1], + [1, 1, 1], [3, 3, 3], + entire_line=True) + assert_array_almost_equal([ta, tb], [1, 3]) + assert_equal(intersect, True) + + # Negative direction + ta, tb, intersect = box_intersections([1, 1, 0], [0, 0, -1], + [1, 1, 1], [3, 3, 3], + entire_line=True) + assert_array_almost_equal([ta, tb], [-3, -1]) + assert_equal(intersect, True) + + # Interior point + ta, tb, intersect = box_intersections([2, 2, 2], [0, -1, 1], + [1, 1, 1], [3, 3, 3], + entire_line=True) + assert_array_almost_equal([ta, tb], [-1, 1]) + assert_equal(intersect, True) + + +class TestBoxSphereBoundariesIntersections(TestCase): + + def test_2d_box_constraints(self): + # Both constraints are active + ta, tb, intersect = 
box_sphere_intersections([1, 1], [-2, 2], + [-1, -2], [1, 2], 2, + entire_line=False) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # None of the constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-1, 1], + [-1, -3], [1, 3], 10, + entire_line=False) + assert_array_almost_equal([ta, tb], [0, 1]) + assert_equal(intersect, True) + + # Box constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [-1, -3], [1, 3], 10, + entire_line=False) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # Spherical constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [-1, -3], [1, 3], 2, + entire_line=False) + assert_array_almost_equal([ta, tb], [0, 0.25]) + assert_equal(intersect, True) + + # Infeasible problems + ta, tb, intersect = box_sphere_intersections([2, 2], [-4, 4], + [-1, -3], [1, 3], 2, + entire_line=False) + assert_equal(intersect, False) + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [2, 4], [2, 4], 2, + entire_line=False) + assert_equal(intersect, False) + + def test_2d_box_constraints_entire_line(self): + # Both constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-2, 2], + [-1, -2], [1, 2], 2, + entire_line=True) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # None of the constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-1, 1], + [-1, -3], [1, 3], 10, + entire_line=True) + assert_array_almost_equal([ta, tb], [0, 2]) + assert_equal(intersect, True) + + # Box constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [-1, -3], [1, 3], 10, + entire_line=True) + assert_array_almost_equal([ta, tb], [0, 0.5]) + assert_equal(intersect, True) + + # Spherical constraints are active + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [-1, -3], [1, 3], 2, + 
entire_line=True) + assert_array_almost_equal([ta, tb], [0, 0.25]) + assert_equal(intersect, True) + + # Infeasible problems + ta, tb, intersect = box_sphere_intersections([2, 2], [-4, 4], + [-1, -3], [1, 3], 2, + entire_line=True) + assert_equal(intersect, False) + ta, tb, intersect = box_sphere_intersections([1, 1], [-4, 4], + [2, 4], [2, 4], 2, + entire_line=True) + assert_equal(intersect, False) + + +class TestModifiedDogleg(TestCase): + + def test_cauchypoint_equalsto_newtonpoint(self): + A = np.array([[1, 8]]) + b = np.array([-16]) + _, _, Y = projections(A) + newton_point = np.array([0.24615385, 1.96923077]) + + # Newton point inside boundaries + x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf], [np.inf, np.inf]) + assert_array_almost_equal(x, newton_point) + + # Spherical constraint active + x = modified_dogleg(A, Y, b, 1, [-np.inf, -np.inf], [np.inf, np.inf]) + assert_array_almost_equal(x, newton_point/np.linalg.norm(newton_point)) + + # Box constraints active + x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf], [0.1, np.inf]) + assert_array_almost_equal(x, (newton_point/newton_point[0]) * 0.1) + + def test_3d_example(self): + A = np.array([[1, 8, 1], + [4, 2, 2]]) + b = np.array([-16, 2]) + Z, LS, Y = projections(A) + + newton_point = np.array([-1.37090909, 2.23272727, -0.49090909]) + cauchy_point = np.array([0.11165723, 1.73068711, 0.16748585]) + origin = np.zeros_like(newton_point) + + # newton_point inside boundaries + x = modified_dogleg(A, Y, b, 3, [-np.inf, -np.inf, -np.inf], + [np.inf, np.inf, np.inf]) + assert_array_almost_equal(x, newton_point) + + # line between cauchy_point and newton_point contains best point + # (spherical constraint is active). 
+ x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf, -np.inf], + [np.inf, np.inf, np.inf]) + z = cauchy_point + d = newton_point-cauchy_point + t = ((x-z)/(d)) + assert_array_almost_equal(t, np.full(3, 0.40807330)) + assert_array_almost_equal(np.linalg.norm(x), 2) + + # line between cauchy_point and newton_point contains best point + # (box constraint is active). + x = modified_dogleg(A, Y, b, 5, [-1, -np.inf, -np.inf], + [np.inf, np.inf, np.inf]) + z = cauchy_point + d = newton_point-cauchy_point + t = ((x-z)/(d)) + assert_array_almost_equal(t, np.full(3, 0.7498195)) + assert_array_almost_equal(x[0], -1) + + # line between origin and cauchy_point contains best point + # (spherical constraint is active). + x = modified_dogleg(A, Y, b, 1, [-np.inf, -np.inf, -np.inf], + [np.inf, np.inf, np.inf]) + z = origin + d = cauchy_point + t = ((x-z)/(d)) + assert_array_almost_equal(t, np.full(3, 0.573936265)) + assert_array_almost_equal(np.linalg.norm(x), 1) + + # line between origin and newton_point contains best point + # (box constraint is active). + x = modified_dogleg(A, Y, b, 2, [-np.inf, -np.inf, -np.inf], + [np.inf, 1, np.inf]) + z = origin + d = newton_point + t = ((x-z)/(d)) + assert_array_almost_equal(t, np.full(3, 0.4478827364)) + assert_array_almost_equal(x[1], 1) + + +class TestProjectCG(TestCase): + + # From Example 16.2 Nocedal/Wright "Numerical + # Optimization" p.452. 
+ def test_nocedal_example(self): + H = csc_matrix([[6, 2, 1], + [2, 5, 2], + [1, 2, 4]]) + A = csc_matrix([[1, 0, 1], + [0, 1, 1]]) + c = np.array([-8, -3, -3]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b) + assert_equal(info["stop_cond"], 4) + assert_equal(info["hits_boundary"], False) + assert_array_almost_equal(x, [2, -1, 1]) + + def test_compare_with_direct_fact(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, tol=0) + x_kkt, _ = eqp_kktfact(H, c, A, b) + assert_equal(info["stop_cond"], 1) + assert_equal(info["hits_boundary"], False) + assert_array_almost_equal(x, x_kkt) + + def test_trust_region_infeasible(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + trust_radius = 1 + Z, _, Y = projections(A) + with pytest.raises(ValueError): + projected_cg(H, c, Z, Y, b, trust_radius=trust_radius) + + def test_trust_region_barely_feasible(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + trust_radius = 2.32379000772445021283 + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + trust_radius=trust_radius) + assert_equal(info["stop_cond"], 2) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(np.linalg.norm(x), trust_radius) + assert_array_almost_equal(x, -Y.dot(b)) + + def test_hits_boundary(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + 
trust_radius = 3 + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + trust_radius=trust_radius) + assert_equal(info["stop_cond"], 2) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(np.linalg.norm(x), trust_radius) + + def test_negative_curvature_unconstrained(self): + H = csc_matrix([[1, 2, 1, 3], + [2, 0, 2, 4], + [1, 2, 0, 2], + [3, 4, 2, 0]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 0, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + with pytest.raises(ValueError): + projected_cg(H, c, Z, Y, b, tol=0) + + def test_negative_curvature(self): + H = csc_matrix([[1, 2, 1, 3], + [2, 0, 2, 4], + [1, 2, 0, 2], + [3, 4, 2, 0]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 0, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + trust_radius = 1000 + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + trust_radius=trust_radius) + assert_equal(info["stop_cond"], 3) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(np.linalg.norm(x), trust_radius) + + # The box constraints are inactive at the solution but + # are active during the iterations. + def test_inactive_box_constraints(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + lb=[0.5, -np.inf, + -np.inf, -np.inf], + return_all=True) + x_kkt, _ = eqp_kktfact(H, c, A, b) + assert_equal(info["stop_cond"], 1) + assert_equal(info["hits_boundary"], False) + assert_array_almost_equal(x, x_kkt) + + # The box constraints active and the termination is + # by maximum iterations (infeasible interaction). 
+ def test_active_box_constraints_maximum_iterations_reached(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + lb=[0.8, -np.inf, + -np.inf, -np.inf], + return_all=True) + assert_equal(info["stop_cond"], 1) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(A.dot(x), -b) + assert_array_almost_equal(x[0], 0.8) + + # The box constraints are active and the termination is + # because it hits boundary (without infeasible interaction). + def test_active_box_constraints_hits_boundaries(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + trust_radius = 3 + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + ub=[np.inf, np.inf, 1.6, np.inf], + trust_radius=trust_radius, + return_all=True) + assert_equal(info["stop_cond"], 2) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(x[2], 1.6) + + # The box constraints are active and the termination is + # because it hits boundary (infeasible interaction). 
+ def test_active_box_constraints_hits_boundaries_infeasible_iter(self): + H = csc_matrix([[6, 2, 1, 3], + [2, 5, 2, 4], + [1, 2, 4, 5], + [3, 4, 5, 7]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 1, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + trust_radius = 4 + Z, _, Y = projections(A) + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + ub=[np.inf, 0.1, np.inf, np.inf], + trust_radius=trust_radius, + return_all=True) + assert_equal(info["stop_cond"], 2) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(x[1], 0.1) + + # The box constraints are active and the termination is + # because it hits boundary (no infeasible interaction). + def test_active_box_constraints_negative_curvature(self): + H = csc_matrix([[1, 2, 1, 3], + [2, 0, 2, 4], + [1, 2, 0, 2], + [3, 4, 2, 0]]) + A = csc_matrix([[1, 0, 1, 0], + [0, 1, 0, 1]]) + c = np.array([-2, -3, -3, 1]) + b = -np.array([3, 0]) + Z, _, Y = projections(A) + trust_radius = 1000 + x, info = projected_cg(H, c, Z, Y, b, + tol=0, + ub=[np.inf, np.inf, 100, np.inf], + trust_radius=trust_radius) + assert_equal(info["stop_cond"], 3) + assert_equal(info["hits_boundary"], True) + assert_array_almost_equal(x[2], 100) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_report.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_report.py new file mode 100644 index 0000000000000000000000000000000000000000..c82796fea723ab043971564306d4b76bdf9f0380 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tests/test_report.py @@ -0,0 +1,34 @@ +import pytest +import numpy as np +from scipy.optimize import minimize, Bounds + +def test_gh10880(): + # checks that verbose reporting works with trust-constr for + # bound-contrained problems + bnds = Bounds(1, 2) + opts = {'maxiter': 1000, 'verbose': 2} + minimize(lambda x: x**2, x0=2., method='trust-constr', + bounds=bnds, 
options=opts) + + opts = {'maxiter': 1000, 'verbose': 3} + minimize(lambda x: x**2, x0=2., method='trust-constr', + bounds=bnds, options=opts) + +@pytest.mark.xslow +def test_gh12922(): + # checks that verbose reporting works with trust-constr for + # general constraints + def objective(x): + return np.array([(np.sum((x+1)**4))]) + + cons = {'type': 'ineq', 'fun': lambda x: -x[0]**2} + n = 25 + x0 = np.linspace(-5, 5, n) + + opts = {'maxiter': 1000, 'verbose': 2} + minimize(objective, x0=x0, method='trust-constr', + constraints=cons, options=opts) + + opts = {'maxiter': 1000, 'verbose': 3} + minimize(objective, x0=x0, method='trust-constr', + constraints=cons, options=opts) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tr_interior_point.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tr_interior_point.py new file mode 100644 index 0000000000000000000000000000000000000000..121143fad2a8df3a8986beffc5043622d9ace993 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_constr/tr_interior_point.py @@ -0,0 +1,346 @@ +"""Trust-region interior point method. + +References +---------- +.. [1] Byrd, Richard H., Mary E. Hribar, and Jorge Nocedal. + "An interior point algorithm for large-scale nonlinear + programming." SIAM Journal on Optimization 9.4 (1999): 877-900. +.. [2] Byrd, Richard H., Guanghui Liu, and Jorge Nocedal. + "On the local behavior of an interior point method for + nonlinear programming." Numerical analysis 1997 (1997): 37-56. +.. [3] Nocedal, Jorge, and Stephen J. Wright. "Numerical optimization" + Second Edition (2006). 
+""" + +import scipy.sparse as sps +import numpy as np +from .equality_constrained_sqp import equality_constrained_sqp +from scipy.sparse.linalg import LinearOperator + +__all__ = ['tr_interior_point'] + + +class BarrierSubproblem: + """ + Barrier optimization problem: + minimize fun(x) - barrier_parameter*sum(log(s)) + subject to: constr_eq(x) = 0 + constr_ineq(x) + s = 0 + """ + + def __init__(self, x0, s0, fun, grad, lagr_hess, n_vars, n_ineq, n_eq, + constr, jac, barrier_parameter, tolerance, + enforce_feasibility, global_stop_criteria, + xtol, fun0, grad0, constr_ineq0, jac_ineq0, constr_eq0, + jac_eq0): + # Store parameters + self.n_vars = n_vars + self.x0 = x0 + self.s0 = s0 + self.fun = fun + self.grad = grad + self.lagr_hess = lagr_hess + self.constr = constr + self.jac = jac + self.barrier_parameter = barrier_parameter + self.tolerance = tolerance + self.n_eq = n_eq + self.n_ineq = n_ineq + self.enforce_feasibility = enforce_feasibility + self.global_stop_criteria = global_stop_criteria + self.xtol = xtol + self.fun0 = self._compute_function(fun0, constr_ineq0, s0) + self.grad0 = self._compute_gradient(grad0) + self.constr0 = self._compute_constr(constr_ineq0, constr_eq0, s0) + self.jac0 = self._compute_jacobian(jac_eq0, jac_ineq0, s0) + self.terminate = False + + def update(self, barrier_parameter, tolerance): + self.barrier_parameter = barrier_parameter + self.tolerance = tolerance + + def get_slack(self, z): + return z[self.n_vars:self.n_vars+self.n_ineq] + + def get_variables(self, z): + return z[:self.n_vars] + + def function_and_constraints(self, z): + """Returns barrier function and constraints at given point. 
+ + For z = [x, s], returns barrier function: + function(z) = fun(x) - barrier_parameter*sum(log(s)) + and barrier constraints: + constraints(z) = [ constr_eq(x) ] + [ constr_ineq(x) + s ] + + """ + # Get variables and slack variables + x = self.get_variables(z) + s = self.get_slack(z) + # Compute function and constraints + f = self.fun(x) + c_eq, c_ineq = self.constr(x) + # Return objective function and constraints + return (self._compute_function(f, c_ineq, s), + self._compute_constr(c_ineq, c_eq, s)) + + def _compute_function(self, f, c_ineq, s): + # Use technique from Nocedal and Wright book, ref [3]_, p.576, + # to guarantee constraints from `enforce_feasibility` + # stay feasible along iterations. + s[self.enforce_feasibility] = -c_ineq[self.enforce_feasibility] + log_s = [np.log(s_i) if s_i > 0 else -np.inf for s_i in s] + # Compute barrier objective function + return f - self.barrier_parameter*np.sum(log_s) + + def _compute_constr(self, c_ineq, c_eq, s): + # Compute barrier constraint + return np.hstack((c_eq, + c_ineq + s)) + + def scaling(self, z): + """Returns scaling vector. + Given by: + scaling = [ones(n_vars), s] + """ + s = self.get_slack(z) + diag_elements = np.hstack((np.ones(self.n_vars), s)) + + # Diagonal matrix + def matvec(vec): + return diag_elements*vec + return LinearOperator((self.n_vars+self.n_ineq, + self.n_vars+self.n_ineq), + matvec) + + def gradient_and_jacobian(self, z): + """Returns scaled gradient. + + Return scaled gradient: + gradient = [ grad(x) ] + [ -barrier_parameter*ones(n_ineq) ] + and scaled Jacobian matrix: + jacobian = [ jac_eq(x) 0 ] + [ jac_ineq(x) S ] + Both of them scaled by the previously defined scaling factor. 
+ """ + # Get variables and slack variables + x = self.get_variables(z) + s = self.get_slack(z) + # Compute first derivatives + g = self.grad(x) + J_eq, J_ineq = self.jac(x) + # Return gradient and Jacobian + return (self._compute_gradient(g), + self._compute_jacobian(J_eq, J_ineq, s)) + + def _compute_gradient(self, g): + return np.hstack((g, -self.barrier_parameter*np.ones(self.n_ineq))) + + def _compute_jacobian(self, J_eq, J_ineq, s): + if self.n_ineq == 0: + return J_eq + else: + if sps.issparse(J_eq) or sps.issparse(J_ineq): + # It is expected that J_eq and J_ineq + # are already `csr_matrix` because of + # the way ``BoxConstraint``, ``NonlinearConstraint`` + # and ``LinearConstraint`` are defined. + J_eq = sps.csr_matrix(J_eq) + J_ineq = sps.csr_matrix(J_ineq) + return self._assemble_sparse_jacobian(J_eq, J_ineq, s) + else: + S = np.diag(s) + zeros = np.zeros((self.n_eq, self.n_ineq)) + # Convert to matrix + if sps.issparse(J_ineq): + J_ineq = J_ineq.toarray() + if sps.issparse(J_eq): + J_eq = J_eq.toarray() + # Concatenate matrices + return np.block([[J_eq, zeros], + [J_ineq, S]]) + + def _assemble_sparse_jacobian(self, J_eq, J_ineq, s): + """Assemble sparse Jacobian given its components. + + Given ``J_eq``, ``J_ineq`` and ``s`` returns: + jacobian = [ J_eq, 0 ] + [ J_ineq, diag(s) ] + + It is equivalent to: + sps.bmat([[ J_eq, None ], + [ J_ineq, diag(s) ]], "csr") + but significantly more efficient for this + given structure. 
+ """ + n_vars, n_ineq, n_eq = self.n_vars, self.n_ineq, self.n_eq + J_aux = sps.vstack([J_eq, J_ineq], "csr") + indptr, indices, data = J_aux.indptr, J_aux.indices, J_aux.data + new_indptr = indptr + np.hstack((np.zeros(n_eq, dtype=int), + np.arange(n_ineq+1, dtype=int))) + size = indices.size+n_ineq + new_indices = np.empty(size) + new_data = np.empty(size) + mask = np.full(size, False, bool) + mask[new_indptr[-n_ineq:]-1] = True + new_indices[mask] = n_vars+np.arange(n_ineq) + new_indices[~mask] = indices + new_data[mask] = s + new_data[~mask] = data + J = sps.csr_matrix((new_data, new_indices, new_indptr), + (n_eq + n_ineq, n_vars + n_ineq)) + return J + + def lagrangian_hessian_x(self, z, v): + """Returns Lagrangian Hessian (in relation to `x`) -> Hx""" + x = self.get_variables(z) + # Get lagrange multipliers related to nonlinear equality constraints + v_eq = v[:self.n_eq] + # Get lagrange multipliers related to nonlinear ineq. constraints + v_ineq = v[self.n_eq:self.n_eq+self.n_ineq] + lagr_hess = self.lagr_hess + return lagr_hess(x, v_eq, v_ineq) + + def lagrangian_hessian_s(self, z, v): + """Returns scaled Lagrangian Hessian (in relation to`s`) -> S Hs S""" + s = self.get_slack(z) + # Using the primal formulation: + # S Hs S = diag(s)*diag(barrier_parameter/s**2)*diag(s). + # Reference [1]_ p. 882, formula (3.1) + primal = self.barrier_parameter + # Using the primal-dual formulation + # S Hs S = diag(s)*diag(v/s)*diag(s) + # Reference [1]_ p. 883, formula (3.11) + primal_dual = v[-self.n_ineq:]*s + # Uses the primal-dual formulation for + # positives values of v_ineq, and primal + # formulation for the remaining ones. 
+ return np.where(v[-self.n_ineq:] > 0, primal_dual, primal) + + def lagrangian_hessian(self, z, v): + """Returns scaled Lagrangian Hessian""" + # Compute Hessian in relation to x and s + Hx = self.lagrangian_hessian_x(z, v) + if self.n_ineq > 0: + S_Hs_S = self.lagrangian_hessian_s(z, v) + + # The scaled Lagragian Hessian is: + # [ Hx 0 ] + # [ 0 S Hs S ] + def matvec(vec): + vec_x = self.get_variables(vec) + vec_s = self.get_slack(vec) + if self.n_ineq > 0: + return np.hstack((Hx.dot(vec_x), S_Hs_S*vec_s)) + else: + return Hx.dot(vec_x) + return LinearOperator((self.n_vars+self.n_ineq, + self.n_vars+self.n_ineq), + matvec) + + def stop_criteria(self, state, z, last_iteration_failed, + optimality, constr_violation, + trust_radius, penalty, cg_info): + """Stop criteria to the barrier problem. + The criteria here proposed is similar to formula (2.3) + from [1]_, p.879. + """ + x = self.get_variables(z) + if self.global_stop_criteria(state, x, + last_iteration_failed, + trust_radius, penalty, + cg_info, + self.barrier_parameter, + self.tolerance): + self.terminate = True + return True + else: + g_cond = (optimality < self.tolerance and + constr_violation < self.tolerance) + x_cond = trust_radius < self.xtol + return g_cond or x_cond + + +def tr_interior_point(fun, grad, lagr_hess, n_vars, n_ineq, n_eq, + constr, jac, x0, fun0, grad0, + constr_ineq0, jac_ineq0, constr_eq0, + jac_eq0, stop_criteria, + enforce_feasibility, xtol, state, + initial_barrier_parameter, + initial_tolerance, + initial_penalty, + initial_trust_radius, + factorization_method): + """Trust-region interior points method. + + Solve problem: + minimize fun(x) + subject to: constr_ineq(x) <= 0 + constr_eq(x) = 0 + using trust-region interior point method described in [1]_. + """ + # BOUNDARY_PARAMETER controls the decrease on the slack + # variables. Represents ``tau`` from [1]_ p.885, formula (3.18). 
+ BOUNDARY_PARAMETER = 0.995 + # BARRIER_DECAY_RATIO controls the decay of the barrier parameter + # and of the subproblem toloerance. Represents ``theta`` from [1]_ p.879. + BARRIER_DECAY_RATIO = 0.2 + # TRUST_ENLARGEMENT controls the enlargement on trust radius + # after each iteration + TRUST_ENLARGEMENT = 5 + + # Default enforce_feasibility + if enforce_feasibility is None: + enforce_feasibility = np.zeros(n_ineq, bool) + # Initial Values + barrier_parameter = initial_barrier_parameter + tolerance = initial_tolerance + trust_radius = initial_trust_radius + # Define initial value for the slack variables + s0 = np.maximum(-1.5*constr_ineq0, np.ones(n_ineq)) + # Define barrier subproblem + subprob = BarrierSubproblem( + x0, s0, fun, grad, lagr_hess, n_vars, n_ineq, n_eq, constr, jac, + barrier_parameter, tolerance, enforce_feasibility, + stop_criteria, xtol, fun0, grad0, constr_ineq0, jac_ineq0, + constr_eq0, jac_eq0) + # Define initial parameter for the first iteration. + z = np.hstack((x0, s0)) + fun0_subprob, constr0_subprob = subprob.fun0, subprob.constr0 + grad0_subprob, jac0_subprob = subprob.grad0, subprob.jac0 + # Define trust region bounds + trust_lb = np.hstack((np.full(subprob.n_vars, -np.inf), + np.full(subprob.n_ineq, -BOUNDARY_PARAMETER))) + trust_ub = np.full(subprob.n_vars+subprob.n_ineq, np.inf) + + # Solves a sequence of barrier problems + while True: + # Solve SQP subproblem + z, state = equality_constrained_sqp( + subprob.function_and_constraints, + subprob.gradient_and_jacobian, + subprob.lagrangian_hessian, + z, fun0_subprob, grad0_subprob, + constr0_subprob, jac0_subprob, subprob.stop_criteria, + state, initial_penalty, trust_radius, + factorization_method, trust_lb, trust_ub, subprob.scaling) + if subprob.terminate: + break + # Update parameters + trust_radius = max(initial_trust_radius, + TRUST_ENLARGEMENT*state.tr_radius) + # TODO: Use more advanced strategies from [2]_ + # to update this parameters. 
+ barrier_parameter *= BARRIER_DECAY_RATIO + tolerance *= BARRIER_DECAY_RATIO + # Update Barrier Problem + subprob.update(barrier_parameter, tolerance) + # Compute initial values for next iteration + fun0_subprob, constr0_subprob = subprob.function_and_constraints(z) + grad0_subprob, jac0_subprob = subprob.gradient_and_jacobian(z) + + # Get x and s + x = subprob.get_variables(z) + return x, state diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_ncg.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_ncg.py new file mode 100644 index 0000000000000000000000000000000000000000..fed17ff8b84eaf019c0ad69a03f260ca674477ad --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_trustregion_ncg.py @@ -0,0 +1,126 @@ +"""Newton-CG trust-region optimization.""" +import math + +import numpy as np +import scipy.linalg +from ._trustregion import (_minimize_trust_region, BaseQuadraticSubproblem) + +__all__ = [] + + +def _minimize_trust_ncg(fun, x0, args=(), jac=None, hess=None, hessp=None, + **trust_region_options): + """ + Minimization of scalar function of one or more variables using + the Newton conjugate gradient trust-region algorithm. + + Options + ------- + initial_trust_radius : float + Initial trust-region radius. + max_trust_radius : float + Maximum value of the trust-region radius. No steps that are longer + than this value will be proposed. + eta : float + Trust region related acceptance stringency for proposed steps. + gtol : float + Gradient norm must be less than `gtol` before successful + termination. 
+ + """ + if jac is None: + raise ValueError('Jacobian is required for Newton-CG trust-region ' + 'minimization') + if hess is None and hessp is None: + raise ValueError('Either the Hessian or the Hessian-vector product ' + 'is required for Newton-CG trust-region minimization') + return _minimize_trust_region(fun, x0, args=args, jac=jac, hess=hess, + hessp=hessp, subproblem=CGSteihaugSubproblem, + **trust_region_options) + + +class CGSteihaugSubproblem(BaseQuadraticSubproblem): + """Quadratic subproblem solved by a conjugate gradient method""" + def solve(self, trust_radius): + """ + Solve the subproblem using a conjugate gradient method. + + Parameters + ---------- + trust_radius : float + We are allowed to wander only this far away from the origin. + + Returns + ------- + p : ndarray + The proposed step. + hits_boundary : bool + True if the proposed step is on the boundary of the trust region. + + Notes + ----- + This is algorithm (7.2) of Nocedal and Wright 2nd edition. + Only the function that computes the Hessian-vector product is required. + The Hessian itself is not required, and the Hessian does + not need to be positive semidefinite. + """ + + # get the norm of jacobian and define the origin + p_origin = np.zeros_like(self.jac) + + # define a default tolerance + tolerance = min(0.5, math.sqrt(self.jac_mag)) * self.jac_mag + + # Stop the method if the search direction + # is a direction of nonpositive curvature. + if self.jac_mag < tolerance: + hits_boundary = False + return p_origin, hits_boundary + + # init the state for the first iteration + z = p_origin + r = self.jac + d = -r + + # Search for the min of the approximation of the objective function. + while True: + + # do an iteration + Bd = self.hessp(d) + dBd = np.dot(d, Bd) + if dBd <= 0: + # Look at the two boundary points. + # Find both values of t to get the boundary points such that + # ||z + t d|| == trust_radius + # and then choose the one with the predicted min value. 
+ ta, tb = self.get_boundaries_intersections(z, d, trust_radius) + pa = z + ta * d + pb = z + tb * d + if self(pa) < self(pb): + p_boundary = pa + else: + p_boundary = pb + hits_boundary = True + return p_boundary, hits_boundary + r_squared = np.dot(r, r) + alpha = r_squared / dBd + z_next = z + alpha * d + if scipy.linalg.norm(z_next) >= trust_radius: + # Find t >= 0 to get the boundary point such that + # ||z + t d|| == trust_radius + ta, tb = self.get_boundaries_intersections(z, d, trust_radius) + p_boundary = z + tb * d + hits_boundary = True + return p_boundary, hits_boundary + r_next = r + alpha * Bd + r_next_squared = np.dot(r_next, r_next) + if math.sqrt(r_next_squared) < tolerance: + hits_boundary = False + return z_next, hits_boundary + beta_next = r_next_squared / r_squared + d_next = -r_next + beta_next * d + + # update the state for the next iteration + z = z_next + r = r_next + d = d_next diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tstutils.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tstutils.py new file mode 100644 index 0000000000000000000000000000000000000000..f56e835e345d66023efae81114a45ed29269f18d --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_tstutils.py @@ -0,0 +1,972 @@ +r""" +Parameters used in test and benchmark methods. + +Collections of test cases suitable for testing 1-D root-finders + 'original': The original benchmarking functions. + Real-valued functions of real-valued inputs on an interval + with a zero. + f1, .., f3 are continuous and infinitely differentiable + f4 has a left- and right- discontinuity at the root + f5 has a root at 1 replacing a 1st order pole + f6 is randomly positive on one side of the root, + randomly negative on the other. + f4 - f6 are not continuous at the root. + + 'aps': The test problems in the 1995 paper + TOMS "Algorithm 748: Enclosing Zeros of Continuous Functions" + by Alefeld, Potra and Shi. 
Real-valued functions of + real-valued inputs on an interval with a zero. + Suitable for methods which start with an enclosing interval, and + derivatives up to 2nd order. + + 'complex': Some complex-valued functions of complex-valued inputs. + No enclosing bracket is provided. + Suitable for methods which use one or more starting values, and + derivatives up to 2nd order. + + The test cases are provided as a list of dictionaries. The dictionary + keys will be a subset of: + ["f", "fprime", "fprime2", "args", "bracket", "smoothness", + "a", "b", "x0", "x1", "root", "ID"] +""" + +# Sources: +# [1] Alefeld, G. E. and Potra, F. A. and Shi, Yixun, +# "Algorithm 748: Enclosing Zeros of Continuous Functions", +# ACM Trans. Math. Softw. Volume 221(1995) +# doi = {10.1145/210089.210111}, +# [2] Chandrupatla, Tirupathi R. "A new hybrid quadratic/bisection algorithm +# for finding the zero of a nonlinear function without using derivatives." +# Advances in Engineering Software 28.3 (1997): 145-149. + +from random import random + +import numpy as np + +from scipy.optimize import _zeros_py as cc +from scipy._lib._array_api import array_namespace + +# "description" refers to the original functions +description = """ +f2 is a symmetric parabola, x**2 - 1 +f3 is a quartic polynomial with large hump in interval +f4 is step function with a discontinuity at 1 +f5 is a hyperbola with vertical asymptote at 1 +f6 has random values positive to left of 1, negative to right + +Of course, these are not real problems. They just test how the +'good' solvers behave in bad circumstances where bisection is +really the best. A good solver should not be much worse than +bisection in such circumstance, while being faster for smooth +monotone sorts of functions. +""" + + +def f1(x): + r"""f1 is a quadratic with roots at 0 and 1""" + return x * (x - 1.) 
+ + +def f1_fp(x): + return 2 * x - 1 + + +def f1_fpp(x): + return 2 + + +def f2(x): + r"""f2 is a symmetric parabola, x**2 - 1""" + return x**2 - 1 + + +def f2_fp(x): + return 2 * x + + +def f2_fpp(x): + return 2 + + +def f3(x): + r"""A quartic with roots at 0, 1, 2 and 3""" + return x * (x - 1.) * (x - 2.) * (x - 3.) # x**4 - 6x**3 + 11x**2 - 6x + + +def f3_fp(x): + return 4 * x**3 - 18 * x**2 + 22 * x - 6 + + +def f3_fpp(x): + return 12 * x**2 - 36 * x + 22 + + +def f4(x): + r"""Piecewise linear, left- and right- discontinuous at x=1, the root.""" + if x > 1: + return 1.0 + .1 * x + if x < 1: + return -1.0 + .1 * x + return 0 + + +def f5(x): + r""" + Hyperbola with a pole at x=1, but pole replaced with 0. Not continuous at root. + """ + if x != 1: + return 1.0 / (1. - x) + return 0 + + +# f6(x) returns random value. Without memoization, calling twice with the +# same x returns different values, hence a "random value", not a +# "function with random values" +_f6_cache = {} +def f6(x): + v = _f6_cache.get(x, None) + if v is None: + if x > 1: + v = random() + elif x < 1: + v = -random() + else: + v = 0 + _f6_cache[x] = v + return v + + +# Each Original test case has +# - a function and its two derivatives, +# - additional arguments, +# - a bracket enclosing a root, +# - the order of differentiability (smoothness) on this interval +# - a starting value for methods which don't require a bracket +# - the root (inside the bracket) +# - an Identifier of the test case + +_ORIGINAL_TESTS_KEYS = [ + "f", "fprime", "fprime2", "args", "bracket", "smoothness", "x0", "root", "ID" +] +_ORIGINAL_TESTS = [ + [f1, f1_fp, f1_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.01.00"], + [f2, f2_fp, f2_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.02.00"], + [f3, f3_fp, f3_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.03.00"], + [f4, None, None, (), [0.5, np.sqrt(3)], -1, 0.6, 1.0, "original.04.00"], + [f5, None, None, (), [0.5, np.sqrt(3)], -1, 0.6, 1.0, 
"original.05.00"], + [f6, None, None, (), [0.5, np.sqrt(3)], -np.inf, 0.6, 1.0, "original.05.00"] +] + +_ORIGINAL_TESTS_DICTS = [ + dict(zip(_ORIGINAL_TESTS_KEYS, testcase)) for testcase in _ORIGINAL_TESTS +] + +# ################## +# "APS" test cases +# Functions and test cases that appear in [1] + + +def aps01_f(x): + r"""Straightforward sum of trigonometric function and polynomial""" + return np.sin(x) - x / 2 + + +def aps01_fp(x): + return np.cos(x) - 1.0 / 2 + + +def aps01_fpp(x): + return -np.sin(x) + + +def aps02_f(x): + r"""poles at x=n**2, 1st and 2nd derivatives at root are also close to 0""" + ii = np.arange(1, 21) + return -2 * np.sum((2 * ii - 5)**2 / (x - ii**2)**3) + + +def aps02_fp(x): + ii = np.arange(1, 21) + return 6 * np.sum((2 * ii - 5)**2 / (x - ii**2)**4) + + +def aps02_fpp(x): + ii = np.arange(1, 21) + return 24 * np.sum((2 * ii - 5)**2 / (x - ii**2)**5) + + +def aps03_f(x, a, b): + r"""Rapidly changing at the root""" + return a * x * np.exp(b * x) + + +def aps03_fp(x, a, b): + return a * (b * x + 1) * np.exp(b * x) + + +def aps03_fpp(x, a, b): + return a * (b * (b * x + 1) + b) * np.exp(b * x) + + +def aps04_f(x, n, a): + r"""Medium-degree polynomial""" + return x**n - a + + +def aps04_fp(x, n, a): + return n * x**(n - 1) + + +def aps04_fpp(x, n, a): + return n * (n - 1) * x**(n - 2) + + +def aps05_f(x): + r"""Simple Trigonometric function""" + return np.sin(x) - 1.0 / 2 + + +def aps05_fp(x): + return np.cos(x) + + +def aps05_fpp(x): + return -np.sin(x) + + +def aps06_f(x, n): + r"""Exponential rapidly changing from -1 to 1 at x=0""" + return 2 * x * np.exp(-n) - 2 * np.exp(-n * x) + 1 + + +def aps06_fp(x, n): + return 2 * np.exp(-n) + 2 * n * np.exp(-n * x) + + +def aps06_fpp(x, n): + return -2 * n * n * np.exp(-n * x) + + +def aps07_f(x, n): + r"""Upside down parabola with parametrizable height""" + return (1 + (1 - n)**2) * x - (1 - n * x)**2 + + +def aps07_fp(x, n): + return (1 + (1 - n)**2) + 2 * n * (1 - n * x) + + +def aps07_fpp(x, 
n): + return -2 * n * n + + +def aps08_f(x, n): + r"""Degree n polynomial""" + return x * x - (1 - x)**n + + +def aps08_fp(x, n): + return 2 * x + n * (1 - x)**(n - 1) + + +def aps08_fpp(x, n): + return 2 - n * (n - 1) * (1 - x)**(n - 2) + + +def aps09_f(x, n): + r"""Upside down quartic with parametrizable height""" + return (1 + (1 - n)**4) * x - (1 - n * x)**4 + + +def aps09_fp(x, n): + return (1 + (1 - n)**4) + 4 * n * (1 - n * x)**3 + + +def aps09_fpp(x, n): + return -12 * n * (1 - n * x)**2 + + +def aps10_f(x, n): + r"""Exponential plus a polynomial""" + return np.exp(-n * x) * (x - 1) + x**n + + +def aps10_fp(x, n): + return np.exp(-n * x) * (-n * (x - 1) + 1) + n * x**(n - 1) + + +def aps10_fpp(x, n): + return (np.exp(-n * x) * (-n * (-n * (x - 1) + 1) + -n * x) + + n * (n - 1) * x**(n - 2)) + + +def aps11_f(x, n): + r"""Rational function with a zero at x=1/n and a pole at x=0""" + return (n * x - 1) / ((n - 1) * x) + + +def aps11_fp(x, n): + return 1 / (n - 1) / x**2 + + +def aps11_fpp(x, n): + return -2 / (n - 1) / x**3 + + +def aps12_f(x, n): + r"""nth root of x, with a zero at x=n""" + return np.power(x, 1.0 / n) - np.power(n, 1.0 / n) + + +def aps12_fp(x, n): + return np.power(x, (1.0 - n) / n) / n + + +def aps12_fpp(x, n): + return np.power(x, (1.0 - 2 * n) / n) * (1.0 / n) * (1.0 - n) / n + + +_MAX_EXPABLE = np.log(np.finfo(float).max) + + +def aps13_f(x): + r"""Function with *all* derivatives 0 at the root""" + if x == 0: + return 0 + # x2 = 1.0/x**2 + # if x2 > 708: + # return 0 + y = 1 / x**2 + if y > _MAX_EXPABLE: + return 0 + return x / np.exp(y) + + +def aps13_fp(x): + if x == 0: + return 0 + y = 1 / x**2 + if y > _MAX_EXPABLE: + return 0 + return (1 + 2 / x**2) / np.exp(y) + + +def aps13_fpp(x): + if x == 0: + return 0 + y = 1 / x**2 + if y > _MAX_EXPABLE: + return 0 + return 2 * (2 - x**2) / x**5 / np.exp(y) + + +def aps14_f(x, n): + r"""0 for negative x-values, trigonometric+linear for x positive""" + if x <= 0: + return -n / 20.0 + return n 
/ 20.0 * (x / 1.5 + np.sin(x) - 1) + + +def aps14_fp(x, n): + if x <= 0: + return 0 + return n / 20.0 * (1.0 / 1.5 + np.cos(x)) + + +def aps14_fpp(x, n): + if x <= 0: + return 0 + return -n / 20.0 * (np.sin(x)) + + +def aps15_f(x, n): + r"""piecewise linear, constant outside of [0, 0.002/(1+n)]""" + if x < 0: + return -0.859 + if x > 2 * 1e-3 / (1 + n): + return np.e - 1.859 + return np.exp((n + 1) * x / 2 * 1000) - 1.859 + + +def aps15_fp(x, n): + if not 0 <= x <= 2 * 1e-3 / (1 + n): + return np.e - 1.859 + return np.exp((n + 1) * x / 2 * 1000) * (n + 1) / 2 * 1000 + + +def aps15_fpp(x, n): + if not 0 <= x <= 2 * 1e-3 / (1 + n): + return np.e - 1.859 + return np.exp((n + 1) * x / 2 * 1000) * (n + 1) / 2 * 1000 * (n + 1) / 2 * 1000 + + +# Each APS test case has +# - a function and its two derivatives, +# - additional arguments, +# - a bracket enclosing a root, +# - the order of differentiability of the function on this interval +# - a starting value for methods which don't require a bracket +# - the root (inside the bracket) +# - an Identifier of the test case +# +# Algorithm 748 is a bracketing algorithm so a bracketing interval was provided +# in [1] for each test case. Newton and Halley methods need a single +# starting point x0, which was chosen to be near the middle of the interval, +# unless that would have made the problem too easy. 
+ +_APS_TESTS_KEYS = [ + "f", "fprime", "fprime2", "args", "bracket", "smoothness", "x0", "root", "ID" +] +_APS_TESTS = [ + [aps01_f, aps01_fp, aps01_fpp, (), [np.pi / 2, np.pi], np.inf, + 3, 1.89549426703398094e+00, "aps.01.00"], + [aps02_f, aps02_fp, aps02_fpp, (), [1 + 1e-9, 4 - 1e-9], np.inf, + 2, 3.02291534727305677e+00, "aps.02.00"], + [aps02_f, aps02_fp, aps02_fpp, (), [4 + 1e-9, 9 - 1e-9], np.inf, + 5, 6.68375356080807848e+00, "aps.02.01"], + [aps02_f, aps02_fp, aps02_fpp, (), [9 + 1e-9, 16 - 1e-9], np.inf, + 10, 1.12387016550022114e+01, "aps.02.02"], + [aps02_f, aps02_fp, aps02_fpp, (), [16 + 1e-9, 25 - 1e-9], np.inf, + 17, 1.96760000806234103e+01, "aps.02.03"], + [aps02_f, aps02_fp, aps02_fpp, (), [25 + 1e-9, 36 - 1e-9], np.inf, + 26, 2.98282273265047557e+01, "aps.02.04"], + [aps02_f, aps02_fp, aps02_fpp, (), [36 + 1e-9, 49 - 1e-9], np.inf, + 37, 4.19061161952894139e+01, "aps.02.05"], + [aps02_f, aps02_fp, aps02_fpp, (), [49 + 1e-9, 64 - 1e-9], np.inf, + 50, 5.59535958001430913e+01, "aps.02.06"], + [aps02_f, aps02_fp, aps02_fpp, (), [64 + 1e-9, 81 - 1e-9], np.inf, + 65, 7.19856655865877997e+01, "aps.02.07"], + [aps02_f, aps02_fp, aps02_fpp, (), [81 + 1e-9, 100 - 1e-9], np.inf, + 82, 9.00088685391666701e+01, "aps.02.08"], + [aps02_f, aps02_fp, aps02_fpp, (), [100 + 1e-9, 121 - 1e-9], np.inf, + 101, 1.10026532748330197e+02, "aps.02.09"], + [aps03_f, aps03_fp, aps03_fpp, (-40, -1), [-9, 31], np.inf, + -2, 0, "aps.03.00"], + [aps03_f, aps03_fp, aps03_fpp, (-100, -2), [-9, 31], np.inf, + -2, 0, "aps.03.01"], + [aps03_f, aps03_fp, aps03_fpp, (-200, -3), [-9, 31], np.inf, + -2, 0, "aps.03.02"], + [aps04_f, aps04_fp, aps04_fpp, (4, 0.2), [0, 5], np.inf, + 2.5, 6.68740304976422006e-01, "aps.04.00"], + [aps04_f, aps04_fp, aps04_fpp, (6, 0.2), [0, 5], np.inf, + 2.5, 7.64724491331730039e-01, "aps.04.01"], + [aps04_f, aps04_fp, aps04_fpp, (8, 0.2), [0, 5], np.inf, + 2.5, 8.17765433957942545e-01, "aps.04.02"], + [aps04_f, aps04_fp, aps04_fpp, (10, 0.2), [0, 5], np.inf, 
+ 2.5, 8.51339922520784609e-01, "aps.04.03"], + [aps04_f, aps04_fp, aps04_fpp, (12, 0.2), [0, 5], np.inf, + 2.5, 8.74485272221167897e-01, "aps.04.04"], + [aps04_f, aps04_fp, aps04_fpp, (4, 1), [0, 5], np.inf, + 2.5, 1, "aps.04.05"], + [aps04_f, aps04_fp, aps04_fpp, (6, 1), [0, 5], np.inf, + 2.5, 1, "aps.04.06"], + [aps04_f, aps04_fp, aps04_fpp, (8, 1), [0, 5], np.inf, + 2.5, 1, "aps.04.07"], + [aps04_f, aps04_fp, aps04_fpp, (10, 1), [0, 5], np.inf, + 2.5, 1, "aps.04.08"], + [aps04_f, aps04_fp, aps04_fpp, (12, 1), [0, 5], np.inf, + 2.5, 1, "aps.04.09"], + [aps04_f, aps04_fp, aps04_fpp, (8, 1), [-0.95, 4.05], np.inf, + 1.5, 1, "aps.04.10"], + [aps04_f, aps04_fp, aps04_fpp, (10, 1), [-0.95, 4.05], np.inf, + 1.5, 1, "aps.04.11"], + [aps04_f, aps04_fp, aps04_fpp, (12, 1), [-0.95, 4.05], np.inf, + 1.5, 1, "aps.04.12"], + [aps04_f, aps04_fp, aps04_fpp, (14, 1), [-0.95, 4.05], np.inf, + 1.5, 1, "aps.04.13"], + [aps05_f, aps05_fp, aps05_fpp, (), [0, 1.5], np.inf, + 1.3, np.pi / 6, "aps.05.00"], + [aps06_f, aps06_fp, aps06_fpp, (1,), [0, 1], np.inf, + 0.5, 4.22477709641236709e-01, "aps.06.00"], + [aps06_f, aps06_fp, aps06_fpp, (2,), [0, 1], np.inf, + 0.5, 3.06699410483203705e-01, "aps.06.01"], + [aps06_f, aps06_fp, aps06_fpp, (3,), [0, 1], np.inf, + 0.5, 2.23705457654662959e-01, "aps.06.02"], + [aps06_f, aps06_fp, aps06_fpp, (4,), [0, 1], np.inf, + 0.5, 1.71719147519508369e-01, "aps.06.03"], + [aps06_f, aps06_fp, aps06_fpp, (5,), [0, 1], np.inf, + 0.4, 1.38257155056824066e-01, "aps.06.04"], + [aps06_f, aps06_fp, aps06_fpp, (20,), [0, 1], np.inf, + 0.1, 3.46573590208538521e-02, "aps.06.05"], + [aps06_f, aps06_fp, aps06_fpp, (40,), [0, 1], np.inf, + 5e-02, 1.73286795139986315e-02, "aps.06.06"], + [aps06_f, aps06_fp, aps06_fpp, (60,), [0, 1], np.inf, + 1.0 / 30, 1.15524530093324210e-02, "aps.06.07"], + [aps06_f, aps06_fp, aps06_fpp, (80,), [0, 1], np.inf, + 2.5e-02, 8.66433975699931573e-03, "aps.06.08"], + [aps06_f, aps06_fp, aps06_fpp, (100,), [0, 1], np.inf, + 2e-02, 
6.93147180559945415e-03, "aps.06.09"], + [aps07_f, aps07_fp, aps07_fpp, (5,), [0, 1], np.inf, + 0.4, 3.84025518406218985e-02, "aps.07.00"], + [aps07_f, aps07_fp, aps07_fpp, (10,), [0, 1], np.inf, + 0.4, 9.90000999800049949e-03, "aps.07.01"], + [aps07_f, aps07_fp, aps07_fpp, (20,), [0, 1], np.inf, + 0.4, 2.49375003906201174e-03, "aps.07.02"], + [aps08_f, aps08_fp, aps08_fpp, (2,), [0, 1], np.inf, + 0.9, 0.5, "aps.08.00"], + [aps08_f, aps08_fp, aps08_fpp, (5,), [0, 1], np.inf, + 0.9, 3.45954815848242059e-01, "aps.08.01"], + [aps08_f, aps08_fp, aps08_fpp, (10,), [0, 1], np.inf, + 0.9, 2.45122333753307220e-01, "aps.08.02"], + [aps08_f, aps08_fp, aps08_fpp, (15,), [0, 1], np.inf, + 0.9, 1.95547623536565629e-01, "aps.08.03"], + [aps08_f, aps08_fp, aps08_fpp, (20,), [0, 1], np.inf, + 0.9, 1.64920957276440960e-01, "aps.08.04"], + [aps09_f, aps09_fp, aps09_fpp, (1,), [0, 1], np.inf, + 0.5, 2.75508040999484394e-01, "aps.09.00"], + [aps09_f, aps09_fp, aps09_fpp, (2,), [0, 1], np.inf, + 0.5, 1.37754020499742197e-01, "aps.09.01"], + [aps09_f, aps09_fp, aps09_fpp, (4,), [0, 1], np.inf, + 0.5, 1.03052837781564422e-02, "aps.09.02"], + [aps09_f, aps09_fp, aps09_fpp, (5,), [0, 1], np.inf, + 0.5, 3.61710817890406339e-03, "aps.09.03"], + [aps09_f, aps09_fp, aps09_fpp, (8,), [0, 1], np.inf, + 0.5, 4.10872918496395375e-04, "aps.09.04"], + [aps09_f, aps09_fp, aps09_fpp, (15,), [0, 1], np.inf, + 0.5, 2.59895758929076292e-05, "aps.09.05"], + [aps09_f, aps09_fp, aps09_fpp, (20,), [0, 1], np.inf, + 0.5, 7.66859512218533719e-06, "aps.09.06"], + [aps10_f, aps10_fp, aps10_fpp, (1,), [0, 1], np.inf, + 0.9, 4.01058137541547011e-01, "aps.10.00"], + [aps10_f, aps10_fp, aps10_fpp, (5,), [0, 1], np.inf, + 0.9, 5.16153518757933583e-01, "aps.10.01"], + [aps10_f, aps10_fp, aps10_fpp, (10,), [0, 1], np.inf, + 0.9, 5.39522226908415781e-01, "aps.10.02"], + [aps10_f, aps10_fp, aps10_fpp, (15,), [0, 1], np.inf, + 0.9, 5.48182294340655241e-01, "aps.10.03"], + [aps10_f, aps10_fp, aps10_fpp, (20,), [0, 1], 
np.inf, + 0.9, 5.52704666678487833e-01, "aps.10.04"], + [aps11_f, aps11_fp, aps11_fpp, (2,), [0.01, 1], np.inf, + 1e-02, 1.0 / 2, "aps.11.00"], + [aps11_f, aps11_fp, aps11_fpp, (5,), [0.01, 1], np.inf, + 1e-02, 1.0 / 5, "aps.11.01"], + [aps11_f, aps11_fp, aps11_fpp, (15,), [0.01, 1], np.inf, + 1e-02, 1.0 / 15, "aps.11.02"], + [aps11_f, aps11_fp, aps11_fpp, (20,), [0.01, 1], np.inf, + 1e-02, 1.0 / 20, "aps.11.03"], + [aps12_f, aps12_fp, aps12_fpp, (2,), [1, 100], np.inf, + 1.1, 2, "aps.12.00"], + [aps12_f, aps12_fp, aps12_fpp, (3,), [1, 100], np.inf, + 1.1, 3, "aps.12.01"], + [aps12_f, aps12_fp, aps12_fpp, (4,), [1, 100], np.inf, + 1.1, 4, "aps.12.02"], + [aps12_f, aps12_fp, aps12_fpp, (5,), [1, 100], np.inf, + 1.1, 5, "aps.12.03"], + [aps12_f, aps12_fp, aps12_fpp, (6,), [1, 100], np.inf, + 1.1, 6, "aps.12.04"], + [aps12_f, aps12_fp, aps12_fpp, (7,), [1, 100], np.inf, + 1.1, 7, "aps.12.05"], + [aps12_f, aps12_fp, aps12_fpp, (9,), [1, 100], np.inf, + 1.1, 9, "aps.12.06"], + [aps12_f, aps12_fp, aps12_fpp, (11,), [1, 100], np.inf, + 1.1, 11, "aps.12.07"], + [aps12_f, aps12_fp, aps12_fpp, (13,), [1, 100], np.inf, + 1.1, 13, "aps.12.08"], + [aps12_f, aps12_fp, aps12_fpp, (15,), [1, 100], np.inf, + 1.1, 15, "aps.12.09"], + [aps12_f, aps12_fp, aps12_fpp, (17,), [1, 100], np.inf, + 1.1, 17, "aps.12.10"], + [aps12_f, aps12_fp, aps12_fpp, (19,), [1, 100], np.inf, + 1.1, 19, "aps.12.11"], + [aps12_f, aps12_fp, aps12_fpp, (21,), [1, 100], np.inf, + 1.1, 21, "aps.12.12"], + [aps12_f, aps12_fp, aps12_fpp, (23,), [1, 100], np.inf, + 1.1, 23, "aps.12.13"], + [aps12_f, aps12_fp, aps12_fpp, (25,), [1, 100], np.inf, + 1.1, 25, "aps.12.14"], + [aps12_f, aps12_fp, aps12_fpp, (27,), [1, 100], np.inf, + 1.1, 27, "aps.12.15"], + [aps12_f, aps12_fp, aps12_fpp, (29,), [1, 100], np.inf, + 1.1, 29, "aps.12.16"], + [aps12_f, aps12_fp, aps12_fpp, (31,), [1, 100], np.inf, + 1.1, 31, "aps.12.17"], + [aps12_f, aps12_fp, aps12_fpp, (33,), [1, 100], np.inf, + 1.1, 33, "aps.12.18"], + [aps13_f, 
aps13_fp, aps13_fpp, (), [-1, 4], np.inf, + 1.5, 0, "aps.13.00"], + [aps14_f, aps14_fp, aps14_fpp, (1,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.00"], + [aps14_f, aps14_fp, aps14_fpp, (2,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.01"], + [aps14_f, aps14_fp, aps14_fpp, (3,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.02"], + [aps14_f, aps14_fp, aps14_fpp, (4,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.03"], + [aps14_f, aps14_fp, aps14_fpp, (5,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.04"], + [aps14_f, aps14_fp, aps14_fpp, (6,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.05"], + [aps14_f, aps14_fp, aps14_fpp, (7,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.06"], + [aps14_f, aps14_fp, aps14_fpp, (8,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.07"], + [aps14_f, aps14_fp, aps14_fpp, (9,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.08"], + [aps14_f, aps14_fp, aps14_fpp, (10,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.09"], + [aps14_f, aps14_fp, aps14_fpp, (11,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.10"], + [aps14_f, aps14_fp, aps14_fpp, (12,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.11"], + [aps14_f, aps14_fp, aps14_fpp, (13,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.12"], + [aps14_f, aps14_fp, aps14_fpp, (14,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.13"], + [aps14_f, aps14_fp, aps14_fpp, (15,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.14"], + [aps14_f, aps14_fp, aps14_fpp, (16,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.15"], + [aps14_f, aps14_fp, aps14_fpp, (17,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.16"], + [aps14_f, aps14_fp, aps14_fpp, (18,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.17"], + [aps14_f, 
aps14_fp, aps14_fpp, (19,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.18"], + [aps14_f, aps14_fp, aps14_fpp, (20,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.19"], + [aps14_f, aps14_fp, aps14_fpp, (21,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.20"], + [aps14_f, aps14_fp, aps14_fpp, (22,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.21"], + [aps14_f, aps14_fp, aps14_fpp, (23,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.22"], + [aps14_f, aps14_fp, aps14_fpp, (24,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.23"], + [aps14_f, aps14_fp, aps14_fpp, (25,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.24"], + [aps14_f, aps14_fp, aps14_fpp, (26,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.25"], + [aps14_f, aps14_fp, aps14_fpp, (27,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.26"], + [aps14_f, aps14_fp, aps14_fpp, (28,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.27"], + [aps14_f, aps14_fp, aps14_fpp, (29,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.28"], + [aps14_f, aps14_fp, aps14_fpp, (30,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.29"], + [aps14_f, aps14_fp, aps14_fpp, (31,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.30"], + [aps14_f, aps14_fp, aps14_fpp, (32,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.31"], + [aps14_f, aps14_fp, aps14_fpp, (33,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.32"], + [aps14_f, aps14_fp, aps14_fpp, (34,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.33"], + [aps14_f, aps14_fp, aps14_fpp, (35,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.34"], + [aps14_f, aps14_fp, aps14_fpp, (36,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.35"], + [aps14_f, aps14_fp, aps14_fpp, (37,), [-1000, np.pi / 2], 0, + 1, 
6.23806518961612433e-01, "aps.14.36"], + [aps14_f, aps14_fp, aps14_fpp, (38,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.37"], + [aps14_f, aps14_fp, aps14_fpp, (39,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.38"], + [aps14_f, aps14_fp, aps14_fpp, (40,), [-1000, np.pi / 2], 0, + 1, 6.23806518961612433e-01, "aps.14.39"], + [aps15_f, aps15_fp, aps15_fpp, (20,), [-1000, 1e-4], 0, + -2, 5.90513055942197166e-05, "aps.15.00"], + [aps15_f, aps15_fp, aps15_fpp, (21,), [-1000, 1e-4], 0, + -2, 5.63671553399369967e-05, "aps.15.01"], + [aps15_f, aps15_fp, aps15_fpp, (22,), [-1000, 1e-4], 0, + -2, 5.39164094555919196e-05, "aps.15.02"], + [aps15_f, aps15_fp, aps15_fpp, (23,), [-1000, 1e-4], 0, + -2, 5.16698923949422470e-05, "aps.15.03"], + [aps15_f, aps15_fp, aps15_fpp, (24,), [-1000, 1e-4], 0, + -2, 4.96030966991445609e-05, "aps.15.04"], + [aps15_f, aps15_fp, aps15_fpp, (25,), [-1000, 1e-4], 0, + -2, 4.76952852876389951e-05, "aps.15.05"], + [aps15_f, aps15_fp, aps15_fpp, (26,), [-1000, 1e-4], 0, + -2, 4.59287932399486662e-05, "aps.15.06"], + [aps15_f, aps15_fp, aps15_fpp, (27,), [-1000, 1e-4], 0, + -2, 4.42884791956647841e-05, "aps.15.07"], + [aps15_f, aps15_fp, aps15_fpp, (28,), [-1000, 1e-4], 0, + -2, 4.27612902578832391e-05, "aps.15.08"], + [aps15_f, aps15_fp, aps15_fpp, (29,), [-1000, 1e-4], 0, + -2, 4.13359139159538030e-05, "aps.15.09"], + [aps15_f, aps15_fp, aps15_fpp, (30,), [-1000, 1e-4], 0, + -2, 4.00024973380198076e-05, "aps.15.10"], + [aps15_f, aps15_fp, aps15_fpp, (31,), [-1000, 1e-4], 0, + -2, 3.87524192962066869e-05, "aps.15.11"], + [aps15_f, aps15_fp, aps15_fpp, (32,), [-1000, 1e-4], 0, + -2, 3.75781035599579910e-05, "aps.15.12"], + [aps15_f, aps15_fp, aps15_fpp, (33,), [-1000, 1e-4], 0, + -2, 3.64728652199592355e-05, "aps.15.13"], + [aps15_f, aps15_fp, aps15_fpp, (34,), [-1000, 1e-4], 0, + -2, 3.54307833565318273e-05, "aps.15.14"], + [aps15_f, aps15_fp, aps15_fpp, (35,), [-1000, 1e-4], 0, + -2, 3.44465949299614980e-05, 
"aps.15.15"], + [aps15_f, aps15_fp, aps15_fpp, (36,), [-1000, 1e-4], 0, + -2, 3.35156058778003705e-05, "aps.15.16"], + [aps15_f, aps15_fp, aps15_fpp, (37,), [-1000, 1e-4], 0, + -2, 3.26336162494372125e-05, "aps.15.17"], + [aps15_f, aps15_fp, aps15_fpp, (38,), [-1000, 1e-4], 0, + -2, 3.17968568584260013e-05, "aps.15.18"], + [aps15_f, aps15_fp, aps15_fpp, (39,), [-1000, 1e-4], 0, + -2, 3.10019354369653455e-05, "aps.15.19"], + [aps15_f, aps15_fp, aps15_fpp, (40,), [-1000, 1e-4], 0, + -2, 3.02457906702100968e-05, "aps.15.20"], + [aps15_f, aps15_fp, aps15_fpp, (100,), [-1000, 1e-4], 0, + -2, 1.22779942324615231e-05, "aps.15.21"], + [aps15_f, aps15_fp, aps15_fpp, (200,), [-1000, 1e-4], 0, + -2, 6.16953939044086617e-06, "aps.15.22"], + [aps15_f, aps15_fp, aps15_fpp, (300,), [-1000, 1e-4], 0, + -2, 4.11985852982928163e-06, "aps.15.23"], + [aps15_f, aps15_fp, aps15_fpp, (400,), [-1000, 1e-4], 0, + -2, 3.09246238772721682e-06, "aps.15.24"], + [aps15_f, aps15_fp, aps15_fpp, (500,), [-1000, 1e-4], 0, + -2, 2.47520442610501789e-06, "aps.15.25"], + [aps15_f, aps15_fp, aps15_fpp, (600,), [-1000, 1e-4], 0, + -2, 2.06335676785127107e-06, "aps.15.26"], + [aps15_f, aps15_fp, aps15_fpp, (700,), [-1000, 1e-4], 0, + -2, 1.76901200781542651e-06, "aps.15.27"], + [aps15_f, aps15_fp, aps15_fpp, (800,), [-1000, 1e-4], 0, + -2, 1.54816156988591016e-06, "aps.15.28"], + [aps15_f, aps15_fp, aps15_fpp, (900,), [-1000, 1e-4], 0, + -2, 1.37633453660223511e-06, "aps.15.29"], + [aps15_f, aps15_fp, aps15_fpp, (1000,), [-1000, 1e-4], 0, + -2, 1.23883857889971403e-06, "aps.15.30"] +] + +_APS_TESTS_DICTS = [dict(zip(_APS_TESTS_KEYS, testcase)) for testcase in _APS_TESTS] + + +# ################## +# "complex" test cases +# A few simple, complex-valued, functions, defined on the complex plane. 
+ + +def cplx01_f(z, n, a): + r"""z**n-a: Use to find the nth root of a""" + return z**n - a + + +def cplx01_fp(z, n, a): + return n * z**(n - 1) + + +def cplx01_fpp(z, n, a): + return n * (n - 1) * z**(n - 2) + + +def cplx02_f(z, a): + r"""e**z - a: Use to find the log of a""" + return np.exp(z) - a + + +def cplx02_fp(z, a): + return np.exp(z) + + +def cplx02_fpp(z, a): + return np.exp(z) + + +# Each "complex" test case has +# - a function and its two derivatives, +# - additional arguments, +# - the order of differentiability of the function on this interval +# - two starting values x0 and x1 +# - the root +# - an Identifier of the test case +# +# Algorithm 748 is a bracketing algorithm so a bracketing interval was provided +# in [1] for each test case. Newton and Halley need a single starting point +# x0, which was chosen to be near the middle of the interval, unless that +# would make the problem too easy. + + +_COMPLEX_TESTS_KEYS = [ + "f", "fprime", "fprime2", "args", "smoothness", "x0", "x1", "root", "ID" +] +_COMPLEX_TESTS = [ + [cplx01_f, cplx01_fp, cplx01_fpp, (2, -1), np.inf, + (1 + 1j), (0.5 + 0.5j), 1j, "complex.01.00"], + [cplx01_f, cplx01_fp, cplx01_fpp, (3, 1), np.inf, + (-1 + 1j), (-0.5 + 2.0j), (-0.5 + np.sqrt(3) / 2 * 1.0j), + "complex.01.01"], + [cplx01_f, cplx01_fp, cplx01_fpp, (3, -1), np.inf, + 1j, (0.5 + 0.5j), (0.5 + np.sqrt(3) / 2 * 1.0j), + "complex.01.02"], + [cplx01_f, cplx01_fp, cplx01_fpp, (3, 8), np.inf, + 5, 4, 2, "complex.01.03"], + [cplx02_f, cplx02_fp, cplx02_fpp, (-1,), np.inf, + (1 + 2j), (0.5 + 0.5j), np.pi * 1.0j, "complex.02.00"], + [cplx02_f, cplx02_fp, cplx02_fpp, (1j,), np.inf, + (1 + 2j), (0.5 + 0.5j), np.pi * 0.5j, "complex.02.01"], +] + +_COMPLEX_TESTS_DICTS = [ + dict(zip(_COMPLEX_TESTS_KEYS, testcase)) for testcase in _COMPLEX_TESTS +] + + +def _add_a_b(tests): + r"""Add "a" and "b" keys to each test from the "bracket" value""" + for d in tests: + for k, v in zip(['a', 'b'], d.get('bracket', [])): + d[k] = v + + 
_add_a_b(_ORIGINAL_TESTS_DICTS)
_add_a_b(_APS_TESTS_DICTS)
_add_a_b(_COMPLEX_TESTS_DICTS)


def get_tests(collection='original', smoothness=None):
    r"""Return the requested collection of test cases, as an array of dicts
    with subset-specific keys.

    Allowed values of collection:
    'original': The original benchmarking functions.
        Real-valued functions of real-valued inputs on an interval
        with a zero.
        f1, .., f3 are continuous and infinitely differentiable
        f4 has a single discontinuity at the root
        f5 has a root at 1 replacing a 1st order pole
        f6 is randomly positive on one side of the root,
        randomly negative on the other
    'aps': The test problems in the TOMS "Algorithm 748: Enclosing Zeros
        of Continuous Functions" paper by Alefeld, Potra and Shi.
        Real-valued functions of real-valued inputs on an interval
        with a zero.
        Suitable for methods which start with an enclosing interval, and
        derivatives up to 2nd order.
    'complex': Some complex-valued functions of complex-valued inputs.
        No enclosing bracket is provided.
        Suitable for methods which use one or more starting values, and
        derivatives up to 2nd order.
    'chandrupatla': The bracketing test problems used by Chandrupatla;
        each records the number of function evaluations his algorithm
        needs ("nfeval").

    The dictionary keys will be a subset of
    ["f", "fprime", "fprime2", "args", "bracket", "a", "b", "smoothness",
     "x0", "x1", "root", "ID"]
    """  # noqa: E501
    # An explicit None/empty argument falls back to the default collection.
    collection = collection or "original"
    subsets = {"aps": _APS_TESTS_DICTS,
               "complex": _COMPLEX_TESTS_DICTS,
               "original": _ORIGINAL_TESTS_DICTS,
               "chandrupatla": _CHANDRUPATLA_TESTS_DICTS}
    # Unknown collection names yield an empty list rather than raising.
    tests = subsets.get(collection, [])
    if smoothness is not None:
        tests = [tc for tc in tests if tc['smoothness'] >= smoothness]
    return tests


# Backwards compatibility
methods = [cc.bisect, cc.ridder, cc.brenth, cc.brentq]
mstrings = ['cc.bisect', 'cc.ridder', 'cc.brenth', 'cc.brentq']
functions = [f2, f3, f4, f5, f6]
fstrings = ['f2', 'f3', 'f4', 'f5', 'f6']

# ##################
# "Chandrupatla" test cases
# Functions and test cases that appear in [2]

def fun1(x):
    return x**3 - 2*x - 5
fun1.root = 2.0945514815423265  # additional precision using mpmath.findroot


def fun2(x):
    return 1 - 1/x**2
fun2.root = 1


def fun3(x):
    return (x-3)**3
fun3.root = 3


def fun4(x):
    return 6*(x-2)**5
fun4.root = 2


def fun5(x):
    return x**9
fun5.root = 0


def fun6(x):
    return x**19
fun6.root = 0


def fun7(x):
    # Flat near the origin; returns exactly 0 inside |x| < 3.8e-4.
    xp = array_namespace(x)
    return 0 if xp.abs(x) < 3.8e-4 else x*xp.exp(-x**(-2))
fun7.root = 0


def fun8(x):
    xp = array_namespace(x)
    xi = 0.61489
    return -(3062*(1-xi)*xp.exp(-x))/(xi + (1-xi)*xp.exp(-x)) - 1013 + 1628/x
fun8.root = 1.0375360332870405


def fun9(x):
    xp = array_namespace(x)
    return xp.exp(x) - 2 - 0.01/x**2 + .000002/x**3
fun9.root = 0.7032048403631358

# Each "Chandrupatla" test case has
# - a function,
# - two starting values x0 and x1
# - the root
# - the number of function evaluations required by Chandrupatla's algorithm
# - an Identifier of the test case
#
# Chandrupatla's is a bracketing algorithm, so a bracketing interval was
# provided in [2] for each test case. No special support for testing with
# secant/Newton/Halley is provided.

_CHANDRUPATLA_TESTS_KEYS = ["f", "bracket", "root", "nfeval", "ID"]
_CHANDRUPATLA_TESTS = [
    [fun1, [2, 3], fun1.root, 7],
    [fun1, [1, 10], fun1.root, 11],
    [fun1, [1, 100], fun1.root, 14],
    [fun1, [-1e4, 1e4], fun1.root, 23],
    [fun1, [-1e10, 1e10], fun1.root, 43],
    [fun2, [0.5, 1.51], fun2.root, 8],
    [fun2, [1e-4, 1e4], fun2.root, 22],
    [fun2, [1e-6, 1e6], fun2.root, 28],
    [fun2, [1e-10, 1e10], fun2.root, 41],
    [fun2, [1e-12, 1e12], fun2.root, 48],
    [fun3, [0, 5], fun3.root, 21],
    [fun3, [-10, 10], fun3.root, 23],
    [fun3, [-1e4, 1e4], fun3.root, 36],
    [fun3, [-1e6, 1e6], fun3.root, 45],
    [fun3, [-1e10, 1e10], fun3.root, 55],
    [fun4, [0, 5], fun4.root, 21],
    [fun4, [-10, 10], fun4.root, 23],
    [fun4, [-1e4, 1e4], fun4.root, 33],
    [fun4, [-1e6, 1e6], fun4.root, 43],
    [fun4, [-1e10, 1e10], fun4.root, 54],
    [fun5, [-1, 4], fun5.root, 21],
    [fun5, [-2, 5], fun5.root, 22],
    [fun5, [-1, 10], fun5.root, 23],
    [fun5, [-5, 50], fun5.root, 25],
    [fun5, [-10, 100], fun5.root, 26],
    [fun6, [-1., 4.], fun6.root, 21],
    [fun6, [-2., 5.], fun6.root, 22],
    [fun6, [-1., 10.], fun6.root, 23],
    [fun6, [-5., 50.], fun6.root, 25],
    [fun6, [-10., 100.], fun6.root, 26],
    [fun7, [-1, 4], fun7.root, 8],
    [fun7, [-2, 5], fun7.root, 8],
    [fun7, [-1, 10], fun7.root, 11],
    [fun7, [-5, 50], fun7.root, 18],
    [fun7, [-10, 100], fun7.root, 19],
    [fun8, [2e-4, 2], fun8.root, 9],
    [fun8, [2e-4, 3], fun8.root, 10],
    [fun8, [2e-4, 9], fun8.root, 11],
    [fun8, [2e-4, 27], fun8.root, 12],
    [fun8, [2e-4, 81], fun8.root, 14],
    [fun9, [2e-4, 1], fun9.root, 7],
    [fun9, [2e-4, 3], fun9.root, 8],
    [fun9, [2e-4, 9], fun9.root, 10],
    [fun9, [2e-4, 27], fun9.root, 11],
    [fun9, [2e-4, 81], fun9.root, 13],
]
# Append the ID key: "<function name>.<1-5>", cycling 1..5 per function.
_CHANDRUPATLA_TESTS = [test + [f'{test[0].__name__}.{i%5+1}']
                       for i, test in enumerate(_CHANDRUPATLA_TESTS)]

_CHANDRUPATLA_TESTS_DICTS = [dict(zip(_CHANDRUPATLA_TESTS_KEYS, testcase))
                             for testcase in _CHANDRUPATLA_TESTS]
_add_a_b(_CHANDRUPATLA_TESTS_DICTS)
diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_zeros_py.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_zeros_py.py
new file mode 100644
index 0000000000000000000000000000000000000000..986031920d69578c1c7c470b03deae5b3d24c309
--- /dev/null
+++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/_zeros_py.py
@@ -0,0 +1,1403 @@
import warnings
from collections import namedtuple
import operator
from . import _zeros
from ._optimize import OptimizeResult
import numpy as np


# Default iteration limit and tolerances shared by the bracketing solvers.
_iter = 100
_xtol = 2e-12
_rtol = 4 * np.finfo(float).eps

__all__ = ['newton', 'bisect', 'ridder', 'brentq', 'brenth', 'toms748',
           'RootResults']

# Must agree with CONVERGED, SIGNERR, CONVERR, ... in zeros.h
_ECONVERGED = 0
_ESIGNERR = -1  # used in _chandrupatla
_ECONVERR = -2
_EVALUEERR = -3
_ECALLBACK = -4
_EINPROGRESS = 1

CONVERGED = 'converged'
SIGNERR = 'sign error'
CONVERR = 'convergence error'
VALUEERR = 'value error'
INPROGRESS = 'No error'


# Maps the integer status codes above to their human-readable descriptions.
flag_map = {_ECONVERGED: CONVERGED, _ESIGNERR: SIGNERR, _ECONVERR: CONVERR,
            _EVALUEERR: VALUEERR, _EINPROGRESS: INPROGRESS}


class RootResults(OptimizeResult):
    """Represents the root finding result.

    Attributes
    ----------
    root : float
        Estimated root location.
    iterations : int
        Number of iterations needed to find the root.
    function_calls : int
        Number of times the function was called.
    converged : bool
        True if the routine converged.
    flag : str
        Description of the cause of termination.
    method : str
        Root finding method used.

    """

    def __init__(self, root, iterations, function_calls, flag, method):
        self.root = root
        self.iterations = iterations
        self.function_calls = function_calls
        self.converged = flag == _ECONVERGED
        # Translate known integer codes; pass unknown flags through as-is.
        if flag in flag_map:
            self.flag = flag_map[flag]
        else:
            self.flag = flag
        self.method = method


def results_c(full_output, r, method):
    """Convert a C-solver result tuple into (root, RootResults) if requested.

    `r` is ``(x, funcalls, iterations, flag)`` when ``full_output`` is true,
    otherwise it is already the bare root and is returned unchanged.
    """
    if full_output:
        x, funcalls, iterations, flag = r
        results = RootResults(root=x,
                              iterations=iterations,
                              function_calls=funcalls,
                              flag=flag, method=method)
        return x, results
    else:
        return r


def _results_select(full_output, r, method):
    """Select from a tuple of (root, funccalls, iterations, flag)"""
    x, funcalls, iterations, flag = r
    if full_output:
        results = RootResults(root=x,
                              iterations=iterations,
                              function_calls=funcalls,
                              flag=flag, method=method)
        return x, results
    return x


def _wrap_nan_raise(f):
    """Wrap `f` so a NaN return value raises ValueError.

    The raised error carries the offending ``x`` and the number of function
    calls made so far (``_x`` / ``_function_calls`` attributes), so callers
    can report where the solver stopped.
    """

    def f_raise(x, *args):
        fx = f(x, *args)
        f_raise._function_calls += 1
        if np.isnan(fx):
            msg = (f'The function value at x={x} is NaN; '
                   'solver cannot continue.')
            err = ValueError(msg)
            err._x = x
            err._function_calls = f_raise._function_calls
            raise err
        return fx

    f_raise._function_calls = 0
    return f_raise


def newton(func, x0, fprime=None, args=(), tol=1.48e-8, maxiter=50,
           fprime2=None, x1=None, rtol=0.0,
           full_output=False, disp=True):
    """
    Find a root of a real or complex function using the Newton-Raphson
    (or secant or Halley's) method.

    Find a root of the scalar-valued function `func` given a nearby scalar
    starting point `x0`.
    The Newton-Raphson method is used if the derivative `fprime` of `func`
    is provided, otherwise the secant method is used. If the second order
    derivative `fprime2` of `func` is also provided, then Halley's method is
    used.

    If `x0` is a sequence with more than one item, `newton` returns an array:
    the roots of the function from each (scalar) starting point in `x0`.
    In this case, `func` must be vectorized to return a sequence or array of
    the same shape as its first argument. If `fprime` (`fprime2`) is given,
    then its return must also have the same shape: each element is the first
    (second) derivative of `func` with respect to its only variable evaluated
    at each element of its first argument.

    `newton` is for finding roots of a scalar-valued functions of a single
    variable. For problems involving several variables, see `root`.

    Parameters
    ----------
    func : callable
        The function whose root is wanted. It must be a function of a
        single variable of the form ``f(x,a,b,c...)``, where ``a,b,c...``
        are extra arguments that can be passed in the `args` parameter.
    x0 : float, sequence, or ndarray
        An initial estimate of the root that should be somewhere near the
        actual root. If not scalar, then `func` must be vectorized and return
        a sequence or array of the same shape as its first argument.
    fprime : callable, optional
        The derivative of the function when available and convenient. If it
        is None (default), then the secant method is used.
    args : tuple, optional
        Extra arguments to be used in the function call.
    tol : float, optional
        The allowable error of the root's value. If `func` is complex-valued,
        a larger `tol` is recommended as both the real and imaginary parts
        of `x` contribute to ``|x - x0|``.
    maxiter : int, optional
        Maximum number of iterations.
    fprime2 : callable, optional
        The second order derivative of the function when available and
        convenient. If it is None (default), then the normal Newton-Raphson
        or the secant method is used. If it is not None, then Halley's method
        is used.
    x1 : float, optional
        Another estimate of the root that should be somewhere near the
        actual root. Used if `fprime` is not provided.
    rtol : float, optional
        Tolerance (relative) for termination.
    full_output : bool, optional
        If `full_output` is False (default), the root is returned.
        If True and `x0` is scalar, the return value is ``(x, r)``, where
        ``x`` is the root and ``r`` is a `RootResults` object.
        If True and `x0` is non-scalar, the return value is ``(x, converged,
        zero_der)`` (see Returns section for details).
    disp : bool, optional
        If True, raise a RuntimeError if the algorithm didn't converge, with
        the error message containing the number of iterations and current
        function value. Otherwise, the convergence status is recorded in a
        `RootResults` return object.
        Ignored if `x0` is not scalar.
        *Note: this has little to do with displaying, however,
        the `disp` keyword cannot be renamed for backwards compatibility.*

    Returns
    -------
    root : float, sequence, or ndarray
        Estimated location where function is zero.
    r : `RootResults`, optional
        Present if ``full_output=True`` and `x0` is scalar.
        Object containing information about the convergence. In particular,
        ``r.converged`` is True if the routine converged.
    converged : ndarray of bool, optional
        Present if ``full_output=True`` and `x0` is non-scalar.
        For vector functions, indicates which elements converged successfully.
    zero_der : ndarray of bool, optional
        Present if ``full_output=True`` and `x0` is non-scalar.
        For vector functions, indicates which elements had a zero derivative.

    See Also
    --------
    root_scalar : interface to root solvers for scalar functions
    root : interface to root solvers for multi-input, multi-output functions

    Notes
    -----
    The convergence rate of the Newton-Raphson method is quadratic,
    the Halley method is cubic, and the secant method is sub-quadratic.
    However, the stopping criterion used here is the step size, and there
    is no guarantee that a root has been found. Consequently, the result
    should be verified. Safer algorithms are brentq, brenth, ridder, and
    bisect, but they all require that the root first be bracketed in an
    interval where the function changes sign. The brentq algorithm is
    recommended for general use in one dimensional problems when such an
    interval has been found.

    When `newton` is used with arrays, it is best suited for problems where
    the initial guesses are all a similar distance from the roots, where
    `args` may also be arrays (solving a class of similar problems at once),
    and where `x0` is larger than O(100) elements; otherwise a naive loop
    may perform as well or better.

    Examples
    --------
    >>> from scipy import optimize

    >>> def f(x):
    ...     return (x**3 - 1)  # only one real root at x = 1

    ``fprime`` is not provided, use the secant method:

    >>> optimize.newton(f, 1.5)
    1.0000000000000016

    Only ``fprime`` is provided, use the Newton-Raphson method:

    >>> optimize.newton(f, 1.5, fprime=lambda x: 3 * x**2)
    1.0

    Both ``fprime2`` and ``fprime`` are provided, use Halley's method:

    >>> optimize.newton(f, 1.5, fprime=lambda x: 3 * x**2,
    ...                 fprime2=lambda x: 6 * x)
    1.0

    """
    if tol <= 0:
        raise ValueError("tol too small (%g <= 0)" % tol)
    maxiter = operator.index(maxiter)
    if maxiter < 1:
        raise ValueError("maxiter must be greater than 0")
    # Non-scalar x0: delegate to the vectorized implementation.
    if np.size(x0) > 1:
        return _array_newton(func, x0, fprime, args, tol, maxiter, fprime2,
                             full_output)

    # Convert to float (don't use float(x0); this works also for complex x0)
    # Use np.asarray because we want x0 to be a numpy object, not a Python
    # object. e.g. np.complex(1+1j) > 0 is possible, but (1 + 1j) > 0 raises
    # a TypeError
    x0 = np.asarray(x0)[()] * 1.0
    p0 = x0
    funcalls = 0
    if fprime is not None:
        # Newton-Raphson method
        method = "newton"
        for itr in range(maxiter):
            # first evaluate fval
            fval = func(p0, *args)
            funcalls += 1
            # If fval is 0, a root has been found, then terminate
            if fval == 0:
                return _results_select(
                    full_output, (p0, funcalls, itr, _ECONVERGED), method)
            fder = fprime(p0, *args)
            funcalls += 1
            if fder == 0:
                # Cannot take a Newton step; either raise or warn per `disp`.
                msg = "Derivative was zero."
                if disp:
                    msg += (
                        " Failed to converge after %d iterations, value is %s."
                        % (itr + 1, p0))
                    raise RuntimeError(msg)
                warnings.warn(msg, RuntimeWarning, stacklevel=2)
                return _results_select(
                    full_output, (p0, funcalls, itr + 1, _ECONVERR), method)
            newton_step = fval / fder
            if fprime2:
                fder2 = fprime2(p0, *args)
                funcalls += 1
                method = "halley"
                # Halley's method:
                #   newton_step /= (1.0 - 0.5 * newton_step * fder2 / fder)
                # Only do it if denominator stays close enough to 1
                # Rationale: If 1-adj < 0, then Halley sends x in the
                # opposite direction to Newton. Doesn't happen if x is close
                # enough to root.
                adj = newton_step * fder2 / fder / 2
                if np.abs(adj) < 1:
                    newton_step /= 1.0 - adj
            p = p0 - newton_step
            if np.isclose(p, p0, rtol=rtol, atol=tol):
                return _results_select(
                    full_output, (p, funcalls, itr + 1, _ECONVERGED), method)
            p0 = p
    else:
        # Secant method
        method = "secant"
        if x1 is not None:
            if x1 == x0:
                raise ValueError("x1 and x0 must be different")
            p1 = x1
        else:
            # No second point given: perturb x0 slightly to get one.
            eps = 1e-4
            p1 = x0 * (1 + eps)
            p1 += (eps if p1 >= 0 else -eps)
        q0 = func(p0, *args)
        funcalls += 1
        q1 = func(p1, *args)
        funcalls += 1
        # Keep the point with the smaller |f| as the latest iterate.
        if abs(q1) < abs(q0):
            p0, p1, q0, q1 = p1, p0, q1, q0
        for itr in range(maxiter):
            if q1 == q0:
                # Flat secant: cannot continue; report midpoint as best guess.
                if p1 != p0:
                    msg = "Tolerance of %s reached." % (p1 - p0)
                    if disp:
                        msg += (
                            " Failed to converge after %d iterations, value is %s."
                            % (itr + 1, p1))
                        raise RuntimeError(msg)
                    warnings.warn(msg, RuntimeWarning, stacklevel=2)
                p = (p1 + p0) / 2.0
                return _results_select(
                    full_output, (p, funcalls, itr + 1, _ECONVERR), method)
            else:
                # Use the formulation dividing by the larger |f| value
                # for numerical stability.
                if abs(q1) > abs(q0):
                    p = (-q0 / q1 * p1 + p0) / (1 - q0 / q1)
                else:
                    p = (-q1 / q0 * p0 + p1) / (1 - q1 / q0)
            if np.isclose(p, p1, rtol=rtol, atol=tol):
                return _results_select(
                    full_output, (p, funcalls, itr + 1, _ECONVERGED), method)
            p0, q0 = p1, q1
            p1 = p
            q1 = func(p1, *args)
            funcalls += 1

    if disp:
        msg = ("Failed to converge after %d iterations, value is %s."
               % (itr + 1, p))
        raise RuntimeError(msg)

    return _results_select(full_output, (p, funcalls, itr + 1, _ECONVERR), method)


def _array_newton(func, x0, fprime, args, tol, maxiter, fprime2, full_output):
    """
    A vectorized version of Newton, Halley, and secant methods for arrays.

    Do not use this method directly. This method is called from `newton`
    when ``np.size(x0) > 1`` is ``True``. For docstring, see `newton`.
    """
    # Explicitly copy `x0` as `p` will be modified inplace, but the
    # user's array should not be altered.
    p = np.array(x0, copy=True)

    # `failures` marks elements not yet converged; `nz_der` marks elements
    # whose (secant or analytic) derivative is nonzero.
    failures = np.ones_like(p, dtype=bool)
    nz_der = np.ones_like(failures)
    if fprime is not None:
        # Newton-Raphson method
        for iteration in range(maxiter):
            # first evaluate fval
            fval = np.asarray(func(p, *args))
            # If all fval are 0, all roots have been found, then terminate
            if not fval.any():
                failures = fval.astype(bool)
                break
            fder = np.asarray(fprime(p, *args))
            nz_der = (fder != 0)
            # stop iterating if all derivatives are zero
            if not nz_der.any():
                break
            # Newton step
            dp = fval[nz_der] / fder[nz_der]
            if fprime2 is not None:
                # Halley correction to the Newton step.
                fder2 = np.asarray(fprime2(p, *args))
                dp = dp / (1.0 - 0.5 * dp * fder2[nz_der] / fder[nz_der])
            # only update nonzero derivatives
            p = np.asarray(p, dtype=np.result_type(p, dp, np.float64))
            p[nz_der] -= dp
            failures[nz_der] = np.abs(dp) >= tol  # items not yet converged
            # stop iterating if there aren't any failures, not incl zero der
            if not failures[nz_der].any():
                break
    else:
        # Secant method
        dx = np.finfo(float).eps**0.33
        p1 = p * (1 + dx) + np.where(p >= 0, dx, -dx)
        q0 = np.asarray(func(p, *args))
        q1 = np.asarray(func(p1, *args))
        active = np.ones_like(p, dtype=bool)
        for iteration in range(maxiter):
            nz_der = (q1 != q0)
            # stop iterating if all derivatives are zero
            if not nz_der.any():
                p = (p1 + p) / 2.0
                break
            # Secant Step
            dp = (q1 * (p1 - p))[nz_der] / (q1 - q0)[nz_der]
            # only update nonzero derivatives
            p = np.asarray(p, dtype=np.result_type(p, p1, dp, np.float64))
            p[nz_der] = p1[nz_der] - dp
            # Elements still active but with a flat secant get the midpoint.
            active_zero_der = ~nz_der & active
            p[active_zero_der] = (p1 + p)[active_zero_der] / 2.0
            active &= nz_der  # don't assign zero derivatives again
            failures[nz_der] = np.abs(dp) >= tol  # not yet converged
            # stop iterating if there aren't any failures, not incl zero der
            if not failures[nz_der].any():
                break
            p1, p = p, p1
            q0 = q1
            q1 = np.asarray(func(p1, *args))

    zero_der = ~nz_der & failures  # don't include converged with zero-ders
    if zero_der.any():
        # Secant warnings
        if fprime is None:
            nonzero_dp = (p1 != p)
            # non-zero dp, but infinite newton step
            zero_der_nz_dp = (zero_der & nonzero_dp)
            if zero_der_nz_dp.any():
                rms = np.sqrt(
                    sum((p1[zero_der_nz_dp] - p[zero_der_nz_dp]) ** 2)
                )
                warnings.warn(f'RMS of {rms:g} reached', RuntimeWarning,
                              stacklevel=3)
        # Newton or Halley warnings
        else:
            all_or_some = 'all' if zero_der.all() else 'some'
            msg = f'{all_or_some:s} derivatives were zero'
            warnings.warn(msg, RuntimeWarning, stacklevel=3)
    elif failures.any():
        all_or_some = 'all' if failures.all() else 'some'
        msg = f'{all_or_some:s} failed to converge after {maxiter:d} iterations'
        if failures.all():
            raise RuntimeError(msg)
        warnings.warn(msg, RuntimeWarning, stacklevel=3)

    if full_output:
        result = namedtuple('result', ('root', 'converged', 'zero_der'))
        p = result(p, ~failures, zero_der)

    return p


def bisect(f, a, b, args=(),
           xtol=_xtol, rtol=_rtol, maxiter=_iter,
           full_output=False, disp=True):
    """
    Find root of a function within an interval using bisection.

    Basic bisection routine to find a root of the function `f` between the
    arguments `a` and `b`. `f(a)` and `f(b)` cannot have the same signs.
    Slow but sure.

    Parameters
    ----------
    f : function
        Python function returning a number. `f` must be continuous, and
        f(a) and f(b) must have opposite signs.
    a : scalar
        One end of the bracketing interval [a,b].
    b : scalar
        The other end of the bracketing interval [a,b].
    xtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter must be positive.
    rtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter cannot be smaller than its default value of
        ``4*np.finfo(float).eps``.
    maxiter : int, optional
        If convergence is not achieved in `maxiter` iterations, an error is
        raised. Must be >= 0.
    args : tuple, optional
        Containing extra arguments for the function `f`.
        `f` is called by ``apply(f, (x)+args)``.
    full_output : bool, optional
        If `full_output` is False, the root is returned. If `full_output` is
        True, the return value is ``(x, r)``, where x is the root, and r is
        a `RootResults` object.
    disp : bool, optional
        If True, raise RuntimeError if the algorithm didn't converge.
        Otherwise, the convergence status is recorded in a `RootResults`
        return object.

    Returns
    -------
    root : float
        Root of `f` between `a` and `b`.
    r : `RootResults` (present if ``full_output = True``)
        Object containing information about the convergence. In particular,
        ``r.converged`` is True if the routine converged.

    Examples
    --------

    >>> def f(x):
    ...     return (x**2 - 1)

    >>> from scipy import optimize

    >>> root = optimize.bisect(f, 0, 2)
    >>> root
    1.0

    >>> root = optimize.bisect(f, -2, 0)
    >>> root
    -1.0

    See Also
    --------
    brentq, brenth, bisect, newton
    fixed_point : scalar fixed-point finder
    fsolve : n-dimensional root-finding

    """
    # Normalize a bare scalar `args` into a 1-tuple.
    if not isinstance(args, tuple):
        args = (args,)
    maxiter = operator.index(maxiter)
    if xtol <= 0:
        raise ValueError("xtol too small (%g <= 0)" % xtol)
    if rtol < _rtol:
        raise ValueError(f"rtol too small ({rtol:g} < {_rtol:g})")
    # NaN function values abort the C solver with a descriptive ValueError.
    f = _wrap_nan_raise(f)
    r = _zeros._bisect(f, a, b, xtol, rtol, maxiter, args, full_output, disp)
    return results_c(full_output, r, "bisect")


def ridder(f, a, b, args=(),
           xtol=_xtol, rtol=_rtol, maxiter=_iter,
           full_output=False, disp=True):
    """
    Find a root of a function in an interval using Ridder's method.

    Parameters
    ----------
    f : function
        Python function returning a number. f must be continuous, and f(a) and
        f(b) must have opposite signs.
    a : scalar
        One end of the bracketing interval [a,b].
    b : scalar
        The other end of the bracketing interval [a,b].
    xtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter must be positive.
    rtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter cannot be smaller than its default value of
        ``4*np.finfo(float).eps``.
    maxiter : int, optional
        If convergence is not achieved in `maxiter` iterations, an error is
        raised. Must be >= 0.
    args : tuple, optional
        Containing extra arguments for the function `f`.
        `f` is called by ``apply(f, (x)+args)``.
    full_output : bool, optional
        If `full_output` is False, the root is returned. If `full_output` is
        True, the return value is ``(x, r)``, where `x` is the root, and `r`
        is a `RootResults` object.
    disp : bool, optional
        If True, raise RuntimeError if the algorithm didn't converge.
        Otherwise, the convergence status is recorded in any `RootResults`
        return object.

    Returns
    -------
    root : float
        Root of `f` between `a` and `b`.
    r : `RootResults` (present if ``full_output = True``)
        Object containing information about the convergence.
        In particular, ``r.converged`` is True if the routine converged.

    See Also
    --------
    brentq, brenth, bisect, newton : 1-D root-finding
    fixed_point : scalar fixed-point finder

    Notes
    -----
    Uses [Ridders1979]_ method to find a root of the function `f` between
    the arguments `a` and `b`. Ridders' method is faster than bisection, but
    not generally as fast as the Brent routines. [Ridders1979]_ provides the
    classic description and source of the algorithm. A description can also
    be found in any recent edition of Numerical Recipes.

    The routine used here diverges slightly from standard presentations in
    order to be a bit more careful of tolerance.

    References
    ----------
    .. [Ridders1979]
       Ridders, C. F. J. "A New Algorithm for Computing a
       Single Root of a Real Continuous Function."
       IEEE Trans. Circuits Systems 26, 979-980, 1979.

    Examples
    --------

    >>> def f(x):
    ...     return (x**2 - 1)

    >>> from scipy import optimize

    >>> root = optimize.ridder(f, 0, 2)
    >>> root
    1.0

    >>> root = optimize.ridder(f, -2, 0)
    >>> root
    -1.0
    """
    # Normalize a bare scalar `args` into a 1-tuple.
    if not isinstance(args, tuple):
        args = (args,)
    maxiter = operator.index(maxiter)
    if xtol <= 0:
        raise ValueError("xtol too small (%g <= 0)" % xtol)
    if rtol < _rtol:
        raise ValueError(f"rtol too small ({rtol:g} < {_rtol:g})")
    # NaN function values abort the C solver with a descriptive ValueError.
    f = _wrap_nan_raise(f)
    r = _zeros._ridder(f, a, b, xtol, rtol, maxiter, args, full_output, disp)
    return results_c(full_output, r, "ridder")


def brentq(f, a, b, args=(),
           xtol=_xtol, rtol=_rtol, maxiter=_iter,
           full_output=False, disp=True):
    """
    Find a root of a function in a bracketing interval using Brent's method.

    Uses the classic Brent's method to find a root of the function `f` on
    the sign changing interval [a , b]. Generally considered the best of the
    rootfinding routines here. It is a safe version of the secant method that
    uses inverse quadratic extrapolation. Brent's method combines root
    bracketing, interval bisection, and inverse quadratic interpolation. It is
    sometimes known as the van Wijngaarden-Dekker-Brent method. Brent (1973)
    claims convergence is guaranteed for functions computable within [a,b].

    [Brent1973]_ provides the classic description of the algorithm. Another
    description can be found in a recent edition of Numerical Recipes,
    including [PressEtal1992]_. A third description is at
    http://mathworld.wolfram.com/BrentsMethod.html. It should be easy to
    understand the algorithm just by reading our code. Our code diverges a bit
    from standard presentations: we choose a different formula for the
    extrapolation step.

    Parameters
    ----------
    f : function
        Python function returning a number.
        The function :math:`f`
        must be continuous, and :math:`f(a)` and :math:`f(b)` must
        have opposite signs.
    a : scalar
        One end of the bracketing interval :math:`[a, b]`.
    b : scalar
        The other end of the bracketing interval :math:`[a, b]`.
    xtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter must be positive. For nice functions, Brent's
        method will often satisfy the above condition with ``xtol/2``
        and ``rtol/2``. [Brent1973]_
    rtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter cannot be smaller than its default value of
        ``4*np.finfo(float).eps``. For nice functions, Brent's
        method will often satisfy the above condition with ``xtol/2``
        and ``rtol/2``. [Brent1973]_
    maxiter : int, optional
        If convergence is not achieved in `maxiter` iterations, an error is
        raised. Must be >= 0.
    args : tuple, optional
        Containing extra arguments for the function `f`.
        `f` is called by ``apply(f, (x)+args)``.
    full_output : bool, optional
        If `full_output` is False, the root is returned. If `full_output` is
        True, the return value is ``(x, r)``, where `x` is the root, and `r`
        is a `RootResults` object.
    disp : bool, optional
        If True, raise RuntimeError if the algorithm didn't converge.
        Otherwise, the convergence status is recorded in any `RootResults`
        return object.

    Returns
    -------
    root : float
        Root of `f` between `a` and `b`.
    r : `RootResults` (present if ``full_output = True``)
        Object containing information about the convergence. In particular,
        ``r.converged`` is True if the routine converged.

    Notes
    -----
    `f` must be continuous. f(a) and f(b) must have opposite signs.

    Related functions fall into several classes:

    multivariate local optimizers
      `fmin`, `fmin_powell`, `fmin_cg`, `fmin_bfgs`, `fmin_ncg`
    nonlinear least squares minimizer
      `leastsq`
    constrained multivariate optimizers
      `fmin_l_bfgs_b`, `fmin_tnc`, `fmin_cobyla`
    global optimizers
      `basinhopping`, `brute`, `differential_evolution`
    local scalar minimizers
      `fminbound`, `brent`, `golden`, `bracket`
    N-D root-finding
      `fsolve`
    1-D root-finding
      `brenth`, `ridder`, `bisect`, `newton`
    scalar fixed-point finder
      `fixed_point`

    References
    ----------
    .. [Brent1973]
       Brent, R. P.,
       *Algorithms for Minimization Without Derivatives*.
       Englewood Cliffs, NJ: Prentice-Hall, 1973. Ch. 3-4.

    .. [PressEtal1992]
       Press, W. H.; Flannery, B. P.; Teukolsky, S. A.; and Vetterling, W. T.
       *Numerical Recipes in FORTRAN: The Art of Scientific Computing*, 2nd ed.
       Cambridge, England: Cambridge University Press, pp. 352-355, 1992.
       Section 9.3: "Van Wijngaarden-Dekker-Brent Method."

    Examples
    --------
    >>> def f(x):
    ...     return (x**2 - 1)

    >>> from scipy import optimize

    >>> root = optimize.brentq(f, -2, 0)
    >>> root
    -1.0

    >>> root = optimize.brentq(f, 0, 2)
    >>> root
    1.0
    """
    # Normalize a bare scalar `args` into a 1-tuple.
    if not isinstance(args, tuple):
        args = (args,)
    maxiter = operator.index(maxiter)
    if xtol <= 0:
        raise ValueError("xtol too small (%g <= 0)" % xtol)
    if rtol < _rtol:
        raise ValueError(f"rtol too small ({rtol:g} < {_rtol:g})")
    # NaN function values abort the C solver with a descriptive ValueError.
    f = _wrap_nan_raise(f)
    r = _zeros._brentq(f, a, b, xtol, rtol, maxiter, args, full_output, disp)
    return results_c(full_output, r, "brentq")


def brenth(f, a, b, args=(),
           xtol=_xtol, rtol=_rtol, maxiter=_iter,
           full_output=False, disp=True):
    """Find a root of a function in a bracketing interval using Brent's
    method with hyperbolic extrapolation.
    A variation on the classic Brent routine to find a root of the function f
    between the arguments a and b that uses hyperbolic extrapolation instead
    of inverse quadratic extrapolation. Bus & Dekker (1975) guarantee
    convergence for this method, claiming that the upper bound of function
    evaluations here is 4 or 5 times that of bisection.
    f(a) and f(b) cannot have the same signs. Generally, on a par with the
    brent routine, but not as heavily tested. It is a safe version of the
    secant method that uses hyperbolic extrapolation.
    The version here is by Chuck Harris, and implements Algorithm M of
    [BusAndDekker1975]_, where further details (convergence properties,
    additional remarks and such) can be found

    Parameters
    ----------
    f : function
        Python function returning a number. f must be continuous, and f(a) and
        f(b) must have opposite signs.
    a : scalar
        One end of the bracketing interval [a,b].
    b : scalar
        The other end of the bracketing interval [a,b].
    xtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter must be positive. As with `brentq`, for nice
        functions the method will often satisfy the above condition
        with ``xtol/2`` and ``rtol/2``.
    rtol : number, optional
        The computed root ``x0`` will satisfy ``np.allclose(x, x0,
        atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The
        parameter cannot be smaller than its default value of
        ``4*np.finfo(float).eps``. As with `brentq`, for nice functions
        the method will often satisfy the above condition with
        ``xtol/2`` and ``rtol/2``.
    maxiter : int, optional
        If convergence is not achieved in `maxiter` iterations, an error is
        raised. Must be >= 0.
    args : tuple, optional
        Containing extra arguments for the function `f`.
        `f` is called by ``apply(f, (x)+args)``.
    full_output : bool, optional
        If `full_output` is False, the root is returned. If `full_output` is
        True, the return value is ``(x, r)``, where `x` is the root, and `r`
        is a `RootResults` object.
    disp : bool, optional
        If True, raise RuntimeError if the algorithm didn't converge.
        Otherwise, the convergence status is recorded in any `RootResults`
        return object.

    Returns
    -------
    root : float
        Root of `f` between `a` and `b`.
    r : `RootResults` (present if ``full_output = True``)
        Object containing information about the convergence. In particular,
        ``r.converged`` is True if the routine converged.

    See Also
    --------
    fmin, fmin_powell, fmin_cg, fmin_bfgs, fmin_ncg : multivariate local
        optimizers
    leastsq : nonlinear least squares minimizer
    fmin_l_bfgs_b, fmin_tnc, fmin_cobyla : constrained multivariate optimizers
    basinhopping, differential_evolution, brute : global optimizers
    fminbound, brent, golden, bracket : local scalar minimizers
    fsolve : N-D root-finding
    brentq, brenth, ridder, bisect, newton : 1-D root-finding
    fixed_point : scalar fixed-point finder

    References
    ----------
    .. [BusAndDekker1975]
       Bus, J. C. P., Dekker, T. J.,
       "Two Efficient Algorithms with Guaranteed Convergence for Finding a
       Zero of a Function", ACM Transactions on Mathematical Software, Vol. 1,
       Issue 4, Dec. 1975, pp. 330-345. Section 3: "Algorithm M".
       :doi:`10.1145/355656.355659`

    Examples
    --------
    >>> def f(x):
    ...     return (x**2 - 1)

    >>> from scipy import optimize

    >>> root = optimize.brenth(f, -2, 0)
    >>> root
    -1.0

    >>> root = optimize.brenth(f, 0, 2)
    >>> root
    1.0

    """
    # Normalize a bare scalar `args` into a 1-tuple.
    if not isinstance(args, tuple):
        args = (args,)
    maxiter = operator.index(maxiter)
    if xtol <= 0:
        raise ValueError("xtol too small (%g <= 0)" % xtol)
    if rtol < _rtol:
        raise ValueError(f"rtol too small ({rtol:g} < {_rtol:g})")
    # NaN function values abort the C solver with a descriptive ValueError.
    f = _wrap_nan_raise(f)
    r = _zeros._brenth(f, a, b, xtol, rtol, maxiter, args, full_output, disp)
    return results_c(full_output, r, "brenth")


################################
# TOMS "Algorithm 748: Enclosing Zeros of Continuous Functions", by
# Alefeld, G. E. and Potra, F. A. and Shi, Yixun,
# See [1]


def _notclose(fs, rtol=_rtol, atol=_xtol):
    # Ensure not None, not 0, all finite, and not very close to each other
    notclosefvals = (
        all(fs) and all(np.isfinite(fs)) and
        not any(any(np.isclose(_f, fs[i + 1:], rtol=rtol, atol=atol))
                for i, _f in enumerate(fs[:-1])))
    return notclosefvals


def _secant(xvals, fvals):
    """Perform a secant step, taking a little care"""
    # Secant has many "mathematically" equivalent formulations
    # x2 = x0 - (x1 - x0)/(f1 - f0) * f0
    #    = x1 - (x1 - x0)/(f1 - f0) * f1
    #    = (-x1 * f0 + x0 * f1) / (f1 - f0)
    #    = (-f0 / f1 * x1 + x0) / (1 - f0 / f1)
    #    = (-f1 / f0 * x0 + x1) / (1 - f1 / f0)
    x0, x1 = xvals[:2]
    f0, f1 = fvals[:2]
    # Flat chord: no secant step is possible.
    if f0 == f1:
        return np.nan
    # Divide by the larger |f| value for numerical stability.
    if np.abs(f1) > np.abs(f0):
        x2 = (-f0 / f1 * x1 + x0) / (1 - f0 / f1)
    else:
        x2 = (-f1 / f0 * x0 + x1) / (1 - f1 / f0)
    return x2


def _update_bracket(ab, fab, c, fc):
    """Update a bracket given (c, fc), return the discarded endpoints.

    Mutates ``ab``/``fab`` in place: the endpoint whose function value has
    the same sign as ``fc`` is replaced by ``(c, fc)``.
    """
    fa, fb = fab
    idx = (0 if np.sign(fa) * np.sign(fc) > 0 else 1)
    rx, rfx = ab[idx], fab[idx]
    fab[idx] = fc
    ab[idx] = c
    return rx, rfx


def _compute_divided_differences(xvals, fvals, N=None, full=True,
                                 forward=True):
    """Return a matrix of divided differences for the xvals, fvals
pairs + + DD[i, j] = f[x_{i-j}, ..., x_i] for 0 <= j <= i + + If full is False, just return the main diagonal(or last row): + f[a], f[a, b] and f[a, b, c]. + If forward is False, return f[c], f[b, c], f[a, b, c].""" + if full: + if forward: + xvals = np.asarray(xvals) + else: + xvals = np.array(xvals)[::-1] + M = len(xvals) + N = M if N is None else min(N, M) + DD = np.zeros([M, N]) + DD[:, 0] = fvals[:] + for i in range(1, N): + DD[i:, i] = (np.diff(DD[i - 1:, i - 1]) / + (xvals[i:] - xvals[:M - i])) + return DD + + xvals = np.asarray(xvals) + dd = np.array(fvals) + row = np.array(fvals) + idx2Use = (0 if forward else -1) + dd[0] = fvals[idx2Use] + for i in range(1, len(xvals)): + denom = xvals[i:i + len(row) - 1] - xvals[:len(row) - 1] + row = np.diff(row)[:] / denom + dd[i] = row[idx2Use] + return dd + + +def _interpolated_poly(xvals, fvals, x): + """Compute p(x) for the polynomial passing through the specified locations. + + Use Neville's algorithm to compute p(x) where p is the minimal degree + polynomial passing through the points xvals, fvals""" + xvals = np.asarray(xvals) + N = len(xvals) + Q = np.zeros([N, N]) + D = np.zeros([N, N]) + Q[:, 0] = fvals[:] + D[:, 0] = fvals[:] + for k in range(1, N): + alpha = D[k:, k - 1] - Q[k - 1:N - 1, k - 1] + diffik = xvals[0:N - k] - xvals[k:N] + Q[k:, k] = (xvals[k:] - x) / diffik * alpha + D[k:, k] = (xvals[:N - k] - x) / diffik * alpha + # Expect Q[-1, 1:] to be small relative to Q[-1, 0] as x approaches a root + return np.sum(Q[-1, 1:]) + Q[-1, 0] + + +def _inverse_poly_zero(a, b, c, d, fa, fb, fc, fd): + """Inverse cubic interpolation f-values -> x-values + + Given four points (fa, a), (fb, b), (fc, c), (fd, d) with + fa, fb, fc, fd all distinct, find poly IP(y) through the 4 points + and compute x=IP(0). 
+ """ + return _interpolated_poly([fa, fb, fc, fd], [a, b, c, d], 0) + + +def _newton_quadratic(ab, fab, d, fd, k): + """Apply Newton-Raphson like steps, using divided differences to approximate f' + + ab is a real interval [a, b] containing a root, + fab holds the real values of f(a), f(b) + d is a real number outside [ab, b] + k is the number of steps to apply + """ + a, b = ab + fa, fb = fab + _, B, A = _compute_divided_differences([a, b, d], [fa, fb, fd], + forward=True, full=False) + + # _P is the quadratic polynomial through the 3 points + def _P(x): + # Horner evaluation of fa + B * (x - a) + A * (x - a) * (x - b) + return (A * (x - b) + B) * (x - a) + fa + + if A == 0: + r = a - fa / B + else: + r = (a if np.sign(A) * np.sign(fa) > 0 else b) + # Apply k Newton-Raphson steps to _P(x), starting from x=r + for i in range(k): + r1 = r - _P(r) / (B + A * (2 * r - a - b)) + if not (ab[0] < r1 < ab[1]): + if (ab[0] < r < ab[1]): + return r + r = sum(ab) / 2.0 + break + r = r1 + + return r + + +class TOMS748Solver: + """Solve f(x, *args) == 0 using Algorithm748 of Alefeld, Potro & Shi. + """ + _MU = 0.5 + _K_MIN = 1 + _K_MAX = 100 # A very high value for real usage. Expect 1, 2, maybe 3. 
+ + def __init__(self): + self.f = None + self.args = None + self.function_calls = 0 + self.iterations = 0 + self.k = 2 + # ab=[a,b] is a global interval containing a root + self.ab = [np.nan, np.nan] + # fab is function values at a, b + self.fab = [np.nan, np.nan] + self.d = None + self.fd = None + self.e = None + self.fe = None + self.disp = False + self.xtol = _xtol + self.rtol = _rtol + self.maxiter = _iter + + def configure(self, xtol, rtol, maxiter, disp, k): + self.disp = disp + self.xtol = xtol + self.rtol = rtol + self.maxiter = maxiter + # Silently replace a low value of k with 1 + self.k = max(k, self._K_MIN) + # Noisily replace a high value of k with self._K_MAX + if self.k > self._K_MAX: + msg = "toms748: Overriding k: ->%d" % self._K_MAX + warnings.warn(msg, RuntimeWarning, stacklevel=3) + self.k = self._K_MAX + + def _callf(self, x, error=True): + """Call the user-supplied function, update book-keeping""" + fx = self.f(x, *self.args) + self.function_calls += 1 + if not np.isfinite(fx) and error: + raise ValueError(f"Invalid function value: f({x:f}) -> {fx} ") + return fx + + def get_result(self, x, flag=_ECONVERGED): + r"""Package the result and statistics into a tuple.""" + return (x, self.function_calls, self.iterations, flag) + + def _update_bracket(self, c, fc): + return _update_bracket(self.ab, self.fab, c, fc) + + def start(self, f, a, b, args=()): + r"""Prepare for the iterations.""" + self.function_calls = 0 + self.iterations = 0 + + self.f = f + self.args = args + self.ab[:] = [a, b] + if not np.isfinite(a) or np.imag(a) != 0: + raise ValueError("Invalid x value: %s " % (a)) + if not np.isfinite(b) or np.imag(b) != 0: + raise ValueError("Invalid x value: %s " % (b)) + + fa = self._callf(a) + if not np.isfinite(fa) or np.imag(fa) != 0: + raise ValueError(f"Invalid function value: f({a:f}) -> {fa} ") + if fa == 0: + return _ECONVERGED, a + fb = self._callf(b) + if not np.isfinite(fb) or np.imag(fb) != 0: + raise ValueError(f"Invalid function 
value: f({b:f}) -> {fb} ") + if fb == 0: + return _ECONVERGED, b + + if np.sign(fb) * np.sign(fa) > 0: + raise ValueError("f(a) and f(b) must have different signs, but " + f"f({a:e})={fa:e}, f({b:e})={fb:e} ") + self.fab[:] = [fa, fb] + + return _EINPROGRESS, sum(self.ab) / 2.0 + + def get_status(self): + """Determine the current status.""" + a, b = self.ab[:2] + if np.isclose(a, b, rtol=self.rtol, atol=self.xtol): + return _ECONVERGED, sum(self.ab) / 2.0 + if self.iterations >= self.maxiter: + return _ECONVERR, sum(self.ab) / 2.0 + return _EINPROGRESS, sum(self.ab) / 2.0 + + def iterate(self): + """Perform one step in the algorithm. + + Implements Algorithm 4.1(k=1) or 4.2(k=2) in [APS1995] + """ + self.iterations += 1 + eps = np.finfo(float).eps + d, fd, e, fe = self.d, self.fd, self.e, self.fe + ab_width = self.ab[1] - self.ab[0] # Need the start width below + c = None + + for nsteps in range(2, self.k+2): + # If the f-values are sufficiently separated, perform an inverse + # polynomial interpolation step. Otherwise, nsteps repeats of + # an approximate Newton-Raphson step. + if _notclose(self.fab + [fd, fe], rtol=0, atol=32*eps): + c0 = _inverse_poly_zero(self.ab[0], self.ab[1], d, e, + self.fab[0], self.fab[1], fd, fe) + if self.ab[0] < c0 < self.ab[1]: + c = c0 + if c is None: + c = _newton_quadratic(self.ab, self.fab, d, fd, nsteps) + + fc = self._callf(c) + if fc == 0: + return _ECONVERGED, c + + # re-bracket + e, fe = d, fd + d, fd = self._update_bracket(c, fc) + + # u is the endpoint with the smallest f-value + uix = (0 if np.abs(self.fab[0]) < np.abs(self.fab[1]) else 1) + u, fu = self.ab[uix], self.fab[uix] + + _, A = _compute_divided_differences(self.ab, self.fab, + forward=(uix == 0), full=False) + c = u - 2 * fu / A + if np.abs(c - u) > 0.5 * (self.ab[1] - self.ab[0]): + c = sum(self.ab) / 2.0 + else: + if np.isclose(c, u, rtol=eps, atol=0): + # c didn't change (much). 
+ # Either because the f-values at the endpoints have vastly + # differing magnitudes, or because the root is very close to + # that endpoint + frs = np.frexp(self.fab)[1] + if frs[uix] < frs[1 - uix] - 50: # Differ by more than 2**50 + c = (31 * self.ab[uix] + self.ab[1 - uix]) / 32 + else: + # Make a bigger adjustment, about the + # size of the requested tolerance. + mm = (1 if uix == 0 else -1) + adj = mm * np.abs(c) * self.rtol + mm * self.xtol + c = u + adj + if not self.ab[0] < c < self.ab[1]: + c = sum(self.ab) / 2.0 + + fc = self._callf(c) + if fc == 0: + return _ECONVERGED, c + + e, fe = d, fd + d, fd = self._update_bracket(c, fc) + + # If the width of the new interval did not decrease enough, bisect + if self.ab[1] - self.ab[0] > self._MU * ab_width: + e, fe = d, fd + z = sum(self.ab) / 2.0 + fz = self._callf(z) + if fz == 0: + return _ECONVERGED, z + d, fd = self._update_bracket(z, fz) + + # Record d and e for next iteration + self.d, self.fd = d, fd + self.e, self.fe = e, fe + + status, xn = self.get_status() + return status, xn + + def solve(self, f, a, b, args=(), + xtol=_xtol, rtol=_rtol, k=2, maxiter=_iter, disp=True): + r"""Solve f(x) = 0 given an interval containing a root.""" + self.configure(xtol=xtol, rtol=rtol, maxiter=maxiter, disp=disp, k=k) + status, xn = self.start(f, a, b, args) + if status == _ECONVERGED: + return self.get_result(xn) + + # The first step only has two x-values. 
+ c = _secant(self.ab, self.fab) + if not self.ab[0] < c < self.ab[1]: + c = sum(self.ab) / 2.0 + fc = self._callf(c) + if fc == 0: + return self.get_result(c) + + self.d, self.fd = self._update_bracket(c, fc) + self.e, self.fe = None, None + self.iterations += 1 + + while True: + status, xn = self.iterate() + if status == _ECONVERGED: + return self.get_result(xn) + if status == _ECONVERR: + fmt = "Failed to converge after %d iterations, bracket is %s" + if disp: + msg = fmt % (self.iterations + 1, self.ab) + raise RuntimeError(msg) + return self.get_result(xn, _ECONVERR) + + +def toms748(f, a, b, args=(), k=1, + xtol=_xtol, rtol=_rtol, maxiter=_iter, + full_output=False, disp=True): + """ + Find a root using TOMS Algorithm 748 method. + + Implements the Algorithm 748 method of Alefeld, Potro and Shi to find a + root of the function `f` on the interval `[a , b]`, where `f(a)` and + `f(b)` must have opposite signs. + + It uses a mixture of inverse cubic interpolation and + "Newton-quadratic" steps. [APS1995]. + + Parameters + ---------- + f : function + Python function returning a scalar. The function :math:`f` + must be continuous, and :math:`f(a)` and :math:`f(b)` + have opposite signs. + a : scalar, + lower boundary of the search interval + b : scalar, + upper boundary of the search interval + args : tuple, optional + containing extra arguments for the function `f`. + `f` is called by ``f(x, *args)``. + k : int, optional + The number of Newton quadratic steps to perform each + iteration. ``k>=1``. + xtol : scalar, optional + The computed root ``x0`` will satisfy ``np.allclose(x, x0, + atol=xtol, rtol=rtol)``, where ``x`` is the exact root. The + parameter must be positive. + rtol : scalar, optional + The computed root ``x0`` will satisfy ``np.allclose(x, x0, + atol=xtol, rtol=rtol)``, where ``x`` is the exact root. + maxiter : int, optional + If convergence is not achieved in `maxiter` iterations, an error is + raised. Must be >= 0. 
+ full_output : bool, optional + If `full_output` is False, the root is returned. If `full_output` is + True, the return value is ``(x, r)``, where `x` is the root, and `r` is + a `RootResults` object. + disp : bool, optional + If True, raise RuntimeError if the algorithm didn't converge. + Otherwise, the convergence status is recorded in the `RootResults` + return object. + + Returns + ------- + root : float + Approximate root of `f` + r : `RootResults` (present if ``full_output = True``) + Object containing information about the convergence. In particular, + ``r.converged`` is True if the routine converged. + + See Also + -------- + brentq, brenth, ridder, bisect, newton + fsolve : find roots in N dimensions. + + Notes + ----- + `f` must be continuous. + Algorithm 748 with ``k=2`` is asymptotically the most efficient + algorithm known for finding roots of a four times continuously + differentiable function. + In contrast with Brent's algorithm, which may only decrease the length of + the enclosing bracket on the last step, Algorithm 748 decreases it each + iteration with the same asymptotic efficiency as it finds the root. + + For easy statement of efficiency indices, assume that `f` has 4 + continuouous deriviatives. + For ``k=1``, the convergence order is at least 2.7, and with about + asymptotically 2 function evaluations per iteration, the efficiency + index is approximately 1.65. + For ``k=2``, the order is about 4.6 with asymptotically 3 function + evaluations per iteration, and the efficiency index 1.66. + For higher values of `k`, the efficiency index approaches + the kth root of ``(3k-2)``, hence ``k=1`` or ``k=2`` are + usually appropriate. + + References + ---------- + .. [APS1995] + Alefeld, G. E. and Potra, F. A. and Shi, Yixun, + *Algorithm 748: Enclosing Zeros of Continuous Functions*, + ACM Trans. Math. Softw. Volume 221(1995) + doi = {10.1145/210089.210111} + + Examples + -------- + >>> def f(x): + ... 
return (x**3 - 1) # only one real root at x = 1 + + >>> from scipy import optimize + >>> root, results = optimize.toms748(f, 0, 2, full_output=True) + >>> root + 1.0 + >>> results + converged: True + flag: converged + function_calls: 11 + iterations: 5 + root: 1.0 + method: toms748 + """ + if xtol <= 0: + raise ValueError("xtol too small (%g <= 0)" % xtol) + if rtol < _rtol / 4: + raise ValueError(f"rtol too small ({rtol:g} < {_rtol/4:g})") + maxiter = operator.index(maxiter) + if maxiter < 1: + raise ValueError("maxiter must be greater than 0") + if not np.isfinite(a): + raise ValueError("a is not finite %s" % a) + if not np.isfinite(b): + raise ValueError("b is not finite %s" % b) + if a >= b: + raise ValueError(f"a and b are not an interval [{a}, {b}]") + if not k >= 1: + raise ValueError("k too small (%s < 1)" % k) + + if not isinstance(args, tuple): + args = (args,) + f = _wrap_nan_raise(f) + solver = TOMS748Solver() + result = solver.solve(f, a, b, args=args, k=k, xtol=xtol, rtol=rtol, + maxiter=maxiter, disp=disp) + x, function_calls, iterations, flag = result + return _results_select(full_output, (x, function_calls, iterations, flag), + "toms748") diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/cobyla.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/cobyla.py new file mode 100644 index 0000000000000000000000000000000000000000..87d111d8fc1634e54d3766a3f1c58abd37ac58cb --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/cobyla.py @@ -0,0 +1,19 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. 
+ +from scipy._lib.deprecation import _sub_module_deprecation + + +__all__ = [ # noqa: F822 + 'OptimizeResult', + 'fmin_cobyla', +] + +def __dir__(): + return __all__ + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="cobyla", + private_modules=["_cobyla_py"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/lbfgsb.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/lbfgsb.py new file mode 100644 index 0000000000000000000000000000000000000000..866407cabb3decf0ff72239e6fd372f69f7550c0 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/lbfgsb.py @@ -0,0 +1,23 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. + +from scipy._lib.deprecation import _sub_module_deprecation + + +__all__ = [ # noqa: F822 + 'LbfgsInvHessProduct', + 'OptimizeResult', + 'fmin_l_bfgs_b', + 'zeros', +] + + +def __dir__(): + return __all__ + + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="lbfgsb", + private_modules=["_lbfgsb_py"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/minpack2.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/minpack2.py new file mode 100644 index 0000000000000000000000000000000000000000..cdb3503e0e1e4c886c89bfb62e6a2efc3ba54549 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/minpack2.py @@ -0,0 +1,17 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. 
+ +from scipy._lib.deprecation import _sub_module_deprecation + +__all__: list[str] = [] + + +def __dir__(): + return __all__ + + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="minpack2", + private_modules=["_minpack2"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/moduleTNC.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/moduleTNC.py new file mode 100644 index 0000000000000000000000000000000000000000..3fc5884ed5c39437b7681395419d641443a1fdb8 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/moduleTNC.py @@ -0,0 +1,19 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. + + +from scipy._lib.deprecation import _sub_module_deprecation + + +__all__ = [] + + +def __dir__(): + return __all__ + + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="moduleTNC", + private_modules=["_moduleTNC"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/slsqp.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/slsqp.py new file mode 100644 index 0000000000000000000000000000000000000000..c2b77d2eb447527cd91e92907e06ad53dd1ad3d8 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/slsqp.py @@ -0,0 +1,23 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. 
+ +from scipy._lib.deprecation import _sub_module_deprecation + + +__all__ = [ # noqa: F822 + 'OptimizeResult', + 'fmin_slsqp', + 'slsqp', + 'zeros', +] + + +def __dir__(): + return __all__ + + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="slsqp", + private_modules=["_slsqp_py"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/tnc.py b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/tnc.py new file mode 100644 index 0000000000000000000000000000000000000000..e0f66058bbcc501eb1303eb3075cb55705b93192 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/optimize/tnc.py @@ -0,0 +1,22 @@ +# This file is not meant for public use and will be removed in SciPy v2.0.0. +# Use the `scipy.optimize` namespace for importing the functions +# included below. + +from scipy._lib.deprecation import _sub_module_deprecation + + +__all__ = [ # noqa: F822 + 'OptimizeResult', + 'fmin_tnc', + 'zeros', +] + + +def __dir__(): + return __all__ + + +def __getattr__(name): + return _sub_module_deprecation(sub_package="optimize", module="tnc", + private_modules=["_tnc"], all=__all__, + attribute=name) diff --git a/evalkit_tf446/lib/python3.10/site-packages/scipy/special/tests/__pycache__/test_basic.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/scipy/special/tests/__pycache__/test_basic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73dea79b7570158130e173e9b054c3cbb394c2fd --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/scipy/special/tests/__pycache__/test_basic.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ff6247433671abbc60d4cb2f502879a0c22a78bb8dffb2192e51d69f5b14955 +size 157256