diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c38324f43e34985fbbb2dae08a96a55cb52ab7dc Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b23b7f826feb8d80665a04f1c9d728a33c73005e Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9ad3bdf92dafb69d3991b74e4d85fd2c7b11a1b Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2_grpc.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2_grpc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5899e92d58f6a97c13a0d4b6daede7b53048a0f Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debug_service_pb2_grpc.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debugger_event_metadata_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debugger_event_metadata_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c41920420c2dd368b70951ea0477897296f19286 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/__pycache__/debugger_event_metadata_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2.py new file mode 100644 
index 0000000000000000000000000000000000000000..bf3922fbfefae1ed2d700cd68374bbcd01593051 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/debug/debug_service.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from tensorflow.core.framework import tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2 +from tensorflow.core.profiler import tfprof_log_pb2 as tensorflow_dot_core_dot_profiler_dot_tfprof__log__pb2 +from tensorflow.core.protobuf import debug_pb2 as tensorflow_dot_core_dot_protobuf_dot_debug__pb2 +from tensorflow.core.util import event_pb2 as tensorflow_dot_core_dot_util_dot_event__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)tensorflow/core/debug/debug_service.proto\x12\ntensorflow\x1a&tensorflow/core/framework/tensor.proto\x1a)tensorflow/core/profiler/tfprof_log.proto\x1a$tensorflow/core/protobuf/debug.proto\x1a tensorflow/core/util/event.proto\"\xde\x02\n\nEventReply\x12I\n\x16\x64\x65\x62ug_op_state_changes\x18\x01 \x03(\x0b\x32).tensorflow.EventReply.DebugOpStateChange\x12\'\n\x06tensor\x18\x02 \x01(\x0b\x32\x17.tensorflow.TensorProto\x1a\xdb\x01\n\x12\x44\x65\x62ugOpStateChange\x12>\n\x05state\x18\x01 \x01(\x0e\x32/.tensorflow.EventReply.DebugOpStateChange.State\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x13\n\x0boutput_slot\x18\x03 \x01(\x05\x12\x10\n\x08\x64\x65\x62ug_op\x18\x04 \x01(\t\"K\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\r\n\tREAD_ONLY\x10\x02\x12\x0e\n\nREAD_WRITE\x10\x03\"\xa7\x03\n\rCallTraceback\x12\x35\n\tcall_type\x18\x01 \x01(\x0e\x32\".tensorflow.CallTraceback.CallType\x12\x10\n\x08\x63\x61ll_key\x18\x02 \x01(\t\x12\x30\n\x0corigin_stack\x18\x03 \x01(\x0b\x32\x1a.tensorflow.tfprof.CodeDef\x12L\n\x13origin_id_to_string\x18\x04 \x03(\x0b\x32/.tensorflow.CallTraceback.OriginIdToStringEntry\x12\x36\n\x0fgraph_traceback\x18\x05 \x01(\x0b\x32\x1d.tensorflow.tfprof.OpLogProto\x12\x15\n\rgraph_version\x18\x06 \x01(\x03\x1a\x37\n\x15OriginIdToStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"E\n\x08\x43\x61llType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x13\n\x0fGRAPH_EXECUTION\x10\x01\x12\x13\n\x0f\x45\x41GER_EXECUTION\x10\x02\x32\xdd\x01\n\rEventListener\x12;\n\nSendEvents\x12\x11.tensorflow.Event\x1a\x16.tensorflow.EventReply(\x01\x30\x01\x12\x43\n\x0eSendTracebacks\x12\x19.tensorflow.CallTraceback\x1a\x16.tensorflow.EventReply\x12J\n\x0fSendSourceFiles\x12\x1f.tensorflow.DebuggedSourceFiles\x1a\x16.tensorflow.EventReplyb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.core.debug.debug_service_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _CALLTRACEBACK_ORIGINIDTOSTRINGENTRY._options = None + _CALLTRACEBACK_ORIGINIDTOSTRINGENTRY._serialized_options = b'8\001' + _EVENTREPLY._serialized_start=213 + _EVENTREPLY._serialized_end=563 + _EVENTREPLY_DEBUGOPSTATECHANGE._serialized_start=344 + 
_EVENTREPLY_DEBUGOPSTATECHANGE._serialized_end=563 + _EVENTREPLY_DEBUGOPSTATECHANGE_STATE._serialized_start=488 + _EVENTREPLY_DEBUGOPSTATECHANGE_STATE._serialized_end=563 + _CALLTRACEBACK._serialized_start=566 + _CALLTRACEBACK._serialized_end=989 + _CALLTRACEBACK_ORIGINIDTOSTRINGENTRY._serialized_start=863 + _CALLTRACEBACK_ORIGINIDTOSTRINGENTRY._serialized_end=918 + _CALLTRACEBACK_CALLTYPE._serialized_start=920 + _CALLTRACEBACK_CALLTYPE._serialized_end=989 + _EVENTLISTENER._serialized_start=992 + _EVENTLISTENER._serialized_end=1213 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2_grpc.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2_grpc.py new file mode 100644 index 0000000000000000000000000000000000000000..d8b581395ba8fc611ceb819a5b639d541520c81a --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debug_service_pb2_grpc.py @@ -0,0 +1,90 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from tensorflow.core.debug import debug_service_pb2 as tensorflow_dot_core_dot_debug_dot_debug__service__pb2 +from tensorflow.core.protobuf import debug_pb2 as tensorflow_dot_core_dot_protobuf_dot_debug__pb2 +from tensorflow.core.util import event_pb2 as tensorflow_dot_core_dot_util_dot_event__pb2 + + +class EventListenerStub(object): + """EventListener: Receives Event protos, e.g., from debugged TensorFlow + runtime(s). + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.SendEvents = channel.stream_stream( + '/tensorflow.EventListener/SendEvents', + request_serializer=tensorflow_dot_core_dot_util_dot_event__pb2.Event.SerializeToString, + response_deserializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.FromString, + ) + self.SendTracebacks = channel.unary_unary( + '/tensorflow.EventListener/SendTracebacks', + request_serializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.CallTraceback.SerializeToString, + response_deserializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.FromString, + ) + self.SendSourceFiles = channel.unary_unary( + '/tensorflow.EventListener/SendSourceFiles', + request_serializer=tensorflow_dot_core_dot_protobuf_dot_debug__pb2.DebuggedSourceFiles.SerializeToString, + response_deserializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.FromString, + ) + + +class EventListenerServicer(object): + """EventListener: Receives Event protos, e.g., from debugged TensorFlow + runtime(s). + """ + + def SendEvents(self, request_iterator, context): + """Client(s) can use this RPC method to send the EventListener Event protos. + The Event protos can hold information such as: + 1) intermediate tensors from a debugged graph being executed, which can + be sent from DebugIdentity ops configured with grpc URLs. + 2) GraphDefs of partition graphs, which can be sent from special debug + ops that get executed immediately after the beginning of the graph + execution. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SendTracebacks(self, request, context): + """Send the tracebacks of a TensorFlow execution call. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SendSourceFiles(self, request, context): + """Send a collection of source code files being debugged. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_EventListenerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SendEvents': grpc.stream_stream_rpc_method_handler( + servicer.SendEvents, + request_deserializer=tensorflow_dot_core_dot_util_dot_event__pb2.Event.FromString, + response_serializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.SerializeToString, + ), + 'SendTracebacks': grpc.unary_unary_rpc_method_handler( + servicer.SendTracebacks, + request_deserializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.CallTraceback.FromString, + response_serializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.SerializeToString, + ), + 'SendSourceFiles': grpc.unary_unary_rpc_method_handler( + servicer.SendSourceFiles, + request_deserializer=tensorflow_dot_core_dot_protobuf_dot_debug__pb2.DebuggedSourceFiles.FromString, + response_serializer=tensorflow_dot_core_dot_debug_dot_debug__service__pb2.EventReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'tensorflow.EventListener', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debugger_event_metadata_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debugger_event_metadata_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..0cb7ef73c95d41af3ceed1deacc96b4bf3c6d850 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/debug/debugger_event_metadata_pb2.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/debug/debugger_event_metadata.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n3tensorflow/core/debug/debugger_event_metadata.proto\x12!third_party.tensorflow.core.debug\"e\n\x15\x44\x65\x62uggerEventMetadata\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12\x13\n\x0boutput_slot\x18\x02 \x01(\x05\x12\x12\n\nnum_chunks\x18\x03 \x01(\x05\x12\x13\n\x0b\x63hunk_index\x18\x04 \x01(\x05\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.core.debug.debugger_event_metadata_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _DEBUGGEREVENTMETADATA._serialized_start=90 + _DEBUGGEREVENTMETADATA._serialized_end=191 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b43f405d7296b7a726618031694ca86cb738f31 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bb72ab7903b12ea74a1c286f17a813376ccedf0 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_cache.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_cache.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0efd80caa3fb382880cc964653106b47dc4d8d2 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_cache.cpython-310.pyc differ diff --git 
a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d585a75a1c7dafc40f189b3d52126117ab962001 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46cbbdbe11ac857d7326959fa418b36d3ae85270 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/function_type_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/type_dispatch.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/type_dispatch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9afd635327e0d1d6311831d1336b665373563091 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/__pycache__/type_dispatch.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_cache.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..d22ff95b11a1050f369f9d6f476f24741cfd644d --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_cache.py @@ -0,0 +1,103 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Cache to manage functions based on their FunctionType.""" + +import collections +from typing import Any, NamedTuple, Optional + +from tensorflow.core.function.polymorphism import function_type as function_type_lib +from tensorflow.core.function.polymorphism import type_dispatch + + +class FunctionContext(NamedTuple): + """Contains information regarding tf.function execution context.""" + context: Any = None + scope_type: Any = None + + +class FunctionCache: + """A container for managing functions.""" + + __slots__ = ["_primary", "_dispatch_dict", "_garbage_collectors"] + + def __init__(self): + # Maps (FunctionContext, FunctionType) to a function. + self._primary = collections.OrderedDict() + + # Maps FunctionContext to a TypeDispatchTable containing FunctionTypes of + # that particular context. + self._dispatch_dict = {} + + def lookup(self, function_type: function_type_lib.FunctionType, + context: Optional[FunctionContext] = None) -> Optional[Any]: + """Looks up a function based on the context and type.""" + context = context or FunctionContext() + if context in self._dispatch_dict: + dispatch_type = self._dispatch_dict[context].dispatch(function_type) + if dispatch_type: + return self._primary[(context, dispatch_type)] + + return None + + def delete(self, function_type: function_type_lib.FunctionType, + context: Optional[FunctionContext] = None, + ) -> bool: + """Deletes a function given the context and type.""" + context = context or FunctionContext() + if (context, function_type) not in self._primary: + return False + + del self._primary[(context, function_type)] + self._dispatch_dict[context].delete(function_type) + + return True + + def add(self, fn: Any, context: Optional[FunctionContext] = None) -> None: + """Adds a new function using its function_type. + + Args: + fn: The function to be added to the cache. + context: A FunctionContext representing the current context. + """ + context = context or FunctionContext() + self._primary[(context, fn.function_type)] = fn + if context not in self._dispatch_dict: + self._dispatch_dict[context] = type_dispatch.TypeDispatchTable() + + self._dispatch_dict[context].add_target(fn.function_type) + + def generalize( + self, context: FunctionContext, + function_type: function_type_lib.FunctionType + ) -> function_type_lib.FunctionType: + """Try to generalize a FunctionType within a FunctionContext.""" + if context in self._dispatch_dict: + return self._dispatch_dict[context].try_generalizing_function_type( + function_type) + else: + return function_type + + # TODO(b/205971333): Remove this function. + def clear(self): + """Removes all functions from the cache.""" + self._primary.clear() + self._dispatch_dict.clear() + + def values(self): + """Returns a list of all functions held by this cache.""" + return list(self._primary.values()) + + def __len__(self): + return len(self._primary) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py new file mode 100644 index 0000000000000000000000000000000000000000..e06dcedadab1666361855b640adad2b3ad0bc330 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py @@ -0,0 +1,720 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
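A minimal sketch of how the FunctionCache above is driven, assuming a hypothetical ConcreteFn stand-in (the cache only requires the stored object to expose a function_type attribute; real callers pass traced concrete functions):

from tensorflow.core.function.polymorphism import function_cache
from tensorflow.core.function.polymorphism import function_type


class ConcreteFn:
  # Hypothetical stand-in for a traced function; FunctionCache.add() reads
  # only the function_type attribute.

  def __init__(self, fn_type):
    self.function_type = fn_type


fn_type = function_type.FunctionType.from_callable(lambda x: x)
cache = function_cache.FunctionCache()
cache.add(ConcreteFn(fn_type))

# An exact FunctionType match dispatches even without type constraints;
# subtype-based dispatch additionally needs TraceType-constrained parameters.
assert cache.lookup(fn_type) is not None
assert len(cache) == 1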
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Represents the types of TF functions.""" + +import collections +import inspect +from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple + +from absl import logging + +from tensorflow.core.function import trace_type +from tensorflow.core.function.polymorphism import function_type_pb2 +from tensorflow.core.function.trace_type import serialization +from tensorflow.python.types import core +from tensorflow.python.types import trace + + +# Represents a defined parameter default value that is saved alongside the +# function's captures. +class CapturedDefaultValue: + def __repr__(self): + return "<captured_default_value>" + + def __str__(self): + return "<captured_default_value>" + +CAPTURED_DEFAULT_VALUE = CapturedDefaultValue() + +PROTO_TO_PY_ENUM = { + function_type_pb2.Parameter.Kind.POSITIONAL_ONLY: + inspect.Parameter.POSITIONAL_ONLY, + function_type_pb2.Parameter.Kind.POSITIONAL_OR_KEYWORD: + inspect.Parameter.POSITIONAL_OR_KEYWORD, + function_type_pb2.Parameter.Kind.VAR_POSITIONAL: + inspect.Parameter.VAR_POSITIONAL, + function_type_pb2.Parameter.Kind.KEYWORD_ONLY: + inspect.Parameter.KEYWORD_ONLY, + function_type_pb2.Parameter.Kind.VAR_KEYWORD: + inspect.Parameter.VAR_KEYWORD, +} + +PY_TO_PROTO_ENUM = {v: k for k, v in PROTO_TO_PY_ENUM.items()} + + +class Parameter(inspect.Parameter): + """Represents a parameter to a function.""" + + def __init__(self, name: str, kind: Any, optional: bool, + type_constraint: Optional[trace.TraceType]): + if optional and kind not in [ + self.POSITIONAL_ONLY, self.KEYWORD_ONLY, self.POSITIONAL_OR_KEYWORD + ]: + raise ValueError( + "Parameter " + name + + " is optional and its kind must be one of {POSITIONAL_ONLY, " + + "KEYWORD_ONLY, POSITIONAL_OR_KEYWORD}.
Got: " + str(kind)) + + if type_constraint and kind in [self.VAR_POSITIONAL, self.VAR_KEYWORD]: + raise TypeError("Variable args/kwargs can not have type constraints.") + + if not isinstance(type_constraint, (trace.TraceType, type(None))): + raise TypeError( + "Type constraints can only be an instance of a TraceType but got " + + "type_constraint=" + str(type_constraint) + " for Parameter " + name) + + super().__init__( + name, + kind, + default=CAPTURED_DEFAULT_VALUE if optional else self.empty, + annotation=type_constraint + if type_constraint is not None else self.empty) + + @classmethod + def from_proto(cls, proto: Any) -> "Parameter": + """Generate a Parameter from the proto representation.""" + deserialized_type_constraint = serialization.deserialize( + proto.type_constraint) if proto.HasField("type_constraint") else None + return Parameter(proto.name, PROTO_TO_PY_ENUM[proto.kind], + proto.is_optional, deserialized_type_constraint) + + def to_proto(self) -> function_type_pb2.Parameter: + """Generate a proto representation of the Parameter.""" + serialized_type_constraint = serialization.serialize( + self.type_constraint) if self.type_constraint else None + return function_type_pb2.Parameter( + name=self.name, + kind=PY_TO_PROTO_ENUM[self.kind], + is_optional=self.optional, + type_constraint=serialized_type_constraint) + + @property + def optional(self) -> bool: + """If this parameter might not be supplied for a call.""" + return self.default is not self.empty + + @property + def type_constraint(self) -> Optional[trace.TraceType]: + """A supertype that the parameter's type must subtype for validity.""" + return self.annotation if self.annotation is not self.empty else None + + def is_subtype_of(self, other: "Parameter") -> bool: + """Returns True if self is a supertype of other Parameter.""" + if not self.type_constraint or not other.type_constraint: + raise TypeError( + "Can not determine relationship between partially specified types.") + + if ((self.name, self.kind, self.optional) != + (other.name, other.kind, other.optional)): + return False + + return self.type_constraint.is_subtype_of(other.type_constraint) + + def most_specific_common_supertype( + self, others: Sequence["Parameter"]) -> Optional["Parameter"]: + """Returns a common supertype (if exists).""" + if not self.type_constraint or any( + not other.type_constraint for other in others): + raise TypeError( + "Can not determine relationship between partially specified types.") + + for other in others: + if ((self.name, self.kind, self.optional) != + (other.name, other.kind, other.optional)): + return None + + supertyped_constraint = self.type_constraint.most_specific_common_supertype( + [other.type_constraint for other in others]) + if supertyped_constraint: + return Parameter(self.name, self.kind, self.optional, + supertyped_constraint) + else: + return None + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Parameter): + return NotImplemented + + return ((self.name, self.kind, self.optional, + self.type_constraint) == (other.name, other.kind, other.optional, + other.type_constraint)) + + def __hash__(self): + return hash((self.name, self.kind, self.optional, self.type_constraint)) + + def __repr__(self): + return ("Parameter(name=" + self.name + ", kind=" + str(self.kind) + + ", optional=" + repr(self.optional) + ", type_constraint=" + + repr(self.type_constraint) + ")") + + def __reduce__(self): + return (self.__class__, (self.name, self.kind, self.optional, + self.type_constraint)) + + +class 
FunctionType(core.FunctionType): + """Represents the type of a TensorFlow function. + + FunctionType is the canonical way to represent the input/output contract of + all kinds of functions within the tf.function domain, including: + - Polymorphic Function + - Concrete Function + - Atomic Function + + It provides consistent, centralized and layered logic for: + - Canonicalization of Python input arguments + - Type-based dispatch to monomorphic functions + - Packing/unpacking structured python values to Tensors + - Generation of structured placeholder values for tracing + + Additionally, it also provides: + - Lossless serialization + - Native integration with Python function signature representation + - Seamless migration from older representation formats + """ + + def __init__(self, + parameters: Sequence[inspect.Parameter], + captures: Optional[collections.OrderedDict] = None, + **kwargs): + super().__init__(parameters, **kwargs) + self._captures = captures if captures else collections.OrderedDict() + + @property + def parameters(self) -> Mapping[str, Any]: + """Returns an ordered mapping of parameter name to specification.""" + return super().parameters + + @property + def captures(self) -> collections.OrderedDict: + """Returns an ordered mapping of capture id to type.""" + return self._captures + + @property + def output(self) -> Optional[trace.TraceType]: + """Return the output TraceType if specified.""" + return ( + self.return_annotation + if self.return_annotation is not self.empty + else None + ) + + @classmethod + def from_callable(cls, + obj: Callable[..., Any], + *, + follow_wrapped: bool = True) -> "FunctionType": + """Generate FunctionType from a python Callable.""" + signature = super().from_callable(obj, follow_wrapped=follow_wrapped) + # TODO(fmuham): Support TraceType-based annotations.
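+ # Until then, annotations are ignored here: every parameter starts with + # type_constraint=None, and constraints are attached later via + # add_type_constraints (input_signature) or during tracing via + # canonicalize_to_monomorphic, both defined below.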
+ parameters = [ + Parameter(p.name, p.kind, p.default is not p.empty, None) + for p in signature.parameters.values() + ] + + return FunctionType(parameters) + + @classmethod + def get_default_values(cls, + obj: Callable[..., Any], + *, + follow_wrapped: bool = True) -> Dict[str, Any]: + """Inspects and returns a dictionary of default values.""" + signature = super().from_callable(obj, follow_wrapped=follow_wrapped) + default_values = {} + for p in signature.parameters.values(): + if p.default is not p.empty: + default_values[p.name] = p.default + return default_values + + @classmethod + def from_proto(cls, proto: Any) -> "FunctionType": + """Generate a FunctionType from the proto representation.""" + return FunctionType([Parameter.from_proto(p) for p in proto.parameters], + collections.OrderedDict([ + (c.name, + serialization.deserialize(c.type_constraint)) + for c in proto.captures + ])) + + def to_proto(self) -> Any: + """Generate a proto representation from the FunctionType.""" + return function_type_pb2.FunctionType( + parameters=[p.to_proto() for p in self.parameters.values()], + captures=[ + function_type_pb2.Capture( + name=n, type_constraint=serialization.serialize(t)) + for n, t in self.captures.items() + ]) + + def bind_with_defaults(self, args, kwargs, default_values): + """Returns BoundArguments with default values filled in.""" + bound_arguments = self.bind(*args, **kwargs) + bound_arguments.apply_defaults() + + with_default_args = collections.OrderedDict() + for name, value in bound_arguments.arguments.items(): + if value is CAPTURED_DEFAULT_VALUE: + with_default_args[name] = default_values[name] + else: + with_default_args[name] = value + + for arg_name in with_default_args: + constraint = self.parameters[arg_name].type_constraint + if constraint: + with_default_args[arg_name] = constraint.cast( + with_default_args[arg_name], + trace_type.InternalCastContext(allow_specs=True), + ) + bound_arguments = inspect.BoundArguments(self, with_default_args) + return bound_arguments + + def is_supertype_of(self, other: "FunctionType") -> bool: + """Returns True if self is a supertype of other FunctionType.""" + if len(self.parameters) != len(other.parameters): + return False + + for self_param, other_param in zip(self.parameters.values(), + other.parameters.values()): + # Functions are contravariant on their parameter types. + if not self_param.is_subtype_of(other_param): + return False + + # Other must have all capture names of self. + if not all(name in other.captures for name in self.captures): + return False + + # Functions are contravariant upon the capture types. + return all(capture_type.is_subtype_of(other.captures[name]) + for name, capture_type in self.captures.items()) + + def most_specific_common_subtype( + self, others: Sequence["FunctionType"]) -> Optional["FunctionType"]: + """Returns a common subtype (if exists).""" + subtyped_parameters = [] + + for i, parameter in enumerate(self.parameters.values()): + # Functions are contravariant on their parameter types. + subtyped_parameter = parameter.most_specific_common_supertype( + [list(other.parameters.values())[i] for other in others]) + if subtyped_parameter is None: + return None + subtyped_parameters.append(subtyped_parameter) + + if not all(subtyped_parameters): + return None + + # Common subtype has superset of all captures. 
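+ # Rationale: the common subtype must be callable wherever any of the + # originals is, so it needs the union of their captures, with each shared + # capture generalized to a common supertype below.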
+ capture_names = set(self.captures.keys()) + for other in others: + capture_names = capture_names.union(other.captures.keys()) + + subtyped_captures = collections.OrderedDict() + for name in capture_names: + containing = [t for t in [self, *others] if name in t.captures] + # Pick the first type that has the capture as the base. + base = containing[0] + relevant_others = containing[1:] + + # Functions are contravariant upon the capture types. + common_type = base.captures[name].most_specific_common_supertype( + [other.captures[name] for other in relevant_others] + ) + if common_type is None: + return None + else: + subtyped_captures[name] = common_type + + return FunctionType(subtyped_parameters, subtyped_captures) + + def placeholder_arguments( + self, placeholder_context: trace.PlaceholderContext + ) -> inspect.BoundArguments: + """Returns BoundArguments of values that can be used for tracing.""" + arguments = collections.OrderedDict() + for parameter in self.parameters.values(): + if parameter.kind in {Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD}: + raise ValueError("Can not generate placeholder values for " + "variable length function type.") + + if not parameter.type_constraint: + raise ValueError("Can not generate placeholder value for " + "partially defined function type.") + placeholder_context.update_naming_scope(parameter.name) + arguments[parameter.name] = parameter.type_constraint.placeholder_value( + placeholder_context) + + return inspect.BoundArguments(self, arguments) + + @property + def flat_inputs(self) -> List[trace.TraceType]: + """Flat tensor inputs accepted by this FunctionType.""" + if not hasattr(self, "_cached_flat_inputs"): + cached_flat_inputs = [] + for p in self.parameters.values(): + cached_flat_inputs.extend(p.type_constraint.flatten()) + self._cached_flat_inputs = cached_flat_inputs + + return self._cached_flat_inputs + + def unpack_inputs( + self, bound_parameters: inspect.BoundArguments + ) -> List[core.Tensor]: + """Unpacks python arguments to flat tensor inputs accepted by this type.""" + # Sort keyword-only parameters by name. 
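+ # Sorting keyword-only parameters by name makes the flat tensor order + # deterministic regardless of the order in which they were declared.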
+ sorted_parameters = [] + kwonly_parameters = [] + for p in self.parameters.values(): + if p.kind is Parameter.KEYWORD_ONLY: + kwonly_parameters.append(p) + else: + sorted_parameters.append(p) + sorted_parameters = sorted_parameters + sorted( + kwonly_parameters, key=lambda p: p.name + ) + + flat = [] + for p in sorted_parameters: + flat.extend( + p.type_constraint.to_tensors(bound_parameters.arguments[p.name]) + ) + + dealiased_inputs = [] + ids_used = set() + for tensor, input_type in zip(flat, self.flat_inputs): + alias_id = input_type._alias_id() # pylint: disable=protected-access + if alias_id is None or alias_id not in ids_used: + dealiased_inputs.append(tensor) + + if alias_id is not None: + ids_used.add(alias_id) + + return dealiased_inputs + + @property + def flat_captures(self) -> List[trace.TraceType]: + """Flat tensor captures needed by this FunctionType.""" + if not hasattr(self, "_cached_flat_captures"): + cached_flat_captures = [] + for t in self.captures.values(): + cached_flat_captures.extend(t.flatten()) + self._cached_flat_captures = cached_flat_captures + + return self._cached_flat_captures + + def unpack_captures(self, captures) -> List[core.Tensor]: + """Unpacks captures to flat tensors.""" + flat = [] + for v, t in zip(captures, self.captures.values()): + flat.extend(t.to_tensors(v)) + if len(flat) != len(self.flat_captures): + raise TypeError( + f"Flattening captures {captures} with type {self!r} produced" + f" {len(flat)} tensors instead of {len(self.flat_captures)}" + ) + return flat + + @property + def flat_outputs(self) -> List[trace.TraceType]: + """Flat tensor outputs returned by this FunctionType.""" + if not hasattr(self, "_cached_flat_outputs"): + if self.output is not None: + self._cached_flat_outputs = self.output.flatten() + + return self._cached_flat_outputs + + def pack_output(self, flat_values: Sequence[core.Tensor]) -> Any: + """Packs flat tensors to generate a value of the output type.""" + if flat_values is None: + flat_values = [] + + if self.output is None: + raise ValueError("Can not pack outputs for undefined output type.") + else: + return self.output.from_tensors(iter(flat_values)) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, FunctionType): + return NotImplemented + + return (self.parameters, self.captures) == (other.parameters, + other.captures) + + def __hash__(self) -> int: + return hash((tuple(self.parameters.items()), tuple(self.captures.items()))) + + def __repr__(self): + if hasattr(self, "_cached_repr"): + return self._cached_repr + + lines = ["Input Parameters:"] + for parameter in self.parameters.values(): + lines.append( + f" {parameter.name} ({parameter.kind}): {parameter.type_constraint}" + ) + + lines.append("Output Type:") + lines.append(f" {self.output}") + + lines.append("Captures:") + if self.captures: + for capture_id, capture_type in self.captures.items(): + lines.append(f" {capture_id}: {capture_type}") + else: + lines.append(" None") + + self._cached_repr = "\n".join(lines) + return self._cached_repr + + +MAX_SANITIZATION_WARNINGS = 5 +sanitization_warnings_given = 0 + + +# TODO(fmuham): In future, replace warning with exception. +# TODO(fmuham): Sanitize to graph node conventions. +def sanitize_arg_name(name: str) -> str: + """Sanitizes function argument names. + + Matches Python symbol naming rules. + + Without sanitization, names that are not legal Python parameter names can be + set which makes it challenging to represent callables supporting the named + calling capability. 
+ + Args: + name: The name to sanitize. + + Returns: + A string that meets Python parameter conventions. + """ + # Replace non-alphanumeric chars with '_' + swapped = "".join([c if c.isalnum() else "_" for c in name]) + result = swapped if swapped[0].isalpha() else "arg_" + swapped + + global sanitization_warnings_given + if name != result and sanitization_warnings_given < MAX_SANITIZATION_WARNINGS: + logging.warning( + "`%s` is not a valid tf.function parameter name. Sanitizing to `%s`.", + name, result) + sanitization_warnings_given += 1 + + return result + + +# TODO(fmuham): Consider forcing kind to be always POSITIONAL_OR_KEYWORD. +def _make_validated_mono_param( + name, value, kind, type_context, poly_type +) -> Parameter: + """Generates and validates a parameter for Monomorphic FunctionType.""" + mono_type = trace_type.from_value(value, type_context) + + if poly_type and not mono_type.is_subtype_of(poly_type): + raise TypeError(f"Parameter `{name}` was expected to be of type " + f"{poly_type} but is {mono_type}") + + return Parameter(name, kind, False, mono_type) + + +def canonicalize_to_monomorphic( + args: Tuple[Any, ...], kwargs: Dict[Any, Any], default_values: Dict[Any, + Any], + capture_types: collections.OrderedDict, polymorphic_type: FunctionType +) -> Tuple[FunctionType, trace_type.InternalTracingContext]: + """Generates a monomorphic type out of polymorphic type for given args.""" + poly_bound_arguments = polymorphic_type.bind(*args, **kwargs) + + # Inject Default Values. + if default_values: + poly_bound_arguments.apply_defaults() + default_values_injected = poly_bound_arguments.arguments + for name, value in default_values_injected.items(): + if value is CAPTURED_DEFAULT_VALUE: + default_values_injected[name] = default_values[name] + poly_bound_arguments = inspect.BoundArguments( + poly_bound_arguments.signature, default_values_injected + ) + + parameters = [] + type_context = trace_type.InternalTracingContext() + has_var_positional = any(p.kind is Parameter.VAR_POSITIONAL + for p in polymorphic_type.parameters.values()) + + for name, arg in poly_bound_arguments.arguments.items(): + poly_parameter = polymorphic_type.parameters[name] + if (has_var_positional and + poly_parameter.kind is Parameter.POSITIONAL_OR_KEYWORD): + # If there is a VAR_POSITIONAL, all POSITIONAL_OR_KEYWORD become + # POSITIONAL_ONLY. + parameters.append( + _make_validated_mono_param(name, arg, Parameter.POSITIONAL_ONLY, + type_context, + poly_parameter.type_constraint)) + + elif poly_parameter.kind is Parameter.VAR_POSITIONAL: + # Unbundle VAR_POSITIONAL into individual POSITIONAL_ONLY args. + for i, value in enumerate(arg): + parameters.append( + _make_validated_mono_param(f"{poly_parameter.name}_{i}", value, + Parameter.POSITIONAL_ONLY, type_context, + poly_parameter.type_constraint)) + + elif poly_parameter.kind is Parameter.VAR_KEYWORD: + # Unbundle VAR_KEYWORD into individual KEYWORD_ONLY args. + for kwarg_name in sorted(arg.keys()): + parameters.append( + _make_validated_mono_param(kwarg_name, arg[kwarg_name], + Parameter.KEYWORD_ONLY, type_context, + poly_parameter.type_constraint)) + else: + parameters.append( + _make_validated_mono_param(name, arg, poly_parameter.kind, + type_context, + poly_parameter.type_constraint)) + + return FunctionType(parameters, capture_types), type_context + + +# TODO(fmuham): Share code with canonicalize_to_monomorphic. +# TODO(fmuham): Lift unnecessary restrictions on input_signature validity. 
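The sanitization rules above are easiest to see by example (a sketch, not part of the diff; the results follow directly from the definition of sanitize_arg_name):

from tensorflow.core.function.polymorphism.function_type import sanitize_arg_name

assert sanitize_arg_name("input:0") == "input_0"        # ':' is not alphanumeric
assert sanitize_arg_name("96") == "arg_96"              # must start with a letter
assert sanitize_arg_name("valid_name") == "valid_name"  # already a legal name
# Each renaming logs a warning, capped at MAX_SANITIZATION_WARNINGS (5).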
+def add_type_constraints(function_type: FunctionType, input_signature: Any, + default_values: Dict[str, Any]) -> FunctionType: + """Adds type constraints to a FunctionType based on the input_signature.""" + context = trace_type.InternalTracingContext(is_legacy_signature=True) + constraints = [trace_type.from_value(c, context) for c in input_signature] + parameters = [] + + has_var_pos = any( + p.kind is p.VAR_POSITIONAL for p in function_type.parameters.values()) + + for param in function_type.parameters.values(): + # VAR_POSITIONAL does not allow POSITIONAL_OR_KEYWORD args. + sanitized_kind = ( + param.POSITIONAL_ONLY if has_var_pos and + param.kind is param.POSITIONAL_OR_KEYWORD else param.kind) + + if param.name == "self": + # Type constraints do not apply on them. + parameters.append(Parameter("self", sanitized_kind, param.optional, None)) + + elif param.kind is param.VAR_KEYWORD: + # Disabled when input_signature is specified. + continue + + elif param.kind is param.VAR_POSITIONAL: + # Convert into Positional Only args based on length of constraints. + for i in range(len(constraints)): + parameters.append( + Parameter(param.name + "_" + str(i), Parameter.POSITIONAL_ONLY, + False, constraints.pop(0))) + + elif (param.kind in [ + param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD, param.KEYWORD_ONLY + ]): + if param.kind is param.KEYWORD_ONLY and param.name not in default_values: + raise TypeError( + "Since input_signature is defined, keyword-only parameter" + f" `{param.name}` must have a default value" + ) + + if constraints: + parameters.append( + Parameter(param.name, sanitized_kind, param.optional, + constraints.pop(0))) + elif param.name in default_values: + type_constraint = trace_type.from_value(default_values[param.name]) + parameters.append( + Parameter(param.name, sanitized_kind, param.optional, + type_constraint)) + else: + raise TypeError( + f"input_signature missing type constraint for {param.name}") + + if constraints: + raise TypeError( + f"input_signature contains {len(constraints)} extra type constraints.") + + return FunctionType(parameters) + + +def from_structured_signature( + input_signature=None, output_signature=None, capture_types=None +) -> FunctionType: + """Generates a FunctionType from legacy signature representation.""" + if input_signature is None: + input_signature = ((), {}) + + args, kwargs = input_signature + parameters = [] + + for i, arg in enumerate(args): + parameters.append( + Parameter( + "arg_" + str(i), + Parameter.POSITIONAL_ONLY, + False, + trace_type.from_value( + arg, trace_type.InternalTracingContext(is_legacy_signature=True) + ), + ) + ) + + for name, kwarg in kwargs.items(): + parameters.append( + Parameter( + sanitize_arg_name(name), + Parameter.KEYWORD_ONLY, + False, + trace_type.from_value( + kwarg, + trace_type.InternalTracingContext(is_legacy_signature=True), + ), + ) + ) + + return_type = trace_type.from_value( + output_signature, + trace_type.InternalTracingContext(is_legacy_signature=True), + ) + + return FunctionType( + parameters, capture_types or {}, return_annotation=return_type + ) + + +def to_structured_signature(function_type: FunctionType) -> Tuple[Any, Any]: + """Returns structured input and output signatures from a FunctionType.""" + def to_signature(x_type): + if x_type is None: + raise TypeError( + "Can not generate structured signature if FunctionType is not fully" + f" specified. 
Received {function_type}" + ) + return x_type.placeholder_value( + trace_type.InternalPlaceholderContext(unnest_only=True) + ) + + args_signature = [] + kwargs_signature = {} + for p in function_type.parameters.values(): + if p.kind == Parameter.POSITIONAL_ONLY: + args_signature.append(to_signature(p.type_constraint)) + else: + kwargs_signature[p.name] = to_signature(p.type_constraint) + + input_signature = (tuple(args_signature), kwargs_signature) + output_signature = to_signature(function_type.output) + + return input_signature, output_signature diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..c143163df8199aaa5cc039f9633738bba98f2fcc --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/function/polymorphism/function_type.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from tensorflow.core.function.trace_type import serialization_pb2 as tensorflow_dot_core_dot_function_dot_trace__type_dot_serialization__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n9tensorflow/core/function/polymorphism/function_type.proto\x12\x33tensorflow.core.function.polymorphism.function_type\x1a\x37tensorflow/core/function/trace_type/serialization.proto\"\xe0\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x04kind\x18\x02 \x01(\x0e\x32\x43.tensorflow.core.function.polymorphism.function_type.Parameter.Kind\x12\x13\n\x0bis_optional\x18\x03 \x01(\x08\x12_\n\x0ftype_constraint\x18\x04 \x01(\x0b\x32\x46.tensorflow.core.function.trace_type.serialization.SerializedTraceType\"|\n\x04Kind\x12\r\n\tUNDEFINED\x10\x00\x12\x13\n\x0fPOSITIONAL_ONLY\x10\x01\x12\x19\n\x15POSITIONAL_OR_KEYWORD\x10\x02\x12\x12\n\x0eVAR_POSITIONAL\x10\x03\x12\x10\n\x0cKEYWORD_ONLY\x10\x04\x12\x0f\n\x0bVAR_KEYWORD\x10\x05\"x\n\x07\x43\x61pture\x12\x0c\n\x04name\x18\x01 \x01(\t\x12_\n\x0ftype_constraint\x18\x02 \x01(\x0b\x32\x46.tensorflow.core.function.trace_type.serialization.SerializedTraceType\"\xb2\x01\n\x0c\x46unctionType\x12R\n\nparameters\x18\x01 \x03(\x0b\x32>.tensorflow.core.function.polymorphism.function_type.Parameter\x12N\n\x08\x63\x61ptures\x18\x02 \x03(\x0b\x32<.tensorflow.core.function.polymorphism.function_type.Capture') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.core.function.polymorphism.function_type_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _PARAMETER._serialized_start=172 + _PARAMETER._serialized_end=524 + _PARAMETER_KIND._serialized_start=400 + _PARAMETER_KIND._serialized_end=524 + _CAPTURE._serialized_start=526 + _CAPTURE._serialized_end=646 + _FUNCTIONTYPE._serialized_start=649 + _FUNCTIONTYPE._serialized_end=827 +# @@protoc_insertion_point(module_scope) diff 
--git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/type_dispatch.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/type_dispatch.py new file mode 100644 index 0000000000000000000000000000000000000000..0898e4676352411471145813e82e1b46ae955f6d --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/type_dispatch.py @@ -0,0 +1,131 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Polymorphic Type Dispatch.""" + +import collections +from typing import Optional, Iterable + +from tensorflow.core.function.polymorphism import function_type + +# The maximum number of dispatch lookups to cache. +_MAX_DISPATCH_CACHE = 1024 + + +class TypeDispatchTable: + """Type dispatch table implementation. + + A type dispatch table is a list, L, of target types. Given a request type, R, + the table selects a target type, T, according to the following dispatch rules: + 1. R == T or R is supertype of T (functions are contravariant on args) + 2. There does not exist O in L such that R is supertype of O and O is a + supertype of T (in other words, T is the closest to R, within list L). + 3. If the above two rules are satisfied by multiple targets, the earliest + inserted one is chosen. + """ + + def __init__(self): + """Creates a TypeDispatchTable object.""" + # Holds all inserted types as keys mapping to None. + # (Using OrderedDict as a set for determinism) + self._dispatch_table = collections.OrderedDict() + + # LRU cache for dispatch results. + # Maps request types to target types (see class description). + # Does not contain exact matches, i.e., if cache[a] is b then a is not b. + self._dispatch_cache = collections.OrderedDict() + + def add_target(self, target: function_type.FunctionType) -> None: + """Adds a new target type.""" + self._dispatch_table[target] = None + for request in self._dispatch_cache: + if target.is_supertype_of(self._dispatch_cache[request]): + self._dispatch_cache[request] = target + + @property + def targets(self) -> Iterable[function_type.FunctionType]: + """Returns an iterable to all targets in the table.""" + return self._dispatch_table.keys() + + def delete(self, target: function_type.FunctionType) -> None: + """Deletes a target in the table if it exists.""" + if target in self._dispatch_table: + del self._dispatch_table[target] + for request in list(self._dispatch_cache.keys()): + if self._dispatch_cache[request] == target: + del self._dispatch_cache[request] + + # TODO(b/205971333): remove once FunctionCache 'clear' is removed.
+ def clear(self) -> None: + """Deletes all targets in the table.""" + self._dispatch_table.clear() + self._dispatch_cache.clear() + + def dispatch( + self, request: function_type.FunctionType + ) -> Optional[function_type.FunctionType]: + """Returns the most specific supertype target if it exists in the table.""" + # For known exact matches. + if request in self._dispatch_table: + return request + + # For known non-exact matches. + # (self._dispatch_cache does not contain exact matches) + if request in self._dispatch_cache: + # Move to the front of LRU cache. + result = self._dispatch_cache.pop(request) + self._dispatch_cache[request] = result + return result + + most_specific_supertype = None + for other in self._dispatch_table: + if request.is_supertype_of(other): + if most_specific_supertype is None or other.is_supertype_of( + most_specific_supertype): + most_specific_supertype = other + + self._cache_dispatch(request, most_specific_supertype) + return most_specific_supertype + + def _cache_dispatch(self, request, target): + """Caches the dispatch lookup result for a target.""" + if target is not None: + # LRU Cache removes oldest item + if len(self._dispatch_cache) > _MAX_DISPATCH_CACHE: + self._dispatch_cache.popitem(last=False) + self._dispatch_cache[request] = target + + def try_generalizing_function_type( + self, target: function_type.FunctionType) -> function_type.FunctionType: + """Returns a generalized subtype of the one given. + + This heuristic aims to reduce the number of future traces by computing a + type that represents more general function inputs. + + The original "experimental_relax_shapes" heuristic identified a known type + which shared a common subtype with the current unknown type and then + traced with that common subtype. However, the notion of "common subtype" + was only limited to shapes. This heuristic extends that to FunctionType. + + Returns `target` if a generalized subtype can not be found. + + Args: + target: The FunctionType to generalize + """ + relaxed = target + for other in self._dispatch_table: + subtype = relaxed.most_specific_common_subtype([other]) + if subtype is not None: + relaxed = subtype + return relaxed diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2ae2583b4274b6051e1efe537ce89e2d133cb00a --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__init__.py @@ -0,0 +1,38 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Trace-time type system for tf.function (TraceType). + +Trace-time types describe things like tf.function signatures and type +constraints in some ops.
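A small sketch of the dispatch rules documented on TypeDispatchTable above: an exact match (rule 1 with R == T) resolves without consulting type constraints, so it works even for FunctionTypes whose parameters carry no TraceType (subtype dispatch would additionally require constraints):

from tensorflow.core.function.polymorphism import function_type
from tensorflow.core.function.polymorphism import type_dispatch

table = type_dispatch.TypeDispatchTable()
target = function_type.FunctionType.from_callable(lambda x, y: x + y)
table.add_target(target)

assert table.dispatch(target) is target  # rule 1: R == T
table.delete(target)
assert table.dispatch(target) is None    # table is empty again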
+ +This module provides utilities and concrete tf.types.experimental.TraceType +definitions for common Python types like containers, along with a generic +implementation for Python objects. +See also: tf.types.experimental.TraceType + +Other implementations of TraceType include tf.TypeSpec and its subclasses. +""" + +from tensorflow.core.function.trace_type.default_types import register_tensor_type +from tensorflow.core.function.trace_type.default_types import Weakref +from tensorflow.core.function.trace_type.serialization import deserialize +from tensorflow.core.function.trace_type.serialization import register_serializable +from tensorflow.core.function.trace_type.serialization import Serializable +from tensorflow.core.function.trace_type.serialization import serialize +from tensorflow.core.function.trace_type.serialization import SerializedTraceType +from tensorflow.core.function.trace_type.trace_type_builder import from_value +from tensorflow.core.function.trace_type.trace_type_builder import InternalCastContext +from tensorflow.core.function.trace_type.trace_type_builder import InternalPlaceholderContext +from tensorflow.core.function.trace_type.trace_type_builder import InternalTracingContext diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/serialization_test_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/serialization_test_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c21f6eb97a15b2a4f55222858f3ca7389653a93 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/serialization_test_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/trace_type_builder.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/trace_type_builder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..909713c7a8c263ad934f397740f99edc11272af1 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/trace_type_builder.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/util.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e4fa6665e041452a7981b74e12329ebaaf9316d4 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/__pycache__/util.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/custom_nest_trace_type.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/custom_nest_trace_type.py new file mode 100644 index 0000000000000000000000000000000000000000..28216bfe8340a423c08c3895528fcb116a4afaeb --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/custom_nest_trace_type.py @@ -0,0 +1,143 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""TraceType implementations for classes that implement the CustomNestProtocol.""" + +from typing import Any, Iterator, List as PythonList, Optional, Sequence, Tuple as PythonTuple, Type + +from tensorflow.core.function.trace_type import util +from tensorflow.python.types import trace +from tensorflow.python.util import custom_nest_protocol + + +class CustomNestTraceType(trace.TraceType): + """Represents the TraceType of a class implementing the CustomNestProtocol.""" + + def __init__( + self, + value_type: Type[Any], + metadata: Any, + components: PythonTuple[trace.TraceType], + ): + if not issubclass(value_type, custom_nest_protocol.CustomNestProtocol): + raise ValueError(f"{value_type!r} does not implement CustomNestProtocol.") + self.value_type = value_type + self.metadata = metadata + self.components = components + + def is_subtype_of(self, other: trace.TraceType) -> bool: + if not self._is_same_trace_type(other): + return False + for c_self, c_other in zip(self.components, other.components): # pytype: disable=attribute-error + if not c_self.is_subtype_of(c_other): + return False + return True + + def most_specific_common_supertype( + self, others: Sequence[trace.TraceType] + ) -> Optional["CustomNestTraceType"]: + for other in others: + if not self._is_same_trace_type(other): + return None + + others_components = [other.components for other in others] # pytype: disable=attribute-error + supertyped_components = tuple( + self_component.most_specific_common_supertype(others_component) + for self_component, *others_component in zip( + self.components, *others_components + ) + ) + return CustomNestTraceType( + self.value_type, self.metadata, supertyped_components + ) + + def __eq__(self, other: trace.TraceType) -> bool: + return ( + isinstance(other, CustomNestTraceType) + and self.value_type == other.value_type + and self.metadata == other.metadata + and self.components == other.components + ) + + def __hash__(self) -> int: + # The hash computation doesn't use self.metadata, so unhashable metadata can + # be used. The `self.__eq__` method is used instead to differentiate between + # two objects with the same components but different metadata.
+ return hash((self.value_type, self.components)) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__} [metadata={self.metadata!r}, " + f"components={self.components!r}]" + ) + + def placeholder_value(self, placeholder_context: Any) -> Any: + components_placeholder_value = tuple( + c.placeholder_value(placeholder_context) for c in self.components + ) + return self.value_type.__tf_unflatten__( + self.metadata, components_placeholder_value + ) + + def to_tensors(self, value: Any) -> PythonList[Any]: + if not isinstance(value, self.value_type): + raise TypeError(f"{value!r} is not of type {self.value_type}.") + _, value_components = value.__tf_flatten__() + flattened_values = [] + for value_comp, type_comp in zip(value_components, self.components): + flattened_values.extend(type_comp.to_tensors(value_comp)) + return flattened_values + + def from_tensors(self, tensors: Iterator[Any]) -> Any: + return self.value_type.__tf_unflatten__( + self.metadata, tuple(c.from_tensors(tensors) for c in self.components) + ) + + def flatten(self) -> PythonList[trace.TraceType]: + flat_list = [] + for c in self.components: + flat_list.extend(c.flatten()) + return flat_list + + def cast(self, value: Any, casting_context: Any) -> Any: + if not isinstance(value, self.value_type): + raise TypeError(f"[{value!r}] is not of type {self.value_type}.") + value_metadata, value_components = value.__tf_flatten__() + if self.metadata != value_metadata: + raise ValueError( + f"Metadata mismatch: [{self.metadata!r}] != [{value_metadata!r}]." + ) + if len(self.components) != len(value_components): + raise ValueError( + f"Lengths of components mismatch: {len(self.components)} != " + f"{len(value_components)}." + ) + + casted_value_components, was_casted = util.cast_and_return_whether_casted( + self.components, value_components, casting_context + ) + if was_casted: + return self.value_type.__tf_unflatten__( + self.metadata, casted_value_components + ) + else: + return value + + def _is_same_trace_type(self, other: trace.TraceType) -> bool: + return ( + isinstance(other, CustomNestTraceType) + and self.value_type == other.value_type + and self.metadata == other.metadata + and len(self.components) == len(other.components) + ) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types.py new file mode 100644 index 0000000000000000000000000000000000000000..d07bd43c8929d88d6822653128affbba72a5e8c9 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types.py @@ -0,0 +1,826 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
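For reference, a minimal sketch of a class that would satisfy the __tf_flatten__/__tf_unflatten__ contract consumed by CustomNestTraceType above; the Pair class and its field layout are hypothetical, only the two protocol methods and their (metadata, components) shape come from the code above:

class Pair:
    """Toy custom nest: metadata carries field names, components the values."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __tf_flatten__(self):
        # Must return (metadata, components), as unpacked in cast/to_tensors.
        return (("x", "y"), (self.x, self.y))

    @classmethod
    def __tf_unflatten__(cls, metadata, components):
        # Inverse of __tf_flatten__; used to rebuild placeholders and casts.
        del metadata  # Field order is fixed in this toy class.
        return cls(*components)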
+# ============================================================================== +"""TraceType implementations for common Python types.""" + +import collections +import math +import numbers +from typing import Any, Dict as PythonDict, Hashable, List as PythonList, Optional, Sequence, Tuple as PythonTuple, Type +import weakref + +from tensorflow.core.function.trace_type import default_types_pb2 +from tensorflow.core.function.trace_type import serialization +from tensorflow.core.function.trace_type import util +from tensorflow.python.types import trace + +# Register the TraceType of Tensor (aka TensorSpec) to avoid cyclic dependency. +TENSOR = None + + +def register_tensor_type(tensor_type): + global TENSOR + if not TENSOR: + TENSOR = tensor_type + else: + raise AssertionError("Tensor type is already registered.") + +NanMarker = object() + + +def is_nan(x): + """Checks if given value is a Python NaN.""" + if not isinstance(x, numbers.Number): + return False + + if isinstance(x, complex): + return math.isnan(x.real) or math.isnan(x.imag) + else: + return math.isnan(x) + + +class Literal(trace.TraceType, serialization.Serializable): + """Represents a Literal type like bool, int or string.""" + + def __init__(self, value: Any): + # We match nan values against each other even though Python doesn't. + if is_nan(value): + value = NanMarker + + self.value = value + self._value_hash = hash(value) + + def is_subtype_of(self, other: trace.TraceType) -> bool: + return self == other + + def most_specific_common_supertype( + self, types: Sequence[trace.TraceType]) -> Optional["Literal"]: + return self if all(self == other for other in types) else None + + @classmethod + def experimental_type_proto(cls) -> Type[default_types_pb2.SerializedLiteral]: + return default_types_pb2.SerializedLiteral + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedLiteral) -> "Literal": + if proto.HasField("bool_value"): + return Literal(proto.bool_value) + + if proto.HasField("int_value"): + return Literal(proto.int_value) + + if proto.HasField("float_value"): + return Literal(proto.float_value) + + if proto.HasField("str_value"): + return Literal(proto.str_value) + + if proto.HasField("none_value"): + return Literal(None) + + raise ValueError("Malformed Literal proto can not be deserialized") + + def experimental_as_proto(self) -> default_types_pb2.SerializedLiteral: + if isinstance(self.value, bool): + return default_types_pb2.SerializedLiteral(bool_value=self.value) + + if isinstance(self.value, int): + return default_types_pb2.SerializedLiteral(int_value=self.value) + + if isinstance(self.value, float): + return default_types_pb2.SerializedLiteral(float_value=self.value) + + if isinstance(self.value, str): + return default_types_pb2.SerializedLiteral(str_value=self.value) + + if self.value is None: + return default_types_pb2.SerializedLiteral( + none_value=default_types_pb2.SerializedLiteral.NoneValue()) + + raise ValueError("Can not serialize Literal of type " + + type(self.value).__name__) + + def placeholder_value(self, placeholder_context) -> Any: + # TODO(b/263505796): Remove this check when a range's placeholder output + # is expected to be a range and not a list. 
+ if isinstance(self.value, range): + return list(self.value) + + if self.value is NanMarker: + return float("nan") + + return self.value + + def cast(self, value: Any, casting_context: Any) -> Any: + if self.value is NanMarker and is_nan(value): + return value + + if value == self.value: + return value + else: + raise ValueError(f"Can not cast {value!r} to {self!r}") + + def __eq__(self, other) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + return isinstance(other, Literal) and self.value == other.value + + def __hash__(self) -> int: + return self._value_hash + + def __repr__(self) -> str: + return f"{self.__class__.__name__}[{self.value!r}]" + + +class Weakref(trace.TraceType): + """Represents weakref of an arbitrary Python object. + + When a function argument is a custom class, instead of making a copy of it + just for the sake of function cache, a weakref is instead kept to save memory. + """ + + def __init__(self, ref: weakref.ReferenceType): + self._ref = ref + self._ref_hash = hash(ref) + + def is_subtype_of(self, other: trace.TraceType) -> bool: + return self == other + + def most_specific_common_supertype( + self, types: Sequence[trace.TraceType]) -> Optional["Weakref"]: + return self if all(self == other for other in types) else None + + def placeholder_value(self, placeholder_context) -> Any: + return self._ref() + + def cast(self, value, _): + if value is self._ref() or value == self._ref(): + return value + + # We unwrap objects when generating the TraceType so we allow matching now. + while hasattr(value, "__wrapped__"): + value = value.__wrapped__ + if value is self._ref(): + return value + + raise ValueError(f"Can not cast {value!r} to {self!r}") + + def __eq__(self, other): + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, Weakref): + return False + + if self._ref() is None or other._ref() is None: + return False + + if self._ref() is other._ref(): + return True + + return self._ref == other._ref + + def __hash__(self): + return self._ref_hash + + def __repr__(self) -> str: + return f"{self.__class__.__name__}[{self._ref!r}])" + + +class Tuple(trace.TraceType, serialization.Serializable): + """Represents a tuple of TraceType objects.""" + + def __init__(self, *components: trace.TraceType): + self.components = components + + def is_subtype_of(self, other: trace.TraceType) -> bool: + if (not isinstance(other, Tuple) or + len(self.components) != len(other.components)): + return False + + return all( + self_component.is_subtype_of(other_component) for self_component, + other_component in zip(self.components, other.components)) + + def most_specific_common_supertype( + self, others: Sequence[trace.TraceType]) -> Optional["Tuple"]: + """See base class.""" + if not all( + isinstance(other, Tuple) and + len(self.components) == len(other.components) for other in others): + return None + + supertyped_components = [] + for i, component in enumerate(self.components): + supertyped_component = component.most_specific_common_supertype( + [other.components[i] for other in others]) + if supertyped_component is None: + return None + supertyped_components.append(supertyped_component) + + return Tuple(*supertyped_components) + + @classmethod + def experimental_type_proto(cls) -> Type[default_types_pb2.SerializedTuple]: + return default_types_pb2.SerializedTuple + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedTuple) -> "Tuple": + return 
Tuple(*[serialization.deserialize(c) for c in proto.components]) + + def experimental_as_proto(self) -> default_types_pb2.SerializedTuple: + return default_types_pb2.SerializedTuple( + components=[serialization.serialize(c) for c in self.components]) + + def placeholder_value(self, placeholder_context) -> Any: + components = [ + component.placeholder_value(placeholder_context) + for component in self.components + ] + return tuple(components) + + def to_tensors(self, value) -> Any: + assert isinstance(value, tuple) + flattened_values = [] + for comp_value, comp_type in zip(value, self.components): + flattened_values.extend(comp_type.to_tensors(comp_value)) + return flattened_values + + def from_tensors(self, tensors) -> Any: + return tuple(c.from_tensors(tensors) for c in self.components) + + def flatten(self) -> PythonList[trace.TraceType]: + flattened_types = [] + for component in self.components: + flattened_types.extend(component.flatten()) + return flattened_types + + def cast(self, value: Any, casting_context) -> Any: + assert isinstance(value, tuple), f"Can not cast {value!r} to tuple type." + assert len(value) == len( + self.components + ), f"Expected {value} to have length of {len(self.components)}" + + casted_values, was_casted = util.cast_and_return_whether_casted( + self.components, value, casting_context + ) + if was_casted: + return tuple(casted_values) + else: + return value + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, Tuple): + return False + + return self.components == other.components + + def __hash__(self) -> int: + return hash(self.components) + + def __repr__(self) -> str: + return f"Tuple[{', '.join(map(repr, self.components))}]" + + +class List(trace.TraceType, serialization.Serializable): + """Represents a list of TraceType objects.""" + + def __init__(self, *components: trace.TraceType): + self.components_tuple = Tuple(*components) + + def is_subtype_of(self, other: trace.TraceType) -> bool: + if not isinstance(other, List): + return False + + return self.components_tuple.is_subtype_of(other.components_tuple) + + def most_specific_common_supertype( + self, others: Sequence[trace.TraceType]) -> Optional["List"]: + """See base class.""" + if not all(isinstance(other, List) for other in others): + return None + + supertyped_components_tuple = ( + self.components_tuple.most_specific_common_supertype( + [other.components_tuple for other in others] + ) + ) + + if supertyped_components_tuple is None: + return None + + return List(*supertyped_components_tuple.components) + + @classmethod + def experimental_type_proto(cls) -> Type[default_types_pb2.SerializedList]: + return default_types_pb2.SerializedList + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedList) -> "List": + return List( + *Tuple.experimental_from_proto(proto.components_tuple).components) + + def experimental_as_proto(self) -> default_types_pb2.SerializedList: + return default_types_pb2.SerializedList( + components_tuple=self.components_tuple.experimental_as_proto()) + + def placeholder_value(self, placeholder_context) -> Any: + return list(self.components_tuple.placeholder_value(placeholder_context)) + + def to_tensors(self, value): + assert isinstance(value, list) + return self.components_tuple.to_tensors(tuple(value)) + + def from_tensors(self, tensors) -> Any: + return list(self.components_tuple.from_tensors(tensors)) + + def flatten(self) -> PythonList[trace.TraceType]:
+ return self.components_tuple.flatten() + + def cast(self, value: Any, casting_context) -> Any: + assert isinstance(value, list), f"Can not cast {value!r} to list type." + assert len(value) == len( + self.components_tuple.components + ), f"Expected {value} to have length of {len(self.components_tuple.components)}" + + casted_values, was_casted = util.cast_and_return_whether_casted( + self.components_tuple.components, value, casting_context + ) + if was_casted: + return list(casted_values) + else: + return value + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, List): + return False + + return self.components_tuple == other.components_tuple + + def __hash__(self) -> int: + return hash(self.components_tuple) + + def __repr__(self) -> str: + return f"List[{', '.join(map(repr, self.components_tuple.components))}]" + + +class NamedTuple(trace.TraceType, serialization.Serializable): + """Represents a NamedTuple of TraceType objects.""" + + def __init__(self, + type_name: str, + attribute_names: PythonTuple[str], + attributes: PythonTuple[trace.TraceType], + placeholder_type: Optional[Type[Any]] = None): + self.type_name = type_name + self.attribute_names = attribute_names + self.attributes = Tuple(*attributes) + self._placeholder_type = placeholder_type + + @classmethod + def from_type_and_attributes( + cls, named_tuple_type: Any, + attributes: PythonTuple[trace.TraceType]) -> "NamedTuple": + return NamedTuple(named_tuple_type.__name__, named_tuple_type._fields, + attributes, named_tuple_type) + + def is_subtype_of(self, other: trace.TraceType) -> bool: + if not isinstance(other, NamedTuple): + return False + + return (self.type_name == other.type_name and + self.attribute_names == other.attribute_names and + self.attributes.is_subtype_of(other.attributes)) + + def most_specific_common_supertype( + self, others: Sequence[trace.TraceType]) -> Optional["NamedTuple"]: + """See base class.""" + if not all( + isinstance(other, NamedTuple) and self.type_name == other.type_name and + self.attribute_names == other.attribute_names for other in others): + return None + + supertyped_attributes = self.attributes.most_specific_common_supertype( + [other.attributes for other in others]) + + if supertyped_attributes is None: + return None + + return NamedTuple(self.type_name, self.attribute_names, + supertyped_attributes.components, self._placeholder_type) + + @classmethod + def experimental_type_proto( + cls) -> Type[default_types_pb2.SerializedNamedTuple]: + return default_types_pb2.SerializedNamedTuple + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedNamedTuple) -> "NamedTuple": + return NamedTuple( + proto.type_name, tuple(proto.attribute_names), + Tuple.experimental_from_proto(proto.attributes).components) + + def experimental_as_proto(self) -> default_types_pb2.SerializedNamedTuple: + return default_types_pb2.SerializedNamedTuple( + type_name=self.type_name, + attribute_names=list(self.attribute_names), + attributes=self.attributes.experimental_as_proto()) + + def placeholder_value(self, placeholder_context) -> Any: + if self._placeholder_type is None: + # We don't need to trace after serialization so it is not needed but we + # can generate a placeholder type using the description if ever needed. + raise ValueError("Can not generate placeholder value for NamedTuple with" + " unspecified placeholder_type.
Note: placeholder_type " + "is lost during serialization.") + attribute_placeholders = [ + attribute.placeholder_value(placeholder_context) + for attribute in self.attributes.components + ] + return self._placeholder_type(*attribute_placeholders) + + def to_tensors(self, value: Any): + assert util.is_namedtuple(value) + flattened_values = [] + for attribute_name, attribute_type in zip( + self.attribute_names, self.attributes.components): + attribute_value = getattr(value, attribute_name) + flattened_values.extend(attribute_type.to_tensors(attribute_value)) + return flattened_values + + def from_tensors(self, tensors) -> Any: + if self._placeholder_type is None: + raise ValueError("Packing serialized NamedTuples is not supported.") + + return self._placeholder_type( + *[c.from_tensors(tensors) for c in self.attributes.components] + ) + + def flatten(self) -> PythonList[trace.TraceType]: + flattened_types = [] + + for component in self.attributes.components: + flattened_types.extend(component.flatten()) + + return flattened_types + + def cast(self, value: Any, casting_context) -> Any: + # Value must have same attributes with the TraceType + assert util.is_namedtuple( + value + ), f"Cannot cast {value!r} to type {self._placeholder_type!r}." + value_dict = value._asdict() + assert set(value_dict.keys()) == set( + self.attribute_names + ), f"{value!r} has different attributes with the TraceType {self!r}" + + casted_values, was_casted = util.cast_and_return_whether_casted( + self.attributes.components, + [getattr(value, name) for name in self.attribute_names], + casting_context, + ) + if was_casted: + return self._placeholder_type(*casted_values) + else: + return value + + def __hash__(self) -> int: + return hash((self.type_name, self.attribute_names, self.attributes)) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, NamedTuple): + return False + + return (self.type_name == other.type_name and + self.attribute_names == other.attribute_names and + self.attributes == other.attributes) + + def __repr__(self) -> str: + paired = [ + f"[{n!r}, {c!r}]" + for n, c in zip(self.attribute_names, self.attributes.components) + ] + return f"{self.type_name}[{', '.join(paired)}]" + + +class Attrs(trace.TraceType): + """Represents a class annotated by attr.s.""" + + def __init__(self, + type_name: str, + attribute_names: PythonTuple[str], + attributes: PythonTuple[trace.TraceType], + placeholder_type: Optional[Type[Any]] = None): + self.named_attributes = NamedTuple(type_name, attribute_names, attributes) + self._placeholder_type = placeholder_type + + @classmethod + def from_type_and_attributes( + cls, attrs_type: Any, + attributes: PythonTuple[trace.TraceType]) -> "Attrs": + return Attrs(attrs_type.__name__, + tuple(attr.name for attr in attrs_type.__attrs_attrs__), + attributes, attrs_type) + + def is_subtype_of(self, other: trace.TraceType) -> bool: + if not isinstance(other, Attrs): + return False + + return self.named_attributes.is_subtype_of(other.named_attributes) + + def most_specific_common_supertype( + self, others: Sequence[trace.TraceType]) -> Optional["Attrs"]: + """See base class.""" + if not all(isinstance(other, Attrs) for other in others): + return None + + supertyped_attributes = ( + self.named_attributes.most_specific_common_supertype( + [other.named_attributes for other in others] + ) + ) + + if supertyped_attributes is None: + return None + + return Attrs(self.named_attributes.type_name, + 
self.named_attributes.attribute_names, + supertyped_attributes.attributes.components, + self._placeholder_type) + + @classmethod + def experimental_type_proto(cls) -> Type[default_types_pb2.SerializedAttrs]: + return default_types_pb2.SerializedAttrs + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedAttrs) -> "Attrs": + return Attrs( + proto.named_attributes.type_name, + tuple(proto.named_attributes.attribute_names), + Tuple.experimental_from_proto( + proto.named_attributes.attributes).components) + + def experimental_as_proto(self) -> default_types_pb2.SerializedAttrs: + return default_types_pb2.SerializedAttrs( + named_attributes=self.named_attributes.experimental_as_proto()) + + def placeholder_value(self, placeholder_context) -> Any: + if self._placeholder_type is None: + # We don't need to trace after serialization so it is not needed but we + # can generate a placeholder type using the description if ever needed. + raise ValueError("Can not generate placeholder value for Attrs with" + " unspecified placeholder_type. Note: placeholder_type " + "is lost during serialization.") + attribute_placeholders = [ + attribute.placeholder_value(placeholder_context) + for attribute in self.named_attributes.attributes.components + ] + return self._placeholder_type(*attribute_placeholders) + + def to_tensors(self, value: Any): + assert util.is_attrs(value) + flattened_values = [] + for attribute_name, attribute_type in zip( + self.named_attributes.attribute_names, + self.named_attributes.attributes.components): + attribute_value = getattr(value, attribute_name) + flattened_values.extend(attribute_type.to_tensors(attribute_value)) + return flattened_values + + def from_tensors(self, tensors): + if self._placeholder_type is None: + raise ValueError("Packing serialized NamedTuples is not supported.") + + return self._placeholder_type( + *[ + c.from_tensors(tensors) + for c in self.named_attributes.attributes.components + ] + ) + + def flatten(self) -> PythonList[trace.TraceType]: + flattened_types = [] + + for component in self.named_attributes.attributes.components: + flattened_types.extend(component.flatten()) + + return flattened_types + + def cast(self, value: Any, casting_context) -> Any: + assert util.is_attrs(value) + + attr_names = self.named_attributes.attribute_names + casted_values, was_casted = util.cast_and_return_whether_casted( + self.named_attributes.attributes.components, + [getattr(value, name) for name in attr_names], + casting_context, + ) + + if was_casted: + return self._placeholder_type(*casted_values) + else: + return value + + def __hash__(self) -> int: + return hash(self.named_attributes) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, Attrs): + return False + + return self.named_attributes == other.named_attributes + + def __repr__(self) -> str: + name_component_zip = zip( + self.named_attributes.attribute_names, + self.named_attributes.attributes.components, + ) + paired = [f"[{n!r}, {c!r}]" for n, c in name_component_zip] + return f"{self.named_attributes.type_name}[{', '.join(paired)}]" + + +class Dict(trace.TraceType, serialization.Serializable): + """Represents a dictionary of TraceType objects. + + Attributes: + mapping: A mapping from keys to corresponding TraceTypes of the dict values. 
+ """ + + def __init__(self, + mapping: PythonDict[Hashable, trace.TraceType], + placeholder_type: Optional[Type[Any]] = None): + self.mapping = mapping + self._placeholder_type = placeholder_type + + def _has_same_structure(self, other): + if not isinstance(other, Dict): + return False + + return self.mapping.keys() == other.mapping.keys() + + def is_subtype_of(self, other: trace.TraceType) -> bool: + """See base class.""" + if not self._has_same_structure(other): + return False + + # We need all keys to be present because there can be logic relying on + # their existence or lack thereof and hence can not guarantee subtype based + # on a subset or superset of keys. + # Only the tracing code can explicitly check for key dependencies and inform + # that decision. + return all(self.mapping[key].is_subtype_of(other.mapping[key]) + for key in self.mapping) + + def most_specific_common_supertype( + self, types: Sequence[trace.TraceType]) -> Optional["Dict"]: + """See base class.""" + if not all(self._has_same_structure(other) for other in types): + return None + + new_mapping = {} + for key in self.mapping.keys(): + common = self.mapping[key].most_specific_common_supertype( + [other.mapping[key] for other in types]) + if common is None: + return None + else: + new_mapping[key] = common + + return Dict(new_mapping, self._placeholder_type) + + @classmethod + def experimental_type_proto(cls) -> Type[default_types_pb2.SerializedDict]: + return default_types_pb2.SerializedDict + + @classmethod + def experimental_from_proto( + cls, proto: default_types_pb2.SerializedDict) -> "Dict": + return Dict({ + Literal.experimental_from_proto(k).value: serialization.deserialize(v) + for k, v in zip(proto.keys, proto.values) + }) + + def experimental_as_proto(self) -> default_types_pb2.SerializedDict: + return default_types_pb2.SerializedDict( + keys=[Literal(k).experimental_as_proto() for k in self.mapping.keys()], + values=[serialization.serialize(v) for v in self.mapping.values()]) + + def placeholder_value(self, placeholder_context) -> Any: + if self._placeholder_type is None: + raise ValueError("Can not generate placeholder value for Dict with" + " unspecified placeholder_type. 
Note: placeholder_type " + "is lost during serialization.") + attribute_placeholders = [ + (key, value.placeholder_value(placeholder_context)) + for key, value in self.mapping.items() + ] + if self._placeholder_type is collections.defaultdict: + return dict(attribute_placeholders) + return self._placeholder_type(attribute_placeholders) + + def to_tensors(self, value: Any): + assert isinstance(value, collections.abc.Mapping) + flattened_values = [] + for key in sorted(self.mapping.keys()): + comp_value, comp_type = value[key], self.mapping[key] + flattened_values.extend(comp_type.to_tensors(comp_value)) + return flattened_values + + def from_tensors(self, tensors): + if self._placeholder_type is None: + raise ValueError("Packing serialized Dict is not supported.") + + sorted_traversal = { + key: self.mapping[key].from_tensors(tensors) + for key in sorted(self.mapping) + } + + if self._placeholder_type is collections.defaultdict: + return {key: sorted_traversal[key] for key in self.mapping} + + return self._placeholder_type( + (key, sorted_traversal[key]) for key in self.mapping + ) + + def flatten(self) -> PythonList[trace.TraceType]: + flattened_types = [] + + for key in sorted(self.mapping.keys()): + flattened_types.extend(self.mapping[key].flatten()) + + return flattened_types + + def cast(self, value: Any, casting_context) -> Any: + # Value must have same keys with the TraceType + assert isinstance( + value, collections.abc.Mapping + ), f"Can not cast {value!r} to a Dict type." + assert set(value.keys()) == set( + self.mapping.keys() + ), f"{value!r} has different keys with the TraceType {self!r}." + + casted_values, was_casted = util.cast_and_return_whether_casted( + self.mapping.values(), + [value[k] for k in self.mapping.keys()], + casting_context, + ) + + if was_casted: + return self._placeholder_type( + **{k: v for k, v in zip(self.mapping.keys(), casted_values)} + ) + else: + return value + + def __eq__(self, other) -> bool: + if not isinstance(other, trace.TraceType): + return NotImplemented + + if not isinstance(other, Dict): + return False + + return self.mapping == other.mapping + + def __hash__(self) -> int: + return hash(frozenset(self.mapping.keys())) + + def __repr__(self) -> str: + paired = [f"[{n!r}, {t!r}]" for n, t in self.mapping.items()] + return f"{self.__class__.__name__}[{', '.join(paired)}]" + + +serialization.register_serializable(Literal) +serialization.register_serializable(Tuple) +serialization.register_serializable(List) +serialization.register_serializable(NamedTuple) +serialization.register_serializable(Attrs) +serialization.register_serializable(Dict) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..316c7ab33442978631fa41ae73bc09db7b2c9856 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/default_types_pb2.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
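As a usage sketch of the Dict type above (Dict and Literal are the classes defined in this module; the values are illustrative), two Dicts only relate when their key sets match exactly and every per-key type relates:

from tensorflow.core.function.trace_type.default_types import Dict, Literal

d1 = Dict({"a": Literal(1), "b": Literal(2)}, dict)
d2 = Dict({"a": Literal(1), "b": Literal(3)}, dict)

d1.is_subtype_of(d1)                     # True: every Literal matches itself.
d1.is_subtype_of(d2)                     # False: Literal(2) is not Literal(3).
d1.most_specific_common_supertype([d2])  # None: unequal Literals never unify.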
+# source: tensorflow/core/function/trace_type/default_types.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from tensorflow.core.function.trace_type import serialization_pb2 as tensorflow_dot_core_dot_function_dot_trace__type_dot_serialization__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n7tensorflow/core/function/trace_type/default_types.proto\x12\x31tensorflow.core.function.trace_type.default_types\x1a\x37tensorflow/core/function/trace_type/serialization.proto\"\xe6\x01\n\x11SerializedLiteral\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x01H\x00\x12\x13\n\tstr_value\x18\x04 \x01(\tH\x00\x12\x64\n\nnone_value\x18\x05 \x01(\x0b\x32N.tensorflow.core.function.trace_type.default_types.SerializedLiteral.NoneValueH\x00\x1a\x0b\n\tNoneValueB\x07\n\x05value\"m\n\x0fSerializedTuple\x12Z\n\ncomponents\x18\x01 \x03(\x0b\x32\x46.tensorflow.core.function.trace_type.serialization.SerializedTraceType\"n\n\x0eSerializedList\x12\\\n\x10\x63omponents_tuple\x18\x01 \x01(\x0b\x32\x42.tensorflow.core.function.trace_type.default_types.SerializedTuple\"\x9a\x01\n\x14SerializedNamedTuple\x12\x11\n\ttype_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61ttribute_names\x18\x02 \x03(\t\x12V\n\nattributes\x18\x03 \x01(\x0b\x32\x42.tensorflow.core.function.trace_type.default_types.SerializedTuple\"t\n\x0fSerializedAttrs\x12\x61\n\x10named_attributes\x18\x01 \x01(\x0b\x32G.tensorflow.core.function.trace_type.default_types.SerializedNamedTuple\"\xbc\x01\n\x0eSerializedDict\x12R\n\x04keys\x18\x01 \x03(\x0b\x32\x44.tensorflow.core.function.trace_type.default_types.SerializedLiteral\x12V\n\x06values\x18\x02 \x03(\x0b\x32\x46.tensorflow.core.function.trace_type.serialization.SerializedTraceType') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.core.function.trace_type.default_types_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SERIALIZEDLITERAL._serialized_start=168 + _SERIALIZEDLITERAL._serialized_end=398 + _SERIALIZEDLITERAL_NONEVALUE._serialized_start=378 + _SERIALIZEDLITERAL_NONEVALUE._serialized_end=389 + _SERIALIZEDTUPLE._serialized_start=400 + _SERIALIZEDTUPLE._serialized_end=509 + _SERIALIZEDLIST._serialized_start=511 + _SERIALIZEDLIST._serialized_end=621 + _SERIALIZEDNAMEDTUPLE._serialized_start=624 + _SERIALIZEDNAMEDTUPLE._serialized_end=778 + _SERIALIZEDATTRS._serialized_start=780 + _SERIALIZEDATTRS._serialized_end=896 + _SERIALIZEDDICT._serialized_start=899 + _SERIALIZEDDICT._serialized_end=1087 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization.py new file mode 100644 index 0000000000000000000000000000000000000000..a1943e828a06025b6ee23f5488154cc6cb36715f --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization.py @@ -0,0 +1,100 @@ +# Copyright 2022 The TensorFlow 
Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Utils for serializing and deserializing TraceTypes.""" + +import abc +from typing import Type + +from google.protobuf import message +from tensorflow.core.function.trace_type import serialization_pb2 + +SerializedTraceType = serialization_pb2.SerializedTraceType + +PROTO_CLASS_TO_PY_CLASS = {} + + +class Serializable(metaclass=abc.ABCMeta): + """TraceTypes implementing this additional interface are portable.""" + + @classmethod + @abc.abstractmethod + def experimental_type_proto(cls) -> Type[message.Message]: + """Returns the unique type of proto associated with this class.""" + raise NotImplementedError + + @classmethod + @abc.abstractmethod + def experimental_from_proto(cls, proto: message.Message) -> "Serializable": + """Returns an instance based on a proto.""" + raise NotImplementedError + + @abc.abstractmethod + def experimental_as_proto(self) -> message.Message: + """Returns a proto representing this instance.""" + raise NotImplementedError + + +def register_serializable(cls: Type[Serializable]): + """Registers a Python class to support serialization. + + Only register standard TF types. Custom types should NOT be registered. + + Args: + cls: Python class to register. + """ + if cls.experimental_type_proto() in PROTO_CLASS_TO_PY_CLASS: + raise ValueError( + "Existing Python class " + + PROTO_CLASS_TO_PY_CLASS[cls.experimental_type_proto()].__name__ + + " already has " + cls.experimental_type_proto().__name__ + + " as its associated proto representation. Please ensure " + + cls.__name__ + " has a unique proto representation.") + + PROTO_CLASS_TO_PY_CLASS[cls.experimental_type_proto()] = cls + + +def serialize(to_serialize: Serializable) -> SerializedTraceType: + """Converts Serializable to a proto SerializedTraceType.""" + + if not isinstance(to_serialize, Serializable): + raise ValueError("Can not serialize " + type(to_serialize).__name__ + + " since it is not Serializable. For object " + + str(to_serialize)) + actual_proto = to_serialize.experimental_as_proto() + + if not isinstance(actual_proto, to_serialize.experimental_type_proto()): + raise ValueError( + type(to_serialize).__name__ + + " returned different type of proto than specified by " + + "experimental_type_proto()") + + serialized = SerializedTraceType() + serialized.representation.Pack(actual_proto) + return serialized + + +def deserialize(proto: SerializedTraceType) -> Serializable: + """Converts a proto SerializedTraceType to instance of Serializable.""" + for proto_class in PROTO_CLASS_TO_PY_CLASS: + if proto.representation.Is(proto_class.DESCRIPTOR): + actual_proto = proto_class() + proto.representation.Unpack(actual_proto) + return PROTO_CLASS_TO_PY_CLASS[proto_class].experimental_from_proto( + actual_proto) + + raise ValueError( + "Can not deserialize proto of url: ", proto.representation.type_url, + " since no matching Python class could be found. 
For value ", + proto.representation.value) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..404a00e4114a676e114ad17e8e21b7e663140685 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tensorflow/core/function/trace_type/serialization.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n7tensorflow/core/function/trace_type/serialization.proto\x12\x31tensorflow.core.function.trace_type.serialization\x1a\x19google/protobuf/any.proto\"C\n\x13SerializedTraceType\x12,\n\x0erepresentation\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.core.function.trace_type.serialization_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SERIALIZEDTRACETYPE._serialized_start=137 + _SERIALIZEDTRACETYPE._serialized_end=204 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_test_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_test_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..5488f681fafc129a1956ae2f0f9bd1177c4db5f3 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/serialization_test_pb2.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/core/function/trace_type/serialization_test.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from tensorflow.core.function.trace_type import serialization_pb2 as tensorflow_dot_core_dot_function_dot_trace__type_dot_serialization__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n Hashable: + if global_id not in self._global_to_local_id: + self._global_to_local_id[global_id] = len(self._global_to_local_id) + + return self._global_to_local_id[global_id] + + def add_placeholder(self, alias_id: Hashable, variable) -> None: + self._alias_id_to_placeholder[alias_id] = variable + + def get_placeholder_mapping(self) -> Dict[Hashable, Any]: + return self._alias_id_to_placeholder + + @property + def is_legacy_signature(self) -> bool: + """If the value is from a legacy signature representation. + + Legacy signature representations include tf.function.input_signature and + ConcreteFunction.structured_input_signature. + """ + return self._is_legacy_signature + + +class InternalPlaceholderContext(trace.PlaceholderContext): + """Container with mappings shared across TraceTypes for placeholder values.""" + + def __init__(self, + context_graph=None, + placeholder_mapping=None, + unnest_only=False, + with_none_control_dependencies=False, + composite_device_name=None): + self._alias_id_to_placeholder = placeholder_mapping or {} + self._naming_scope = None + self._context_graph = context_graph + self._unnest_only = unnest_only + self._with_none_control_dependencies = with_none_control_dependencies + self._composite_device_name = composite_device_name + + def has_placeholder(self, alias_id: Hashable) -> bool: + return alias_id in self._alias_id_to_placeholder + + def get_placeholder(self, alias_id: Hashable) -> Hashable: + if not self.has_placeholder(alias_id): + raise KeyError(f"alias_id: {alias_id} not found in this instance of " + "placeholder context.") + return self._alias_id_to_placeholder[alias_id] + + def add_placeholder(self, alias_id: Hashable, placeholder: Hashable) -> None: + if alias_id in self._alias_id_to_placeholder: + raise KeyError(f"alias id: {alias_id} is already stored in this " + "instance of placeholder context.") + self._alias_id_to_placeholder[alias_id] = placeholder + + def update_naming_scope(self, naming_scope: Optional[str]) -> None: + self._naming_scope = naming_scope + + @property + def naming_scope(self) -> Optional[str]: + return self._naming_scope + + @property + def context_graph(self): + return self._context_graph + + @property + def unnest_only(self) -> bool: + return self._unnest_only + + @property + def with_none_control_dependencies(self) -> bool: + return self._with_none_control_dependencies + + @property + def composite_device_name(self) -> Any: + return self._composite_device_name + + +class InternalCastContext(trace.CastContext): + """Default casting behaviors.""" + + def __init__(self, allow_specs=False): + self._allow_specs = allow_specs + + @property + def allow_specs(self) -> bool: + """Allow TypeSpecs to be casted (instead of the actual CompositeTensors).""" + # Public APIs like get_concrete_function allow users to pass in specs + # instead which need to pass through input binding etc. 
+ return self._allow_specs + + +def from_value(value: Any, + context: trace.TracingContext = None) -> trace.TraceType: + """Returns a TraceType corresponding to the value based on the context. + + Args: + value: The value to generate a TraceType for. + context: The TracingContext to be shared during protocol calls. + + Returns: + A TraceType object representing the given value. + """ + + if context is None: + context = InternalTracingContext() + + if context.is_legacy_signature and isinstance(value, trace.TraceType): + return value + elif isinstance(value, trace.SupportsTracingProtocol): + generated_type = value.__tf_tracing_type__(context) + if not isinstance(generated_type, trace.TraceType): + raise TypeError( + "Expected an instance of TraceType for Tracing Protocol call to " + + str(value) + " but got " + str(generated_type)) + return generated_type + + # TODO(b/183107079): Allow these once they're handled properly. + if isinstance(value, weakref.ref): + raise TypeError( + f"weakref input {value} not supported for tf.function." + ) + + if hasattr(value, "__wrapped__"): + return from_value(value.__wrapped__, context) + + if isinstance(value, list): + return default_types.List(*(from_value(c, context) for c in value)) + + if isinstance(value, tuple): + if util.is_namedtuple(value): + named_tuple_type = type(value) + return default_types.NamedTuple.from_type_and_attributes( + named_tuple_type, tuple(from_value(c, context) for c in value)) + else: + return default_types.Tuple(*(from_value(c, context) for c in value)) + + if isinstance(value, collections.abc.Mapping): + mapping_type = type(value) + return default_types.Dict( + {k: from_value(value[k], context) for k in value}, mapping_type) + + if util.is_attrs(value): + return default_types.Attrs.from_type_and_attributes( + type(value), + tuple( + from_value(getattr(value, a.name), context) + for a in value.__attrs_attrs__)) + + if util.is_np_ndarray(value): + ndarray = value.__array__() + return default_types.TENSOR(ndarray.shape, ndarray.dtype) + + if isinstance(value, custom_nest_protocol.CustomNestProtocol): + metadata, components = value.__tf_flatten__() + return custom_nest_trace_type.CustomNestTraceType( + type(value), metadata, tuple(from_value(c, context) for c in components) + ) + + try: + ref = weakref.ref(value) + if ref is None: + raise TypeError( + f"Deleted objects are not valid tf.function arguments, Got {value!r}") + else: + return default_types.Weakref(ref) + except TypeError: + try: + return default_types.Literal(value) + except: + raise TypeError( # pylint: disable=raise-missing-from + f"Could not generate a generic TraceType for {value!r}. " + f"Please verify that it is immutable/hashable. Otherwise, consider " + f"implementing the Tracing Protocol for it.") diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/util.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/util.py new file mode 100644 index 0000000000000000000000000000000000000000..899936b3d66a3b0d16acaa3e456d10ed9a0278e3 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/core/function/trace_type/util.py @@ -0,0 +1,52 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
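A sketch of how the fallback chain in from_value above composes for an ordinary nested Python value (from_value is re-exported by this package's __init__; the comment shows the rough shape of the result):

from tensorflow.core.function.trace_type import from_value

t = from_value({"x": (1, "a"), "y": [2.0]})
# A Dict whose mapping is roughly:
#   "x" -> Tuple[Literal[1], Literal['a']]
#   "y" -> List[Literal[2.0]]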
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Utilities for the trace_type module.""" + +from typing import Any, List, Tuple + +import numpy as np + + +# TODO(b/225045380): Depend on the abstracted `leaf` lib from 'nest'. +def is_namedtuple(obj): + return hasattr(obj, "_fields") and all( + isinstance(field, str) for field in obj._fields) + + +# TODO(b/225045380): Depend on the abstracted `leaf` lib from 'nest'. +def is_attrs(obj): + return hasattr(type(obj), "__attrs_attrs__") + + +# TODO(b/225045380): Depend on the abstracted `leaf` lib from 'nest'. +def is_np_ndarray(value): + return hasattr(value, "__array__") and not ( + # For legacy reasons we do not automatically promote Numpy strings. + isinstance(value, np.str_) + # NumPy dtypes have __array__ as unbound methods. + or isinstance(value, type)) + + +def cast_and_return_whether_casted( + trace_types, values, context +) -> Tuple[List[Any], bool]: + did_cast = False + casted_values = [] + for t, v in zip(trace_types, values): + casted_v = t.cast(v, context) + casted_values.append(casted_v) + if casted_v is not v: + did_cast = True + return casted_values, did_cast diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef5456e486b59a46376e539a78d0388c73bf9899 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/toco_flags_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/toco_flags_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..882925ccf92c5d9517236be9ecbeb52e1ce35d05 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/toco_flags_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/types_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/types_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77a93e2777bfcf986b3ea60a81fa2596f8366f8c Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/__pycache__/types_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..241b2226b4db6cf170d5a941cf0818fc66567c77 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/gen_html.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/gen_html.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c0a73aa7952f9984389b64e8a5cc493a87265c6 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/gen_html.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/toco_conversion_log_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/toco_conversion_log_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88ada49abc7f31b17b078c16bfef1e0040781d93 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/__pycache__/toco_conversion_log_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/gen_html.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/gen_html.py new file mode 100644 index 0000000000000000000000000000000000000000..02f2a83f8fc15dc4c5aedbe16d758b79bb69c3c7 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/gen_html.py @@ -0,0 +1,265 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""A utility class to generate the report HTML based on a common template.""" + +import io +import os + +from tensorflow.lite.toco.logging import toco_conversion_log_pb2 as _toco_conversion_log_pb2 +from tensorflow.python.lib.io import file_io as _file_io +from tensorflow.python.platform import resource_loader as _resource_loader + +html_escape_table = { + "&": "&amp;", + '"': "&quot;", + "'": "&apos;", + ">": "&gt;", + "<": "&lt;", +} + + +def html_escape(text): + return "".join(html_escape_table.get(c, c) for c in text) + + +def get_input_type_from_signature(op_signature): + """Parses op_signature and returns a string denoting the input tensor type. + + Args: + op_signature: a string specifying the signature of a particular operator.
+ The signature of an operator contains the input tensor's shape and type, + output tensor's shape and type, operator's name and its version. It has + the following schema: + INPUT:input_1_shape::input_1_type::input_2_shape::input_2_type::.. + ::OUTPUT:output_1_shape::output_1_type::output_2_shape::output_2_type:: + ..::NAME:operator_name ::VERSION:operator_version + An example of an operator signature is: + INPUT:[1,73,73,160]::float::[64,1,1,160]::float::[64]::float:: + OUTPUT:[1,73,73,64]::float::NAME:Conv::VERSION:1 + + Returns: + A string denoting the input tensors' type. In the form of shape/type + separated + by comma. For example: + shape:[1,73,73,160],type:float,shape:[64,1,1,160],type:float,shape:[64], + type:float + """ + start = op_signature.find(":") + end = op_signature.find("::OUTPUT") + inputs = op_signature[start + 1:end] + lst = inputs.split("::") + out_str = "" + for i in range(len(lst)): + if i % 2 == 0: + out_str += "shape:" + else: + out_str += "type:" + out_str += lst[i] + out_str += "," + return out_str[:-1] + + +def get_operator_type(op_name, conversion_log): + if op_name in conversion_log.built_in_ops: + return "BUILT-IN" + elif op_name in conversion_log.custom_ops: + return "CUSTOM OP" + else: + return "SELECT OP" + + +class HTMLGenerator: + """Utility class to generate an HTML report.""" + + def __init__(self, html_template_path, export_report_path): + """Reads the HTML template content. + + Args: + html_template_path: A string, path to the template HTML file. + export_report_path: A string, path to the generated HTML report. This path + should point to a '.html' file with date and time in its name. + e.g. 2019-01-01-10:05.toco_report.html. + + Raises: + IOError: File doesn't exist. + """ + # Load the template HTML. + if not _file_io.file_exists(html_template_path): + raise IOError("File '{0}' does not exist.".format(html_template_path)) + with _file_io.FileIO(html_template_path, "r") as f: + self.html_template = f.read() + + _file_io.recursive_create_dir(os.path.dirname(export_report_path)) + self.export_report_path = export_report_path + + def generate(self, + toco_conversion_log_before, + toco_conversion_log_after, + post_training_quant_enabled, + dot_before, + dot_after, + toco_err_log="", + tflite_graph_path=""): + """Generates the HTML report and writes it to local directory. + + This function uses the fields in `toco_conversion_log_before` and + `toco_conversion_log_after` to populate the HTML content. Certain markers + (placeholders) in the HTML template are then substituted with the fields + from the protos. Once finished it will write the HTML file to the specified + local file path. + + Args: + toco_conversion_log_before: A `TocoConversionLog` protobuf generated + before the model is converted by TOCO. + toco_conversion_log_after: A `TocoConversionLog` protobuf generated after + the model is converted by TOCO. + post_training_quant_enabled: A boolean, whether post-training quantization + is enabled. + dot_before: A string, the dot representation of the model + before the conversion. + dot_after: A string, the dot representation of the model after + the conversion. + toco_err_log: A string, the logs emitted by TOCO during conversion. Caller + need to ensure that this string is properly anonymized (any kind of + user data should be eliminated). + tflite_graph_path: A string, the filepath to the converted TFLite model. + + Raises: + RuntimeError: When error occurs while generating the template. 
+ """ + html_dict = {} + html_dict[""] = ( + r'Fail' + ) if toco_err_log else r'Success' + html_dict[""] = str( + toco_conversion_log_before.model_size) + html_dict[""] = str( + toco_conversion_log_after.model_size) + html_dict[""] = str( + sum(toco_conversion_log_after.built_in_ops.values())) + html_dict[""] = str( + sum(toco_conversion_log_after.select_ops.values())) + html_dict[""] = str( + sum(toco_conversion_log_after.custom_ops.values())) + html_dict[""] = ( + "is" if post_training_quant_enabled else "isn't") + + pre_op_profile = "" + post_op_profile = "" + + # Generate pre-conversion op profiles as a list of HTML table rows. + for i in range(len(toco_conversion_log_before.op_list)): + # Append operator name column. + pre_op_profile += "" + toco_conversion_log_before.op_list[ + i] + "" + # Append input type column. + if i < len(toco_conversion_log_before.op_signatures): + pre_op_profile += "" + get_input_type_from_signature( + toco_conversion_log_before.op_signatures[i]) + "" + else: + pre_op_profile += "" + + # Generate post-conversion op profiles as a list of HTML table rows. + for op in toco_conversion_log_after.op_list: + supported_type = get_operator_type(op, toco_conversion_log_after) + post_op_profile += ("" + op + "" + supported_type + + "") + + html_dict[""] = pre_op_profile + html_dict[""] = post_op_profile + html_dict[""] = dot_before + html_dict[""] = dot_after + if toco_err_log: + html_dict[""] = html_escape(toco_err_log) + else: + success_info = ("TFLite graph conversion successful. You can preview the " + "converted model at: ") + tflite_graph_path + html_dict[""] = html_escape(success_info) + + # Replace each marker (as keys of html_dict) with the actual text (as values + # of html_dict) in the HTML template string. + template = self.html_template + for marker in html_dict: + template = template.replace(marker, html_dict[marker], 1) + # Check that the marker text is replaced. + if template.find(marker) != -1: + raise RuntimeError("Could not populate marker text %r" % marker) + + with _file_io.FileIO(self.export_report_path, "w") as f: + f.write(template) + + +def gen_conversion_log_html(conversion_log_dir, quantization_enabled, + tflite_graph_path): + """Generates an HTML report about the conversion process. + + Args: + conversion_log_dir: A string specifying the file directory of the conversion + logs. It's required that before calling this function, the + `conversion_log_dir` + already contains the following files: `toco_log_before.pb`, + `toco_log_after.pb`, `toco_tf_graph.dot`, + `toco_tflite_graph.dot`. + quantization_enabled: A boolean, passed from the tflite converter to + indicate whether post-training quantization is enabled during conversion. + tflite_graph_path: A string, the filepath to the converted TFLite model. + + Raises: + IOError: When any of the required files doesn't exist. 
+ """ + template_filename = _resource_loader.get_path_to_datafile("template.html") + if not os.path.exists(template_filename): + raise IOError("Failed to generate HTML: file '{0}' doesn't exist.".format( + template_filename)) + + toco_log_before_path = os.path.join(conversion_log_dir, "toco_log_before.pb") + toco_log_after_path = os.path.join(conversion_log_dir, "toco_log_after.pb") + dot_before_path = os.path.join(conversion_log_dir, "toco_tf_graph.dot") + dot_after_path = os.path.join(conversion_log_dir, "toco_tflite_graph.dot") + if not os.path.exists(toco_log_before_path): + raise IOError("Failed to generate HTML: file '{0}' doesn't exist.".format( + toco_log_before_path)) + if not os.path.exists(toco_log_after_path): + raise IOError("Failed to generate HTML: file '{0}' doesn't exist.".format( + toco_log_after_path)) + if not os.path.exists(dot_before_path): + raise IOError("Failed to generate HTML: file '{0}' doesn't exist.".format( + dot_before_path)) + if not os.path.exists(dot_after_path): + raise IOError("Failed to generate HTML: file '{0}' doesn't exist.".format( + dot_after_path)) + + html_generator = HTMLGenerator( + template_filename, + os.path.join(conversion_log_dir, "toco_conversion_summary.html")) + + # Parse the generated `TocoConversionLog`. + toco_conversion_log_before = _toco_conversion_log_pb2.TocoConversionLog() + toco_conversion_log_after = _toco_conversion_log_pb2.TocoConversionLog() + with open(toco_log_before_path, "rb") as f: + toco_conversion_log_before.ParseFromString(f.read()) + with open(toco_log_after_path, "rb") as f: + toco_conversion_log_after.ParseFromString(f.read()) + + # Read the dot file before/after the conversion. + with io.open(dot_before_path, "r", encoding="utf-8") as f: + dot_before = f.read().rstrip() + with io.open(dot_after_path, "r", encoding="utf-8") as f: + dot_after = f.read().rstrip() + + html_generator.generate(toco_conversion_log_before, toco_conversion_log_after, + quantization_enabled, dot_before, dot_after, + toco_conversion_log_after.toco_err_logs, + tflite_graph_path) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/toco_conversion_log_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/toco_conversion_log_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..9c45e243b45fb305038a76bbb936a0b3bc705728 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/logging/toco_conversion_log_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tensorflow/lite/toco/logging/toco_conversion_log.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6tensorflow/lite/toco/logging/toco_conversion_log.proto\x12\x04toco\"\xc9\x04\n\x11TocoConversionLog\x12\x0f\n\x07op_list\x18\x01 \x03(\t\x12=\n\x0c\x62uilt_in_ops\x18\x02 \x03(\x0b\x32\'.toco.TocoConversionLog.BuiltInOpsEntry\x12:\n\ncustom_ops\x18\x03 \x03(\x0b\x32&.toco.TocoConversionLog.CustomOpsEntry\x12:\n\nselect_ops\x18\x04 \x03(\x0b\x32&.toco.TocoConversionLog.SelectOpsEntry\x12\x15\n\rop_signatures\x18\x05 \x03(\t\x12\x1a\n\x12input_tensor_types\x18\x06 \x03(\t\x12\x1b\n\x13output_tensor_types\x18\x07 \x03(\t\x12\x19\n\x11log_generation_ts\x18\x08 \x01(\x03\x12\x12\n\nmodel_size\x18\t \x01(\x05\x12\x17\n\x0ftf_lite_version\x18\n \x01(\t\x12\x12\n\nos_version\x18\x0b \x01(\t\x12\x12\n\nmodel_hash\x18\x0c \x01(\t\x12\x15\n\rtoco_err_logs\x18\r \x01(\t\x1a\x31\n\x0f\x42uiltInOpsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0e\x43ustomOpsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eSelectOpsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tensorflow.lite.toco.logging.toco_conversion_log_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TOCOCONVERSIONLOG_BUILTINOPSENTRY._options = None + _TOCOCONVERSIONLOG_BUILTINOPSENTRY._serialized_options = b'8\001' + _TOCOCONVERSIONLOG_CUSTOMOPSENTRY._options = None + _TOCOCONVERSIONLOG_CUSTOMOPSENTRY._serialized_options = b'8\001' + _TOCOCONVERSIONLOG_SELECTOPSENTRY._options = None + _TOCOCONVERSIONLOG_SELECTOPSENTRY._serialized_options = b'8\001' + _TOCOCONVERSIONLOG._serialized_start=65 + _TOCOCONVERSIONLOG._serialized_end=650 + _TOCOCONVERSIONLOG_BUILTINOPSENTRY._serialized_start=501 + _TOCOCONVERSIONLOG_BUILTINOPSENTRY._serialized_end=550 + _TOCOCONVERSIONLOG_CUSTOMOPSENTRY._serialized_start=552 + _TOCOCONVERSIONLOG_CUSTOMOPSENTRY._serialized_end=600 + _TOCOCONVERSIONLOG_SELECTOPSENTRY._serialized_start=602 + _TOCOCONVERSIONLOG_SELECTOPSENTRY._serialized_end=650 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8e1d82211020545633b8fa699bc121bad293bbb Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/__init__.cpython-310.pyc differ 
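As a quick orientation to the toco_conversion_log_pb2 module added above, a minimal sketch of how such a log is read back (illustrative only; the path below is hypothetical):

  from tensorflow.lite.toco.logging import toco_conversion_log_pb2

  log = toco_conversion_log_pb2.TocoConversionLog()
  with open("/tmp/toco_logs/toco_log_after.pb", "rb") as f:
    log.ParseFromString(f.read())
  # built_in_ops is a map<string, int32> from op name to occurrence count.
  print(log.model_size, sum(log.built_in_ops.values()))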
diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/toco_from_protos.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/toco_from_protos.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d175520fb57cf58163b6075756371602d5f2508b Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/__pycache__/toco_from_protos.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/toco_from_protos.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/toco_from_protos.py new file mode 100644 index 0000000000000000000000000000000000000000..1f315457b63f0bf8bd8c14c697de1cb3665198ef --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/lite/toco/python/toco_from_protos.py @@ -0,0 +1,74 @@ +# Copyright 2017 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Python console command to invoke TOCO from serialized protos.""" +import argparse +import sys + +# We need to import pywrap_tensorflow prior to the toco wrapper. 
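+# Loading pywrap_tensorflow first pulls in TensorFlow's shared libraries so
+# that the _pywrap_toco_api extension imported below can resolve its symbols
+# against them.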
+# pylint: disable=invalid-import-order,g-bad-import-order +from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import +from tensorflow.python import _pywrap_toco_api +from absl import app + +FLAGS = None + + +def execute(unused_args): + """Runs the converter.""" + with open(FLAGS.model_proto_file, "rb") as model_file: + model_str = model_file.read() + + with open(FLAGS.toco_proto_file, "rb") as toco_file: + toco_str = toco_file.read() + + with open(FLAGS.model_input_file, "rb") as input_file: + input_str = input_file.read() + + output_str = _pywrap_toco_api.TocoConvert( + model_str, + toco_str, + input_str, + False, # extended_return + ) + open(FLAGS.model_output_file, "wb").write(output_str) + sys.exit(0) + + +def main(): + global FLAGS + parser = argparse.ArgumentParser( + description="Invoke toco using protos as input.") + parser.add_argument( + "model_proto_file", + type=str, + help="File containing serialized proto that describes the model.") + parser.add_argument( + "toco_proto_file", + type=str, + help="File containing serialized proto describing how TOCO should run.") + parser.add_argument( + "model_input_file", type=str, help="Input model is read from this file.") + parser.add_argument( + "model_output_file", + type=str, + help="Result of applying TOCO conversion is written here.") + + FLAGS, unparsed = parser.parse_known_args() + + app.run(main=execute, argv=[sys.argv[0]] + unparsed) + + +if __name__ == "__main__": + main() diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..663bf28f2807fae453f14036927b0541df42c4fd Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ebe448b6510019912b8c15742900105329a97a8a Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e3a460d322bd7976de51a0af7296a51abc92725 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profile_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profile_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e3600cabb53306be0e63ca1319175f1f140fb63 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profile_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profiler_options_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profiler_options_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..768d80702799af21c6450cf0d319d901670883fc Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/profiler_options_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/trace_events_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/trace_events_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b456496396c3b5fbb5a3c9b26ad43a659abf22f Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/trace_events_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/xplane_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/xplane_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..070535e7b52084c18b4079107feeeec0d09ae6f2 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/__pycache__/xplane_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profile_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profile_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..38e93c7ceda0c3431e62659dd34e4a05e79d4d6c --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profile_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tsl/profiler/protobuf/profile.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#tsl/profiler/protobuf/profile.proto\x12\x17tensorflow.tfprof.pprof\"\xf3\x03\n\x07Profile\x12\x37\n\x0bsample_type\x18\x01 \x03(\x0b\x32\".tensorflow.tfprof.pprof.ValueType\x12/\n\x06sample\x18\x02 \x03(\x0b\x32\x1f.tensorflow.tfprof.pprof.Sample\x12\x31\n\x07mapping\x18\x03 \x03(\x0b\x32 .tensorflow.tfprof.pprof.Mapping\x12\x33\n\x08location\x18\x04 \x03(\x0b\x32!.tensorflow.tfprof.pprof.Location\x12\x33\n\x08\x66unction\x18\x05 \x03(\x0b\x32!.tensorflow.tfprof.pprof.Function\x12\x14\n\x0cstring_table\x18\x06 \x03(\t\x12\x13\n\x0b\x64rop_frames\x18\x07 \x01(\x03\x12\x13\n\x0bkeep_frames\x18\x08 \x01(\x03\x12\x12\n\ntime_nanos\x18\t \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\n \x01(\x03\x12\x37\n\x0bperiod_type\x18\x0b \x01(\x0b\x32\".tensorflow.tfprof.pprof.ValueType\x12\x0e\n\x06period\x18\x0c \x01(\x03\x12\x0f\n\x07\x63omment\x18\r \x03(\x03\x12\x1b\n\x13\x64\x65\x66\x61ult_sample_type\x18\x0e \x01(\x03\"\'\n\tValueType\x12\x0c\n\x04type\x18\x01 \x01(\x03\x12\x0c\n\x04unit\x18\x02 \x01(\x03\"[\n\x06Sample\x12\x13\n\x0blocation_id\x18\x01 \x03(\x04\x12\r\n\x05value\x18\x02 \x03(\x03\x12-\n\x05label\x18\x03 \x03(\x0b\x32\x1e.tensorflow.tfprof.pprof.Label\".\n\x05Label\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0b\n\x03str\x18\x02 \x01(\x03\x12\x0b\n\x03num\x18\x03 \x01(\x03\"\xdd\x01\n\x07Mapping\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_start\x18\x02 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x03 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x04 \x01(\x04\x12\x10\n\x08\x66ilename\x18\x05 \x01(\x03\x12\x10\n\x08\x62uild_id\x18\x06 \x01(\x03\x12\x15\n\rhas_functions\x18\x07 \x01(\x08\x12\x15\n\rhas_filenames\x18\x08 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\t \x01(\x08\x12\x19\n\x11has_inline_frames\x18\n \x01(\x08\"h\n\x08Location\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x12\n\nmapping_id\x18\x02 \x01(\x04\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\x04\x12+\n\x04line\x18\x04 \x03(\x0b\x32\x1d.tensorflow.tfprof.pprof.Line\")\n\x04Line\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\x04\x12\x0c\n\x04line\x18\x02 \x01(\x03\"_\n\x08\x46unction\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\x03\x12\x13\n\x0bsystem_name\x18\x03 \x01(\x03\x12\x10\n\x08\x66ilename\x18\x04 \x01(\x03\x12\x12\n\nstart_line\x18\x05 \x01(\x03\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.profiler.protobuf.profile_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _PROFILE._serialized_start=65 + _PROFILE._serialized_end=564 + _VALUETYPE._serialized_start=566 + _VALUETYPE._serialized_end=605 + _SAMPLE._serialized_start=607 + _SAMPLE._serialized_end=698 + _LABEL._serialized_start=700 + _LABEL._serialized_end=746 + _MAPPING._serialized_start=749 + _MAPPING._serialized_end=970 + _LOCATION._serialized_start=972 + _LOCATION._serialized_end=1076 + _LINE._serialized_start=1078 + _LINE._serialized_end=1119 + _FUNCTION._serialized_start=1121 + _FUNCTION._serialized_end=1216 +# 
@@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profiler_options_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profiler_options_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..5ec6b5c1ce3ccae9291b81c00f8a5030afbf2178 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/profiler_options_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tsl/profiler/protobuf/profiler_options.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,tsl/profiler/protobuf/profiler_options.proto\x12\ntensorflow\"\x83\x03\n\x0eProfileOptions\x12\x0f\n\x07version\x18\x05 \x01(\r\x12:\n\x0b\x64\x65vice_type\x18\x06 \x01(\x0e\x32%.tensorflow.ProfileOptions.DeviceType\x12\x1b\n\x13include_dataset_ops\x18\x01 \x01(\x08\x12\x19\n\x11host_tracer_level\x18\x02 \x01(\r\x12\x1b\n\x13\x64\x65vice_tracer_level\x18\x03 \x01(\r\x12\x1b\n\x13python_tracer_level\x18\x04 \x01(\r\x12\x18\n\x10\x65nable_hlo_proto\x18\x07 \x01(\x08\x12\x1a\n\x12start_timestamp_ns\x18\x08 \x01(\x04\x12\x13\n\x0b\x64uration_ms\x18\t \x01(\x04\x12\x17\n\x0frepository_path\x18\n \x01(\t\"N\n\nDeviceType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x07\n\x03\x43PU\x10\x01\x12\x07\n\x03GPU\x10\x02\x12\x07\n\x03TPU\x10\x03\x12\x14\n\x10PLUGGABLE_DEVICE\x10\x04\"\xd0\x01\n#RemoteProfilerSessionManagerOptions\x12\x34\n\x10profiler_options\x18\x01 \x01(\x0b\x32\x1a.tensorflow.ProfileOptions\x12\x19\n\x11service_addresses\x18\x02 \x03(\t\x12%\n\x1dsession_creation_timestamp_ns\x18\x03 \x01(\x04\x12\x1f\n\x17max_session_duration_ms\x18\x04 \x01(\x04\x12\x10\n\x08\x64\x65lay_ms\x18\x05 \x01(\x04\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.profiler.protobuf.profiler_options_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _PROFILEOPTIONS._serialized_start=61 + _PROFILEOPTIONS._serialized_end=448 + _PROFILEOPTIONS_DEVICETYPE._serialized_start=370 + _PROFILEOPTIONS_DEVICETYPE._serialized_end=448 + _REMOTEPROFILERSESSIONMANAGEROPTIONS._serialized_start=451 + _REMOTEPROFILERSESSIONMANAGEROPTIONS._serialized_end=659 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/trace_events_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/trace_events_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..826ff181c70935b1fba8ba2930d2d335eea76da9 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/trace_events_pb2.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tsl/profiler/protobuf/trace_events.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(tsl/profiler/protobuf/trace_events.proto\x12\x0ctsl.profiler\"\xb0\x01\n\x05Trace\x12\x31\n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32 .tsl.profiler.Trace.DevicesEntry\x12.\n\x0ctrace_events\x18\x04 \x03(\x0b\x32\x18.tsl.profiler.TraceEvent\x1a\x44\n\x0c\x44\x65vicesEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.tsl.profiler.Device:\x02\x38\x01\"\xab\x01\n\x06\x44\x65vice\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tdevice_id\x18\x02 \x01(\r\x12\x36\n\tresources\x18\x03 \x03(\x0b\x32#.tsl.profiler.Device.ResourcesEntry\x1aH\n\x0eResourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tsl.profiler.Resource:\x02\x38\x01\"A\n\x08Resource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bresource_id\x18\x02 \x01(\r\x12\x12\n\nsort_index\x18\x03 \x01(\r\"\xcc\x01\n\nTraceEvent\x12\x11\n\tdevice_id\x18\x01 \x01(\r\x12\x13\n\x0bresource_id\x18\x02 \x01(\r\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x14\n\x0ctimestamp_ps\x18\t \x01(\x04\x12\x13\n\x0b\x64uration_ps\x18\n \x01(\x04\x12\x30\n\x04\x61rgs\x18\x0b \x03(\x0b\x32\".tsl.profiler.TraceEvent.ArgsEntry\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42|\n\x18org.tensorflow.frameworkB\x11TraceEventsProtosP\x01ZHgithub.com/tensorflow/tensorflow/tensorflow/go/core/core_protos_go_proto\xf8\x01\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.profiler.protobuf.trace_events_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\021TraceEventsProtosP\001ZHgithub.com/tensorflow/tensorflow/tensorflow/go/core/core_protos_go_proto\370\001\001' + _TRACE_DEVICESENTRY._options = None + _TRACE_DEVICESENTRY._serialized_options = b'8\001' + _DEVICE_RESOURCESENTRY._options = None + _DEVICE_RESOURCESENTRY._serialized_options = b'8\001' + _TRACEEVENT_ARGSENTRY._options = None + _TRACEEVENT_ARGSENTRY._serialized_options = b'8\001' + _TRACE._serialized_start=59 + _TRACE._serialized_end=235 + _TRACE_DEVICESENTRY._serialized_start=167 + _TRACE_DEVICESENTRY._serialized_end=235 + _DEVICE._serialized_start=238 + _DEVICE._serialized_end=409 + _DEVICE_RESOURCESENTRY._serialized_start=337 + _DEVICE_RESOURCESENTRY._serialized_end=409 + _RESOURCE._serialized_start=411 + _RESOURCE._serialized_end=476 + _TRACEEVENT._serialized_start=479 + _TRACEEVENT._serialized_end=683 + _TRACEEVENT_ARGSENTRY._serialized_start=640 + _TRACEEVENT_ARGSENTRY._serialized_end=683 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/xplane_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/xplane_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..048b3c97467988bbc309af7b87bea0d8b8088984 --- /dev/null +++ 
b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/profiler/protobuf/xplane_pb2.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tsl/profiler/protobuf/xplane.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"tsl/profiler/protobuf/xplane.proto\x12\x13tensorflow.profiler\"j\n\x06XSpace\x12+\n\x06planes\x18\x01 \x03(\x0b\x32\x1b.tensorflow.profiler.XPlane\x12\x0e\n\x06\x65rrors\x18\x02 \x03(\t\x12\x10\n\x08warnings\x18\x03 \x03(\t\x12\x11\n\thostnames\x18\x04 \x03(\t\"\xba\x03\n\x06XPlane\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12)\n\x05lines\x18\x03 \x03(\x0b\x32\x1a.tensorflow.profiler.XLine\x12\x46\n\x0e\x65vent_metadata\x18\x04 \x03(\x0b\x32..tensorflow.profiler.XPlane.EventMetadataEntry\x12\x44\n\rstat_metadata\x18\x05 \x03(\x0b\x32-.tensorflow.profiler.XPlane.StatMetadataEntry\x12)\n\x05stats\x18\x06 \x03(\x0b\x32\x1a.tensorflow.profiler.XStat\x1aY\n\x12\x45ventMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.tensorflow.profiler.XEventMetadata:\x02\x38\x01\x1aW\n\x11StatMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".tensorflow.profiler.XStatMetadata:\x02\x38\x01\"\xbb\x01\n\x05XLine\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x12\n\ndisplay_id\x18\n \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x0b \x01(\t\x12\x14\n\x0ctimestamp_ns\x18\x03 \x01(\x03\x12\x13\n\x0b\x64uration_ps\x18\t \x01(\x03\x12+\n\x06\x65vents\x18\x04 \x03(\x0b\x32\x1b.tensorflow.profiler.XEventJ\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08J\x04\x08\x08\x10\t\"\x95\x01\n\x06XEvent\x12\x13\n\x0bmetadata_id\x18\x01 \x01(\x03\x12\x13\n\toffset_ps\x18\x02 \x01(\x03H\x00\x12\x19\n\x0fnum_occurrences\x18\x05 \x01(\x03H\x00\x12\x13\n\x0b\x64uration_ps\x18\x03 \x01(\x03\x12)\n\x05stats\x18\x04 \x03(\x0b\x32\x1a.tensorflow.profiler.XStatB\x06\n\x04\x64\x61ta\"\xad\x01\n\x05XStat\x12\x13\n\x0bmetadata_id\x18\x01 \x01(\x03\x12\x16\n\x0c\x64ouble_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cuint64_value\x18\x03 \x01(\x04H\x00\x12\x15\n\x0bint64_value\x18\x04 \x01(\x03H\x00\x12\x13\n\tstr_value\x18\x05 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x06 \x01(\x0cH\x00\x12\x13\n\tref_value\x18\x07 \x01(\x04H\x00\x42\x07\n\x05value\"\x8f\x01\n\x0eXEventMetadata\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x03 \x01(\x0c\x12)\n\x05stats\x18\x05 \x03(\x0b\x32\x1a.tensorflow.profiler.XStat\x12\x10\n\x08\x63hild_id\x18\x06 \x03(\x03\">\n\rXStatMetadata\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xf8\x01\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.profiler.protobuf.xplane_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\370\001\001' + _XPLANE_EVENTMETADATAENTRY._options = None + 
_XPLANE_EVENTMETADATAENTRY._serialized_options = b'8\001' + _XPLANE_STATMETADATAENTRY._options = None + _XPLANE_STATMETADATAENTRY._serialized_options = b'8\001' + _XSPACE._serialized_start=59 + _XSPACE._serialized_end=165 + _XPLANE._serialized_start=168 + _XPLANE._serialized_end=610 + _XPLANE_EVENTMETADATAENTRY._serialized_start=432 + _XPLANE_EVENTMETADATAENTRY._serialized_end=521 + _XPLANE_STATMETADATAENTRY._serialized_start=523 + _XPLANE_STATMETADATAENTRY._serialized_end=610 + _XLINE._serialized_start=613 + _XLINE._serialized_end=800 + _XEVENT._serialized_start=803 + _XEVENT._serialized_end=952 + _XSTAT._serialized_start=955 + _XSTAT._serialized_end=1128 + _XEVENTMETADATA._serialized_start=1131 + _XEVENTMETADATA._serialized_end=1274 + _XSTATMETADATA._serialized_start=1276 + _XSTATMETADATA._serialized_end=1338 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__init__.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/__init__.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23c79a1ccd583e7d310d1182b46e32cc489eeecd Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/__init__.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/coordination_config_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/coordination_config_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5430acc1425fa03c07919eeed16617f083a20387 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/coordination_config_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/distributed_runtime_payloads_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/distributed_runtime_payloads_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ccb0ba8446ed269c9c9c23de05e1e477e8474ab Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/distributed_runtime_payloads_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/error_codes_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/error_codes_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2273cecbaa48e34a4ffd983dccfd6f020b8b380d Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/error_codes_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/histogram_pb2.cpython-310.pyc 
b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/histogram_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af9ea854c33cb0f10c788090d2cc8651caa439fc Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/histogram_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/rpc_options_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/rpc_options_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4777082565d805e9d1842b3a8472e3eb782bf225 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/rpc_options_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/status_pb2.cpython-310.pyc b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/status_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6c3d56fa8b09167b87de9082c32329e8be0f36a5 Binary files /dev/null and b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/__pycache__/status_pb2.cpython-310.pyc differ diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/coordination_config_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/coordination_config_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..7b8b923ca06e6ad48f1eeec76abc9d529cd2929b --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/coordination_config_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tsl/protobuf/coordination_config.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&tsl/protobuf/coordination_config.proto\x12\ntensorflow\"1\n\x0e\x43oordinatedJob\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnum_tasks\x18\x02 \x01(\x05\"\xd0\x03\n\x19\x43oordinationServiceConfig\x12\x14\n\x0cservice_type\x18\x01 \x01(\t\x12\x16\n\x0eservice_leader\x18\x02 \x01(\t\x12\x1b\n\x13\x65nable_health_check\x18\x03 \x01(\x08\x12&\n\x1e\x63luster_register_timeout_in_ms\x18\x04 \x01(\x03\x12\x1f\n\x17heartbeat_timeout_in_ms\x18\x05 \x01(\x03\x12\x38\n\x14\x63oordinated_job_list\x18\n \x03(\x0b\x32\x1a.tensorflow.CoordinatedJob\x12&\n\x1eshutdown_barrier_timeout_in_ms\x18\x07 \x01(\x03\x12*\n\"agent_destruction_without_shutdown\x18\x08 \x01(\x08\x12\x18\n\x10recoverable_jobs\x18\t \x03(\t\x12*\n\"allow_new_incarnation_to_reconnect\x18\x0b \x01(\x08\x12\x15\n\rforce_disable\x18\x0c \x01(\x08\x12.\n&poll_for_error_from_service_at_startup\x18\r \x01(\x08J\x04\x08\x06\x10\x07\x42WZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_protob\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.protobuf.coordination_config_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto' + _COORDINATEDJOB._serialized_start=54 + _COORDINATEDJOB._serialized_end=103 + _COORDINATIONSERVICECONFIG._serialized_start=106 + _COORDINATIONSERVICECONFIG._serialized_end=570 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..f75c723014dbe70b8dafa8816d6d4f033a8f5251 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/distributed_runtime_payloads_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tsl/protobuf/distributed_runtime_payloads.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n/tsl/protobuf/distributed_runtime_payloads.proto\x12\x1etensorflow.distributed_runtime\"\x9d\x01\n\x14GrpcPayloadContainer\x12T\n\x08payloads\x18\x01 \x03(\x0b\x32\x42.tensorflow.distributed_runtime.GrpcPayloadContainer.PayloadsEntry\x1a/\n\rPayloadsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\x12\n\x10GrpcPayloadsLost\"\x19\n\x17WorkerPossiblyRestartedBAZgithub.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.protobuf.distributed_runtime_payloads_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto\370\001\001'
+  _GRPCPAYLOADCONTAINER_PAYLOADSENTRY._options = None
+  _GRPCPAYLOADCONTAINER_PAYLOADSENTRY._serialized_options = b'8\001'
+  _GRPCPAYLOADCONTAINER._serialized_start=84
+  _GRPCPAYLOADCONTAINER._serialized_end=241
+  _GRPCPAYLOADCONTAINER_PAYLOADSENTRY._serialized_start=194
+  _GRPCPAYLOADCONTAINER_PAYLOADSENTRY._serialized_end=241
+  _GRPCPAYLOADSLOST._serialized_start=243
+  _GRPCPAYLOADSLOST._serialized_end=261
+  _WORKERPOSSIBLYRESTARTED._serialized_start=263
+  _WORKERPOSSIBLYRESTARTED._serialized_end=288
+# @@protoc_insertion_point(module_scope)
diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/histogram_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/histogram_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..084338bb3904c1e7d01a584bdc564aa320b223ca
--- /dev/null
+++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/histogram_pb2.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: tsl/protobuf/histogram.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctsl/protobuf/histogram.proto\x12\ntensorflow\"\x87\x01\n\x0eHistogramProto\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01\x12\x0b\n\x03num\x18\x03 \x01(\x01\x12\x0b\n\x03sum\x18\x04 \x01(\x01\x12\x13\n\x0bsum_squares\x18\x05 \x01(\x01\x12\x18\n\x0c\x62ucket_limit\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x12\n\x06\x62ucket\x18\x07 \x03(\x01\x42\x02\x10\x01\x42\\\n\x18org.tensorflow.frameworkP\x01Z;github.com/google/tsl/tsl/go/core/protobuf/summary_go_proto\xf8\x01\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.protobuf.histogram_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkP\001Z;github.com/google/tsl/tsl/go/core/protobuf/summary_go_proto\370\001\001' + _HISTOGRAMPROTO.fields_by_name['bucket_limit']._options = None + _HISTOGRAMPROTO.fields_by_name['bucket_limit']._serialized_options = b'\020\001' + _HISTOGRAMPROTO.fields_by_name['bucket']._options = None + _HISTOGRAMPROTO.fields_by_name['bucket']._serialized_options = b'\020\001' + _HISTOGRAMPROTO._serialized_start=45 + _HISTOGRAMPROTO._serialized_end=180 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/rpc_options_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/rpc_options_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..7fbb53257e648b0c8d0a92863f2a287a15de5003 --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/rpc_options_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: tsl/protobuf/rpc_options.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1etsl/protobuf/rpc_options.proto\x12\ntensorflow\"\xd5\x01\n\nRPCOptions\x12$\n\x1cuse_rpc_for_inprocess_master\x18\x01 \x01(\x08\x12\x1d\n\x15\x63ompression_algorithm\x18\x02 \x01(\t\x12\x19\n\x11\x63ompression_level\x18\x03 \x01(\x05\x12\x1a\n\x12\x63\x61\x63he_rpc_response\x18\x04 \x01(\x08\x12*\n\"disable_session_connection_sharing\x18\x05 \x01(\x08\x12\x1f\n\x17num_channels_per_target\x18\x06 \x01(\x05\x42@Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_protob\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.protobuf.rpc_options_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto' + _RPCOPTIONS._serialized_start=47 + _RPCOPTIONS._serialized_end=260 +# @@protoc_insertion_point(module_scope) diff --git a/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/status_pb2.py b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/status_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..ad05de8373b9e57f7d0d057599f80d6a2d8f265a --- /dev/null +++ b/SwarmUI/dlbackend/ComfyUI/venv/lib/python3.10/site-packages/tensorflow/tsl/protobuf/status_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: tsl/protobuf/status.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from tensorflow.tsl.protobuf import error_codes_pb2 as tsl_dot_protobuf_dot_error__codes__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19tsl/protobuf/status.proto\x12\ntensorflow\x1a\x1etsl/protobuf/error_codes.proto\"D\n\x0bStatusProto\x12$\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x16.tensorflow.error.Code\x12\x0f\n\x07message\x18\x02 \x01(\tB_\n\x18org.tensorflow.frameworkP\x01Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tsl.protobuf.status_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkP\001Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto\370\001\001' + _STATUSPROTO._serialized_start=73 + _STATUSPROTO._serialized_end=141 +# @@protoc_insertion_point(module_scope)
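A minimal sketch of how these generated tsl proto modules are consumed (illustrative only; the message contents are made up):

  from tensorflow.tsl.protobuf import error_codes_pb2, status_pb2

  # Build a StatusProto whose code comes from the error_codes enum.
  status = status_pb2.StatusProto()
  status.code = error_codes_pb2.NOT_FOUND
  status.message = "requested resource was not found"

  # Serialize and parse it back, as with any protobuf message.
  payload = status.SerializeToString()
  parsed = status_pb2.StatusProto()
  parsed.ParseFromString(payload)
  assert parsed.code == error_codes_pb2.NOT_FOUND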