language
stringclasses
1 value
repo
stringclasses
346 values
path
stringlengths
6
201
class_span
dict
source
stringlengths
21
2.38M
target
stringlengths
1
96
python
matplotlib__matplotlib
lib/matplotlib/ticker.py
{ "start": 9750, "end": 10788 }
class ____(Formatter): """ Return fixed strings for tick labels based only on position, not value. .. note:: `.FixedFormatter` should only be used together with `.FixedLocator`. Otherwise, the labels may end up in unexpected positions. """ def __init__(self, seq): """Set the sequence *seq* of strings that will be used for labels.""" self.seq = seq self.offset_string = '' def __call__(self, x, pos=None): """ Return the label that matches the position, regardless of the value. For positions ``pos < len(seq)``, return ``seq[i]`` regardless of *x*. Otherwise return empty string. ``seq`` is the sequence of strings that this object was initialized with. """ if pos is None or pos >= len(self.seq): return '' else: return self.seq[pos] def get_offset(self): return self.offset_string def set_offset_string(self, ofs): self.offset_string = ofs
FixedFormatter
python
google__jax
jax/experimental/jax2tf/tests/flax_models/seq2seq_lstm.py
{ "start": 910, "end": 2169 }
class ____(nn.Module): """EncoderLSTM Module wrapped in a lifted scan transform.""" eos_id: int @functools.partial( nn.scan, variable_broadcast='params', in_axes=1, out_axes=1, split_rngs={'params': False}) @nn.compact def __call__(self, carry: tuple[Array, Array], x: Array) -> tuple[tuple[Array, Array], Array]: """Applies the module.""" lstm_state, is_eos = carry new_lstm_state, y = nn.LSTMCell(features=lstm_state[0].shape[-1])( lstm_state, x ) # Pass forward the previous state if EOS has already been reached. def select_carried_state(new_state, old_state): return jnp.where(is_eos[:, np.newaxis], old_state, new_state) # LSTM state is a tuple (c, h). carried_lstm_state = tuple( select_carried_state(*s) for s in zip(new_lstm_state, lstm_state)) # Update `is_eos`. is_eos = jnp.logical_or(is_eos, x[:, self.eos_id]) return (carried_lstm_state, is_eos), y @staticmethod def initialize_carry(batch_size: int, hidden_size: int): # Use a dummy key since the default state init fn is just zeros. return nn.LSTMCell(hidden_size, parent=None).initialize_carry( jax.random.PRNGKey(0), (batch_size, 1) )
EncoderLSTM
python
pytorch__pytorch
test/dynamo/test_autograd_function.py
{ "start": 4318, "end": 4437 }
class ____(torch.nn.Module): def forward(self, x): return CustomFuncStrideBwd.apply(x)
CustomFuncStrideModule
python
pytorch__pytorch
torch/onnx/_internal/exporter/_core.py
{ "start": 4197, "end": 68735 }
class ____(ir.Tensor): def __init__(self, tensor: torch.Tensor, name: str | None = None) -> None: # Pass the tensor as the raw data to ir.Tensor's constructor if tensor.dtype == torch.float4_e2m1fn_x2: # Change the shape to the unpacked shape shape = ir.Shape(_type_casting.get_float4_shape(tensor), frozen=True) else: # The base class will set the shape to the tensor's shape shape = None super().__init__( tensor, dtype=torch_dtype_to_onnx_dtype(tensor.dtype), shape=shape, name=name, ) def numpy(self) -> npt.NDArray: self.raw: torch.Tensor # Handle dtypes that are not natively supported by NumPy: # We pick an uint dtype that has the same size as the original dtype, # view the tensor as that dtype so that it is convertible to NumPy, # and then view it back to the proper dtype (using ml_dtypes obtained by # calling dtype.numpy()). # pyrefly: ignore [missing-attribute] if self.dtype == ir.DataType.BFLOAT16: return ( # pyrefly: ignore [missing-attribute] self.raw.view(torch.uint16).numpy(force=True).view(self.dtype.numpy()) ) if self.dtype in { ir.DataType.FLOAT8E4M3FN, ir.DataType.FLOAT8E4M3FNUZ, ir.DataType.FLOAT8E5M2, ir.DataType.FLOAT8E5M2FNUZ, }: # pyrefly: ignore [missing-attribute] return self.raw.view(torch.uint8).numpy(force=True).view(self.dtype.numpy()) if self.dtype == ir.DataType.FLOAT4E2M1: return _type_casting.unpack_float4x2_as_uint8(self.raw).view( # pyrefly: ignore [missing-attribute] self.dtype.numpy() ) return self.raw.numpy(force=True) def __array__(self, dtype: Any = None, copy: bool | None = None) -> npt.NDArray: del copy # Unused, but needed for the signature if dtype is None: return self.numpy() return self.numpy().__array__(dtype) def _get_cbytes(self): """Get a ctypes byte array pointing to the tensor data.""" import torch._subclasses.fake_tensor with torch._subclasses.fake_tensor.unset_fake_temporarily(): # Disable any fake mode so calling detach() etc. 
will return a real tensor tensor = self.raw.detach().cpu().contiguous() if isinstance(tensor, torch._subclasses.fake_tensor.FakeTensor): raise TypeError( # pyrefly: ignore [missing-attribute] f"Cannot take content out from the FakeTensor ('{self.name}'). Please replace the tensor " "with a tensor backed by real data using ONNXProgram.apply_weights() " "or save the model without initializers by setting include_initializers=False." ) # Return the tensor to ensure it is not garbage collected while the ctypes array is in use return tensor, ( ctypes.c_ubyte * tensor.element_size() * tensor.numel() ).from_address(tensor.data_ptr()) def tobytes(self) -> bytes: # On big-endian machines, call the super's tobytes() which returns a little-endian result. if sys.byteorder == "big": return super().tobytes() # Implement tobytes to support native PyTorch types so we can use types like bloat16 # Reading from memory directly is also more efficient because # it avoids copying to a NumPy array _, data = self._get_cbytes() return bytes(data) def tofile(self, file) -> None: # On big-endian machines, call the super's tofile() which returns a little-endian result. 
if sys.byteorder == "big": return super().tofile(file) _, data = self._get_cbytes() return file.write(data) # https://github.com/pytorch/pytorch/blob/ee6cb6daa173896f8ea1876266a19775aaa4f610/torch/export/graph_signature.py#L56C1-L62C19 # class InputKind(Enum): # USER_INPUT = auto() # PARAMETER = auto() # BUFFER = auto() # CONSTANT_TENSOR = auto() # CUSTOM_OBJ = auto() # TOKEN = auto() # https://github.com/pytorch/pytorch/blob/ee6cb6daa173896f8ea1876266a19775aaa4f610/torch/export/graph_signature.py#L89C1-L96C19 # class OutputKind(Enum): # USER_OUTPUT = auto() # LOSS_OUTPUT = auto() # BUFFER_MUTATION = auto() # GRADIENT_TO_PARAMETER = auto() # GRADIENT_TO_USER_INPUT = auto() # USER_INPUT_MUTATION = auto() # TOKEN = auto() def _set_shape_types( values: Sequence[ir.Value], meta_vals: Sequence[torch.Tensor], complex_to_float: bool = True, ) -> None: if not isinstance(meta_vals, Sequence): logger.warning( "Expected meta_vals to be a sequence, but got %s. There may be an internal error.", meta_vals, ) meta_vals = (meta_vals,) for value, meta_val in zip(values, meta_vals): _set_shape_type(value, meta_val, complex_to_float=complex_to_float) def _set_shape_type( value: ir.Value, meta_val: torch.Tensor | torch.SymBool | torch.SymInt | torch.SymFloat | tuple[torch.Tensor], complex_to_float: bool, ) -> None: if isinstance(meta_val, tuple): logger.warning("Setting shape and type of tensors is not supported yet") if isinstance(meta_val, torch.Tensor): dims = [] shape: tuple[int, ...] if meta_val.dtype == torch.float4_e2m1fn_x2: # Change the shape to the unpacked shape shape = _type_casting.get_float4_shape(meta_val) else: shape = meta_val.shape for dim in shape: if isinstance(dim, int): dims.append(dim) else: # pyrefly: ignore [bad-argument-type] dims.append(str(dim.node)) # If the dtype is set already (e.g. by the onnx_symbolic ops), # we don't need to set it again. 
# # When a user specifies complex in onnx_symbolic, we consider that to # be the intention even though non of the ONNX ops deals with complex values. # In this case, we don't change the dtype or the shape of the tensor. if value.dtype is None: value.dtype = torch_dtype_to_onnx_dtype(meta_val.dtype) if complex_to_float: if meta_val.dtype == torch.complex64: value.dtype = ir.DataType.FLOAT # Add 2 as the last dimension if the tensor is complex to hold the real/imag parts dims.append(2) elif meta_val.dtype == torch.complex128: value.dtype = ir.DataType.DOUBLE # Add 2 as the last dimension if the tensor is complex to hold the real/imag parts dims.append(2) value.shape = ir.Shape(dims) elif isinstance(meta_val, (int, torch.SymInt)): # aten::sym_size output is a int, not a tensor, which stands # for the size of one dim. We treat it as a scalar. value.dtype = ir.DataType.INT64 value.shape = ir.Shape([]) elif isinstance(meta_val, (bool, torch.SymBool)): value.dtype = ir.DataType.BOOL value.shape = ir.Shape([]) elif isinstance(meta_val, (float, torch.SymFloat)): value.dtype = ir.DataType.FLOAT value.shape = ir.Shape([]) def _get_qualified_module_name(cls: Any) -> str: if isinstance(cls, str): return cls module = cls.__module__ if module is None or module == str.__class__.__module__: return cls.__name__ return module + "." + cls.__name__ def _get_node_namespace(node: torch.fx.Node) -> tuple[str, list[str], list[str]]: """Get the namespace and scope of the node. Example:: { 'L__self__': ('', <class 'torchvision.models.resnet.ResNet'>), 'L__self___avgpool': ('avgpool', <class 'torch.nn.modules.pooling.AdaptiveAvgPool2d'>) } Will yield namespace: ": torchvision.models.resnet.ResNet/avgpool: torch.nn.modules.pooling.AdaptiveAvgPool2d/node_name: node_target" class_hierarchy: ["torchvision.models.resnet.ResNet", "torch.nn.modules.pooling.AdaptiveAvgPool2d", <node_target>] name_scopes: ["", "avgpool", <node_name>] Args: node: The node to get the namespace and scope of. 
Returns: (namespace, class_hierarchy, name_scope) """ nn_module_stack = node.meta.get("nn_module_stack") logger.debug("%s", nn_module_stack) if nn_module_stack is None: logger.warning( "nn_module_stack not found for node '%s'. Skip adding metadata...", node.name, ) return f"{node.name}: {node.target}", [str(node.target)], [node.name] namespaces = [] class_hierarchy = [] name_scopes = [] for name, nn_module in nn_module_stack.values(): name_scopes.append(name) nn_module_name = _get_qualified_module_name(nn_module) class_hierarchy.append(nn_module_name) namespaces.append(f"{name}: {_get_qualified_module_name(nn_module)}") namespaces.append(f"{node.name}: {node.target}") class_hierarchy.append(str(node.target)) name_scopes.append(node.name) return "/".join(namespaces), class_hierarchy, name_scopes def _set_node_metadata(fx_node: torch.fx.Node, ir_node: ir.Node) -> None: """Adds namespace and other node metadata to the ONNX node.""" namespace, class_hierarchy, name_scopes = _get_node_namespace(fx_node) ir_node.metadata_props["namespace"] = namespace ir_node.metadata_props["pkg.torch.onnx.class_hierarchy"] = repr(class_hierarchy) ir_node.metadata_props["pkg.torch.onnx.name_scopes"] = repr(name_scopes) ir_node.metadata_props["pkg.torch.onnx.fx_node"] = str(fx_node.format_node()) ir_node.metadata_props["pkg.torch.onnx.stack_trace"] = fx_node.meta.get( "stack_trace", "" ) def _handle_getitem_node( node: torch.fx.Node, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]] ) -> ir.Value: """Handle a getitem node. Add the input value it is getting to the mapping, then return the value. There are two cases for this node: 1. The output is a Sequence (traced), we can simply get the value from the sequence 2. 
The output is produced by a SplitToSequence node, we need to get the value from the sequence value This function only handles the first case """ assert len(node.all_input_nodes) == 1 source = node.all_input_nodes[0] source_outputs = node_name_to_values[source.name] assert isinstance(source_outputs, Sequence), ( f"Expected {source.name} to output sequence, got {node_name_to_values[source.name]}" ) index = typing.cast(int, node.args[1]) value = source_outputs[index] # Save the getitem value to the values mapping to in case # it is one of the graph outputs node_name_to_values[node.name] = value # Rename the name of value with the getitem name. value.name = node.name return value def _handle_call_function_node( graph_like: ir.Graph | ir.Function, node: torch.fx.Node, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]], ) -> None: """Handle a call_function node. Args: graph: The ONNX graph at construction. node: The FX node to translate. node_name_to_values: A mapping of FX node names to their produced ir.Value. 
""" if node.target is operator.getitem: _handle_getitem_node(node, node_name_to_values) # Add op to the graph op = str(node.target) fx_inputs, attributes, input_names, output_names = _get_inputs_and_attributes(node) inputs: list[ir.Value | None] = [] for i, input_ in enumerate(fx_inputs): if input_ is None: inputs.append(None) elif hasattr(input_, "name"): if isinstance(input_, torch.fx.Node) and input_.target is operator.getitem: actual_input = _handle_getitem_node(input_, node_name_to_values) inputs.append(actual_input) else: value = node_name_to_values[input_.name] assert not isinstance(value, Sequence) inputs.append(value) else: attributes[f"arg_{i}"] = input_ outputs = [ir.Value(name=name) for name in output_names] if len(outputs) > 1: _set_shape_types(outputs, node.meta["val"], complex_to_float=False) node_name_to_values[node.name] = outputs else: _set_shape_type(outputs[0], node.meta["val"], complex_to_float=False) node_name_to_values[node.name] = outputs[0] ir_node = ir.Node( "pkg.torch.ops", op, inputs, attributes=ir_convenience.convert_attributes(attributes), outputs=outputs, name=node.name, ) ir_node.meta["node"] = node ir_node.metadata_props["pkg.torch.onnx.input_names"] = repr(input_names) # Record the nn.Module stack for the node _set_node_metadata(node, ir_node) graph_like.append(ir_node) def _convert_fx_arg_to_onnx_arg( arg, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]], node_name_to_local_functions: dict[str, ir.Function], ) -> Any: """Convert an FX argument to an ONNX compatible argument. 
This function - Converts a torch dtype to an integer - Converts a torch device/memory_format/layout to a string - Converts a torch.fx.Node to an ir.Value - Converts a sequence of torch.fx.Node to a sequence of ir.Value - Converts a get_attr node to an ir.Function """ if arg is None: # None arguments are not modified because when the arg is an ONNX input # we need to preserve the None value; when the arg is an ONNX attribute, # we want to drop the value. # The actual dropping of a None attribute value is done by OpRecorder return None if hasattr(arg, "name"): if isinstance(arg, torch.fx.Node) and arg.target is operator.getitem: source = arg.all_input_nodes[0] source_outputs = node_name_to_values[source.name] if isinstance(source_outputs, Sequence): # If the node is getting an input from another node, get the actual value the node is retrieving return _handle_getitem_node(arg, node_name_to_values) else: # `source_outputs` is a sequence(tensor()) value and we need to # use SequenceAt to get the value. 
This is handled by torchlib pass if isinstance(arg, torch.fx.Node) and arg.op == "get_attr": return node_name_to_local_functions[arg.name] # If the input is a node, get the value from the mapping return node_name_to_values[arg.name] if isinstance(arg, (list, tuple)): return [ _convert_fx_arg_to_onnx_arg( elem, node_name_to_values, node_name_to_local_functions ) for elem in arg ] if isinstance(arg, (torch.device, torch.memory_format, torch.layout)): return str(arg) if isinstance(arg, torch.dtype): return torch_dtype_to_onnx_dtype(arg) # Maybe a Python value return arg def _get_onnxscript_opset(opset_version: int) -> onnxscript.values.Opset: return onnxscript.values.Opset("", opset_version) def _is_onnx_op(op: Any) -> bool: """Whether the op overload is an ONNX custom op implemented with PyTorch.""" if not isinstance(op, torch._ops.OpOverload): return False return op.name().startswith("onnx::") def _parse_onnx_op(op: torch._ops.OpOverload) -> tuple[str, int]: """Parse the ONNX custom op overload name to get the op type and opset version.""" name = op.name()[len("onnx::") :] name, _, opset = name.partition(".opset") return name, int(opset) def _handle_call_function_node_with_lowering( model: ir.Model, node: torch.fx.Node, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]], *, graph_like: ir.Graph | ir.Function, constant_farm: dict[Any, ir.Value], registry: _registration.ONNXRegistry, opset: onnxscript.values.Opset, node_name_to_local_functions: dict[str, ir.Function], ) -> None: """Translate a call_function node to an ONNX node. Args: model: The ONNX model at construction. node: The FX node to translate. node_name_to_values: A mapping of FX node names to their produced ONNX ``Value``. graph_like: The current ONNX graph at construction. Must add nodes to this graph because it can be a subgraph that is currently being constructed. constant_farm: A mapping of constant values to existing ONNX ``Value``s. 
registry: The registry of all aten to ONNX decomposition functions. opset: The ONNX Script opset object for constructing ONNX nodes. node_name_to_local_functions: A mapping of subgraph names to the corresponding ONNX functions. """ if node.target is operator.getitem: source = node.all_input_nodes[0] source_outputs = node_name_to_values[source.name] if isinstance(source_outputs, Sequence): _handle_getitem_node(node, node_name_to_values) return else: # `source_outputs` is a sequence(tensor()) value and we need to # use SequenceAt to get the value. This is handled by torchlib pass # Map FX inputs to ONNX inputs and fill optional inputs. # torch_args and torch_kwargs are for op-level validation fx_args = node.args fx_kwargs = node.kwargs # Replace the input FX nodes with ONNX values onnx_args = [ _convert_fx_arg_to_onnx_arg( input_, node_name_to_values, node_name_to_local_functions ) for input_ in fx_args ] onnx_kwargs = {} for key, value in fx_kwargs.items(): onnx_kwargs[key] = _convert_fx_arg_to_onnx_arg( value, node_name_to_values, node_name_to_local_functions ) if key == "dtype" and onnx_kwargs[key] is None: # Set dtype to -1 if it is None # TODO(justinchuby): Maybe keep it as None? onnx_kwargs[key] = -1 if _is_onnx_op(node.target): # Handle torch.ops.onnx.* ops. 
These ops can be directly added to the graph op_type, opset_version = _parse_onnx_op(node.target) # type: ignore[arg-type] # If final inputs are None, strip them from the node inputs for input_ in reversed(onnx_args): if input_ is not None: break onnx_args.pop() onnx_node = ir.Node( "", op_type, onnx_args, ir.convenience.convert_attributes(onnx_kwargs), name=node.name, num_outputs=len(node.target._schema.returns), # type: ignore[union-attr] version=opset_version, ) # Store the single node in a list to be consistent with the rest of the code for further processing onnx_nodes = [onnx_node] if len(onnx_node.outputs) == 1: outputs = onnx_node.outputs[0] else: outputs = onnx_node.outputs # type: ignore[assignment] else: # Find the matching ONNX overload for the node # TODO: Log the message here to expose false positives onnx_function, message = _dispatching.dispatch(node, registry) if onnx_function is None: raise _errors.DispatchError( f"No ONNX function found for {node.target!r}. Failure message: {message}" ) with onnxscript.evaluator.default_as( tracer := _building.OpRecorder(opset, constant_farm) ): global current_tracer current_tracer = tracer try: outputs = onnx_function(*onnx_args, **onnx_kwargs) except Exception as e: raise _errors.GraphConstructionError( f"Error when calling function '{onnx_function}' with args '{onnx_args}' and kwargs '{onnx_kwargs}'" ) from e finally: current_tracer = None # Add the defined functions to the model for identifier, onnxscript_function in tracer.functions.items(): if identifier in model.functions: continue if isinstance(onnxscript_function, ir.Function): ir_function = onnxscript_function else: # TODO: Get IR function directly when onnxscript is updated proto = onnxscript_function.to_function_proto() ir_function = ir.serde.deserialize_function(proto) model.functions[identifier] = ir_function # Opset imports are added to the model in the final add_opset_imports pass onnx_nodes = tracer.nodes del tracer # tracer is no longer needed # 
NOTE: Instead of using the output names from node.target._schema, # we always use the index if there are more than one outputs so the # names can be programmatically reconstructed. This is useful for # comparing values from the ONNX graph with those from the FX graph. # # When there are multiple outputs, the output names will be # node_name__0, node_name__1, etc. if isinstance(outputs, Sequence): _set_shape_types(outputs, node.meta["val"], complex_to_float=True) node_name_to_values[node.name] = outputs for i, output in enumerate(outputs): output.name = f"{node.name}__{i}" # Set the name of the producing node using the value name for correspondence producer = output.producer() if producer is not None: producer.name = f"node_{output.name}" else: _set_shape_type(outputs, node.meta["val"], complex_to_float=True) node_name_to_values[node.name] = outputs outputs.name = node.name producer = outputs.producer() if producer is not None: producer.name = f"node_{outputs.name}" for ir_node in onnx_nodes: ir_node.meta["node"] = node # Record the nn.Module stack for the node _set_node_metadata(node, ir_node) # Add the traced nodes to the current graph # Must add nodes to this graph, not model.graph, because it can be a subgraph that is currently being constructed graph_like.extend(onnx_nodes) def _handle_placeholder_node( node: torch.fx.Node, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]], *, graph_like: ir.Graph | ir.Function, lower: str, opset: onnxscript.values.Opset, ) -> None: # Placeholder nodes are user inputs # We need to create a new tensor for each user input # and add it to the graph's inputs name = node.name input_ = _tensors.SymbolicTensor(opset, name=name) input_.meta["node"] = node _set_shape_type(input_, node.meta["val"], complex_to_float=lower != "none") node_name_to_values[name] = input_ # The inputs should be add to the graph here graph_like.inputs.append(input_) def _handle_get_attr_node( node: torch.fx.Node, *, owned_graphs: Mapping[str, 
ir.Function], node_name_to_local_functions: dict[str, ir.Function], ) -> None: """Handle a get_attr node by assigning the corresponding ONNX function to the node name. An example ExportedProgram that has uses get_attr nodes is: ExportedProgram: class GraphModule(torch.nn.Module): def forward(self, arg0_1: "f32[5]"): true_graph_0 = self.true_graph_0 # get_attr false_graph_0 = self.false_graph_0 # get_attr conditional = torch.ops.higher_order.cond(False, true_graph_0, false_graph_0, [arg0_1]); true_graph_0 = false_graph_0 = arg0_1 = None getitem: "f32[5]" = conditional[0]; conditional = None return (getitem,) class <lambda>(torch.nn.Module): def forward(self, arg0_1: "f32[5]"): cos: "f32[5]" = torch.ops.aten.cos.default(arg0_1); arg0_1 = None return (cos,) class <lambda>(torch.nn.Module): def forward(self, arg0_1: "f32[5]"): sin: "f32[5]" = torch.ops.aten.sin.default(arg0_1); arg0_1 = None return (sin,) Args: node: The FX node to translate. owned_graphs: A mapping of subgraph names to the corresponding ONNX functions. node_name_to_local_functions: A mapping of local function names to their corresponding ONNX functions. """ if not isinstance(node.target, str): logger.warning( "Expected node.target for the node %s to be a string, but got '%s'. There may be an internal error.", node, type(node.target), ) return function = owned_graphs[node.target] node_name_to_local_functions[node.name] = function def _handle_output_node( node: torch.fx.Node, node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]], graph_like: ir.Graph | ir.Function, ) -> None: """Handle an output node by adding the output to the graph's outputs. Args: node: The FX node to translate. node_name_to_values: A mapping of FX node names to their produced ONNX ``Value``. graph_like: The ONNX graph at construction. """ # node.args[0] can be a tuple with more than one elements. This happens when, # for example, a subgraph has multiple outputs. 
We flatten them all as ONNX graph outputs for output in node.args[0]: # type: ignore[index,union-attr] if output is None: logger.warning( "Output node %s has None output. The output is ignored in the exported graph. Please ensure the graph output order is expected", node.name, ) continue output_value_name = output.name # type: ignore[union-attr] assert isinstance(output_value_name, str), ( f"Bug: Expected {output_value_name!r} to be a string" ) values = node_name_to_values[output_value_name] if isinstance(values, Sequence): graph_like.outputs.extend(values) return graph_like.outputs.append(values) def _translate_fx_graph( fx_graph: torch.fx.Graph, model: ir.Model, *, graph_like: ir.Graph | ir.Function, owned_graphs: Mapping[str, ir.Function], lower: Literal["at_conversion", "none"], registry: _registration.ONNXRegistry, ) -> dict[str, ir.Value | Sequence[ir.Value]]: """Translate a submodule to an ONNX function. Any functions used by the traced functions will be added to the model. Args: fx_graph: The FX graph module to translate. model: The ONNX model at construction. current_scope: The current name scope of the submodule, excluding the current module name. E.g. "true_graph_0.false_graph_0". graph_name: The name of the submodule. E.g. "true_graph_0". graph: The ONNX graph at construction. owned_graphs: The subgraphs owned by the current graph. lower: The lowering strategy to use. registry: The registry of all aten to ONNX decomposition functions. Returns: A mapping of FX node names to their produced ONNX ``Value``. """ node_name_to_values: dict[str, ir.Value | Sequence[ir.Value]] = {} # The reason we need node_name_to_local_functions in addition to owned_graphs # is because the get_attr nodes may assign a different name than the GraphModule name # to the subgraph. This is not typical but is valid Python. 
node_name_to_local_functions: dict[str, ir.Function] = {} constant_farm: dict[Any, ir.Value] = {} opset = _get_onnxscript_opset(registry.opset_version) for node in fx_graph.nodes: logger.debug( "%s", (node.name, node.args, node.target, node.op, node.type, node.kwargs) ) try: if node.op == "placeholder": _handle_placeholder_node( node, node_name_to_values, graph_like=graph_like, lower=lower, opset=opset, ) elif node.op == "call_function": if lower == "at_conversion": _handle_call_function_node_with_lowering( model, node, node_name_to_values, graph_like=graph_like, constant_farm=constant_farm, registry=registry, opset=opset, node_name_to_local_functions=node_name_to_local_functions, ) else: # No lowering _handle_call_function_node(graph_like, node, node_name_to_values) elif node.op == "get_attr": _handle_get_attr_node( node, owned_graphs=owned_graphs, node_name_to_local_functions=node_name_to_local_functions, ) elif node.op == "output": _handle_output_node( node, node_name_to_values, graph_like=graph_like, ) except Exception as e: raise _errors.ConversionError( f"Error when translating node {node.format_node()}. See the stack trace for more information." ) from e return node_name_to_values def _get_inputs_and_attributes( node: torch.fx.Node, ) -> tuple[list[torch.fx.Node | None], dict[str, Any], list[str], list[str]]: """Find and Fill in the not provided kwargs with default values. Returns: (inputs, attributes, input_names, output_names) """ if inspect.isbuiltin(node.target) or isinstance(node.target, str): inputs = list(node.args) return inputs, {}, [], [node.name] # type: ignore[return-value] # The target should be an ATen operator now assert hasattr(node.target, "_schema"), ( f"The target should be an ATen operator now, but node target {node.target} has no schema" ) node_schema: torch.FunctionSchema = node.target._schema # This function assumes the order of arguments in FX op is the # same as the order of arguments in TorchScript op. 
inputs: list[Any] = [] # type: ignore[no-redef] input_names: list[str] = [] attributes: dict[str, Any] = {} if inspect.isbuiltin(node.target): inputs = list(node.args) else: for arg, schema_arg in zip(node.args, node_schema.arguments): if arg is None or isinstance(arg, torch.fx.Node): inputs.append(arg) input_names.append(schema_arg.name) elif isinstance(arg, Sequence) and all( elem is None or isinstance(elem, torch.fx.Node) for elem in arg ): inputs.extend(arg) input_names.extend([schema_arg.name] * len(arg)) elif isinstance(arg, torch.device): attributes[schema_arg.name] = str(arg) elif isinstance(arg, torch.dtype): attributes[schema_arg.name] = torch_dtype_to_onnx_dtype(arg) else: attributes[schema_arg.name] = arg for schema_arg in node_schema.arguments: if schema_arg.name not in node.kwargs: continue kwarg = node.kwargs[schema_arg.name] if schema_arg.name in { "layout", "device", "requires_grad", "memory_format", "implicit", } or isinstance(kwarg, torch.device): attr = str(kwarg) elif isinstance(kwarg, torch.dtype): attr = torch_dtype_to_onnx_dtype(kwarg) # type: ignore[assignment] else: attr = kwarg # type: ignore[assignment] attributes[schema_arg.name] = attr output_names = [f"{node.name}_{output.name}" for output in node_schema.returns] return inputs, attributes, input_names, output_names # type: ignore[return-value] def _maybe_start_profiler(should_profile: bool) -> Any: if should_profile: import pyinstrument # type: ignore[import-not-found] profiler = pyinstrument.Profiler(async_mode="disabled") profiler.start() return profiler return None def _maybe_stop_profiler_and_get_result(profiler) -> str | None: if profiler is None: return None profiler.stop() return profiler.output_text(unicode=True) def _format_exception(e: Exception) -> str: """Format the full traceback as Python would show it.""" return "\n".join(traceback.format_exception(type(e), e, e.__traceback__)) def _summarize_exception_stack(e: BaseException) -> str: """Format the exception stack by 
showing the text of each exception.""" causes = [e] while e.__cause__ is not None: causes.append(e.__cause__) e = e.__cause__ return ( "\n\n## Exception summary\n\n" + "⬆️\n".join([f"{type(e)}: {e}\n" for e in reversed(causes)]) + "\n(Refer to the full stack trace above for more information.)" ) def _format_exceptions_for_all_strategies( results: list[_capture_strategies.Result], ) -> str: """Format all the exceptions from the capture strategies.""" return "\n".join( [ f"# ⚠️ Errors from strategy '{result.strategy}': -----------------------\n\n" f"{_format_exception(result.exception)}\n" for result in results if result.exception is not None ] ) def exported_program_to_ir( exported_program: torch.export.ExportedProgram, *, registry: _registration.ONNXRegistry | None = None, lower: Literal["at_conversion", "none"] = "at_conversion", ) -> ir.Model: """Convert an exported program to an ONNX IR model. Reference: - ExportedProgram spec: https://pytorch.org/docs/stable/export.ir_spec.html Args: exported_program: The exported program to convert. lower: Whether to lower the graph to core ONNX operators. at_conversion: Lower when translating the FX graph to ONNX IR. none: Do not lower the graph. registry: The registry of all ONNX Script decomposition. 
""" if registry is None: registry = _registration.ONNXRegistry.from_torchlib() if lower != "none": exported_program = _prepare_exported_program_for_export( exported_program, registry=registry ) return _exported_program_to_onnx_program( exported_program, registry=registry, lower=lower ).model def _prepare_exported_program_for_export( exported_program: torch.export.ExportedProgram, *, registry: _registration.ONNXRegistry, ) -> torch.export.ExportedProgram: """Decompose and apply pre-export transformations to the exported program.""" with ( # Support the dynamism with 0/1 input dim torch.fx.experimental._config.patch(backed_size_oblivious=True), # type: ignore[attr-defined] ): # Decompose the graph given the implemented torch ops in ONNX exported_program = _fx_passes.decompose_with_registry( exported_program, registry ) graph_module = exported_program.graph_module # Include explicit type promotion nodes _fx_passes.insert_type_promotion_nodes(graph_module) graph_module = _fx_passes.remove_assertion_nodes(graph_module) # Reassign the graph module to save some runtime. exported_program._graph_module = graph_module return exported_program def _get_scope_name(scoped_name: str) -> tuple[str, str]: """Get the scope and name of a node. Examples:: >>> _get_scope_name('') ('', '') >>> _get_scope_name('true_graph') ('', 'true_graph') >>> _get_scope_name('true_graph.false_graph') ('true_graph', 'false_graph') >>> _get_scope_name('true_graph.false_graph.some_graph') ('true_graph.false_graph', 'some_graph') Args: scoped_name: The scoped name of the node. Returns: (scope, name) """ if "." in scoped_name: scope, name = scoped_name.rsplit(".", 1) else: scope, name = "", scoped_name return scope, name def _exported_program_to_onnx_program( exported_program: torch.export.ExportedProgram, *, registry: _registration.ONNXRegistry, lower: Literal["at_conversion", "none"] = "at_conversion", ) -> _onnx_program.ONNXProgram: """Convert an exported program to an ONNX Program. 
The exported_program field in the returned ONNXProgram is one that is after decompositions have been applied. Reference: - ExportedProgram spec: https://pytorch.org/docs/stable/export.ir_spec.html Args: exported_program: The exported program to convert. The exported program should be the one that is after decompositions have been applied. lower: Whether to lower the graph to core ONNX operators. at_conversion: Lower when translating the FX graph to ONNX IR. none: Do not lower the graph. registry: The registry of all ONNX Script decomposition. """ model = ir.Model( graph=ir.Graph( [], [], nodes=[], # Opset imports are added to the model in the final add_opset_imports pass name="main_graph", metadata_props={ "pkg.torch.export.ExportedProgram.graph_signature": str( exported_program.graph_signature ), "pkg.torch.export.ExportedProgram.range_constraints": str( exported_program.range_constraints ), }, ), ir_version=_constants.ONNX_IR_VERSION, producer_name="pytorch", producer_version=torch.__version__, ) # A dictionary storing the translated subgraphs as ONNX functions made available to outer graphs # {<subgraph_scope>: {<subgraph_name>: <IR function>}} scoped_subgraphs: dict[str, dict[str, ir.Function]] = {} values = None # 1. 
Translate all nodes in all subgraphs and the main graph # Create a dictionary of values for the main graph for step 2-3 to add inputs and outputs module: torch.fx.GraphModule # Reverse the order of the modules so that the innermost module is processed first # and made available to the outer module for name, module in reversed( tuple(exported_program.graph_module.named_modules(remove_duplicate=False)) ): # Obtain the graphs (previously built) owned by the current module owned_graphs = scoped_subgraphs.setdefault(name, {}) fx_graph = module.graph graph_like: ir.Graph | ir.Function if name == "": # Root graph graph_like = model.graph else: function_name = name.replace(".", "__") # Inputs and outputs will be created within _translate_fx_graph func = ir.Function( domain=_constants.LOCAL_FUNCTION_DOMAIN, name=function_name, graph=ir.Graph((), (), nodes=()), attributes=(), ) # Make this function available to the outer graph scope, subgraph_name = _get_scope_name(name) scoped_subgraphs.setdefault(scope, {})[subgraph_name] = func model.functions[func.identifier()] = func graph_like = func values = _translate_fx_graph( fx_graph, model, graph_like=graph_like, owned_graphs=owned_graphs, lower=lower, registry=registry, ) assert name == "", "The last module processed should be the root module" assert values is not None # Clear the input/output of the main graph and add them back in step 2-3 # using the more accurate graph signature model.graph.inputs.clear() model.graph.outputs.clear() # 2. Add user inputs and all parameters/buffers to the graph. # Since the node names and the tensor names are different, we need to rename # the nodes to match the tensor names later. For now we will just use the node names. 
user_inputs = [ spec for spec in exported_program.graph_signature.input_specs if spec.kind == graph_signature.InputKind.USER_INPUT ] non_user_inputs = [ spec for spec in exported_program.graph_signature.input_specs if spec.kind != graph_signature.InputKind.USER_INPUT ] for spec in itertools.chain(user_inputs, non_user_inputs): # Put the user inputs first and then the parameters/buffers if isinstance(spec.arg, graph_signature.ConstantArgument): logger.debug("Skipping constant argument %s", spec.arg) continue value_name = spec.arg.name input_kind = spec.kind persistent = spec.persistent value = values[value_name] assert not isinstance(value, Sequence), ( f"Input '{value_name}' should not be a sequence. This is unexpected." ) value.metadata_props["pkg.torch.export.graph_signature.InputSpec.kind"] = ( input_kind.name ) value.metadata_props[ "pkg.torch.export.graph_signature.InputSpec.persistent" ] = str(persistent) if input_kind == graph_signature.InputKind.USER_INPUT: # Add only user inputs to the graph # Subsequent passes can decide if they want to add initializers as inputs model.graph.inputs.append(value) else: model.graph.initializers[value_name] = value # 3. Add user outputs to the graph and assign metadata to all outputs user_outputs = [ spec for spec in exported_program.graph_signature.output_specs if spec.kind == graph_signature.OutputKind.USER_OUTPUT ] non_user_outputs = [ spec for spec in exported_program.graph_signature.output_specs if spec.kind != graph_signature.OutputKind.USER_OUTPUT ] for spec in itertools.chain(user_outputs, non_user_outputs): if isinstance(spec.arg, graph_signature.ConstantArgument): logger.warning("Skipping constant argument %s", spec.arg) continue value_name = spec.arg.name output_kind = spec.kind value = values[value_name] if not isinstance(value, (ir.Value, Sequence)): raise TypeError( f"Output '{value_name}' should be an ir.Value. Actual type is '{type(value)}': {value!r}. 
" "This may be due to an incorrect implementation of the ONNX function that produced this output." ) # The output value may be a sequence, meaning the operator has multiple outputs _values = (value,) if not isinstance(value, Sequence) else value if len(_values) > 1: logger.warning( "Model output '%s' has multiple values: %s (output spec: %s). Please make sure this is expected.", value_name, _values, spec, ) for value in _values: value.metadata_props["pkg.torch.export.graph_signature.OutputSpec.kind"] = ( output_kind.name ) if output_kind == graph_signature.OutputKind.USER_OUTPUT: model.graph.outputs.append(value) # 4. Rename the initializers to match the tensor names for name, param_name in itertools.chain( exported_program.graph_signature.inputs_to_parameters.items(), exported_program.graph_signature.inputs_to_buffers.items(), exported_program.graph_signature.inputs_to_lifted_tensor_constants.items(), ): initializer = model.graph.initializers.pop(name) initializer.name = param_name # Record the original name so users can search the metadata and correspond # with the FX graph initializer.metadata_props["pkg.torch.onnx.original_node_name"] = name model.graph.initializers[param_name] = initializer # 5. Add initializers to the graph # ExportedProgram stores parameters and buffers in state_dict, # but non_persistent_buffers and lifted_tensor_constants are not there # so we need to get them from the name_* apis. for name, torch_tensor in itertools.chain( exported_program.named_parameters(), # pyrefly: ignore [bad-argument-type] exported_program.named_buffers(), exported_program.constants.items(), ): initializer = model.graph.initializers.get(name) # type: ignore[assignment] if initializer is None: logger.warning("Tensor '%s' is not one of the initializers", name) continue if not isinstance(torch_tensor, torch.Tensor): raise NotImplementedError( f"Tensor '{name}' should be a torch.Tensor. Actual type is '{type(torch_tensor)}': {torch_tensor!r}. 
" "This is unexpected and not yet supported." ) ir_tensor = TorchTensor(torch_tensor, name=name) initializer.const_value = ir_tensor _set_shape_type( initializer, torch_tensor, complex_to_float=lower != "none", ) # TODO: Decide if we should keep mutated buffers as inputs/outputs # Collect and add opset imports to the model _ir_passes.add_opset_imports(model) return _onnx_program.ONNXProgram(model, exported_program) def _verbose_printer(verbose: bool | None) -> Callable[..., None]: """Prints messages based on `verbose`.""" if verbose is False: return lambda *_, **__: None # pyrefly: ignore [not-iterable] return lambda *args, **kwargs: print("[torch.onnx]", *args, **kwargs) @_flags.set_onnx_exporting_flag def export( model: torch.nn.Module | torch.export.ExportedProgram | torch.fx.GraphModule | torch.jit.ScriptModule | torch.jit.ScriptFunction, args: tuple[Any, ...] = (), kwargs: dict[str, Any] | None = None, *, registry: _registration.ONNXRegistry | None = None, dynamic_shapes: dict[str, Any] | tuple[Any, ...] | list[Any] | None = None, input_names: Sequence[str] | None = None, output_names: Sequence[str] | None = None, report: bool = False, verify: bool = False, profile: bool = False, dump_exported_program: bool = False, artifacts_dir: str | os.PathLike = ".", verbose: bool | None = None, ) -> _onnx_program.ONNXProgram: """Export a PyTorch model to ONNXProgram. Args: model: The model to export. This can be a PyTorch nn.Module or an ExportedProgram. args: The arguments to pass to the model. kwargs: The keyword arguments to pass to the model. registry: The registry of all ONNX decompositions. dynamic_shapes: Dynamic shapes in the graph. input_names: If provided, rename the inputs. output_names: If provided, rename the outputs. report: Whether to generate an error report if the export fails. verify: Whether to verify the ONNX model after exporting. profile: Whether to profile the export process. When report is True, the profile result will be saved in the report. 
Otherwise, the profile result will be printed. dump_exported_program: Whether to save the exported program to a file. artifacts_dir: The directory to save the exported program and error reports. verbose: Whether to print verbose messages. If None (default), some messages will be printed. Returns: The ONNXProgram with the exported IR graph. Raises: TorchExportError: If the export process fails with torch.export. ConversionError: If the ExportedProgram to ONNX translation fails. """ # Set up the error reporting facilities timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S-%f") profiler = _maybe_start_profiler(profile) # Create the artifacts directory if it does not exist artifacts_dir = pathlib.Path(artifacts_dir) if report or profile or dump_exported_program: artifacts_dir.mkdir(parents=True, exist_ok=True) verbose_print = _verbose_printer(verbose) export_status = _reporting.ExportStatus() failed_results: list[_capture_strategies.Result] = [] program: torch.export.ExportedProgram | None = None capture_strategy: str | None = None # Step 1: Export the model with torch.export.export if the model is not already an ExportedProgram if isinstance(model, torch.export.ExportedProgram): # We know the model is already exported program, so the args, kwargs, and dynamic_shapes # are not used. 
program = model # torch.export.export has strict default to False export_status.torch_export_non_strict = True else: # Convert an nn.Module to an ExportedProgram # Try everything 🐰 (all paths for getting an ExportedProgram) # When input is a JIT module, the last strategy will succeed so it is handled result: _capture_strategies.Result | None = None for strategy_class in _capture_strategies.CAPTURE_STRATEGIES: strategy = strategy_class( # type: ignore[abstract] verbose=verbose is not False, # Treat None as verbose dump=dump_exported_program, artifacts_dir=artifacts_dir, timestamp=timestamp, ) result = strategy(model, args, kwargs, dynamic_shapes=dynamic_shapes) # Record the status if strategy_class is _capture_strategies.TorchExportNonStrictStrategy: export_status.torch_export_non_strict = result.success elif strategy_class is _capture_strategies.TorchExportStrictStrategy: export_status.torch_export_strict = result.success elif strategy_class is _capture_strategies.TorchExportDraftExportStrategy: export_status.torch_export_draft_export = result.success if result.exception is not None: failed_results.append(result) if result.success: assert result.exported_program is not None program = result.exported_program break assert result is not None capture_strategy = result.strategy if result.exported_program is None: # If all strategies fail, produce an error report and raise the first error profile_result = _maybe_stop_profiler_and_get_result(profiler) if report: report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) try: _reporting.create_torch_export_error_report( report_path, _format_exceptions_for_all_strategies(failed_results), export_status=export_status, profile_result=profile_result, ) except Exception as e_report: verbose_print( f"Failed to save error report due to an error: {e_report}" ) else: report_path = None first_error = failed_results[0].exception assert first_error is not None # NOTE: We only throw the torch.export 
(first) exception because we want to # focus on the torch.export.export error. Errors from other strategies like # torch.jit.trace is due to the fallback and can be confusing to users. # We save all errors in the error report. raise _errors.TorchExportError( _STEP_ONE_ERROR_MESSAGE + ( f"\nError report has been saved to '{report_path}'." if report else "" ) + _summarize_exception_stack(first_error) ) from first_error assert program is not None if dump_exported_program: verbose_print("Dumping ExportedProgram because `dump_exported_program=True`...") program_path = artifacts_dir / f"onnx_export_{timestamp}.pt2" try: torch.export.save(program, program_path) except Exception as e: verbose_print(f"Failed to save ExportedProgram due to an error: {e}") else: verbose_print(f"ExportedProgram has been saved to '{program_path}'.") # Step 2: Decompose the exported program and insert type promotion nodes verbose_print("Run decomposition...") try: # Build the ONNX function registry if registry is None: registry = _registration.ONNXRegistry.from_torchlib() # Process the exported program to run decompositions and type promotions etc. decomposed_program = _prepare_exported_program_for_export( program, registry=registry ) except Exception as e: export_status.decomposition = False verbose_print("Run decomposition... ❌") profile_result = _maybe_stop_profiler_and_get_result(profiler) if report: report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) # Run the analysis to get the error report try: _reporting.create_onnx_export_report( report_path, f"{_format_exceptions_for_all_strategies(failed_results)}\n\n{_format_exception(e)}", program, export_status=export_status, profile_result=profile_result, registry=registry, ) except Exception: logger.exception("Failed to save report due to an error.") else: report_path = None raise _errors.ConversionError( _STEP_TWO_ERROR_MESSAGE + (f"\nError report has been saved to '{report_path}'." 
if report else "") + _summarize_exception_stack(e) ) from e else: export_status.decomposition = True verbose_print("Run decomposition... ✅") # Step 3: Translate the decomposed program to ONNX and produce ONNXProgram verbose_print("Translate the graph into ONNX...") if report or profile: pre_decomp_unique_ops, post_decomp_unique_ops = _analysis.compare_ops( program, decomposed_program ) else: pre_decomp_unique_ops = None post_decomp_unique_ops = None try: # Convert the exported program to an ONNX model onnx_program = _exported_program_to_onnx_program( decomposed_program, registry=registry ) # Record the strategy used for getting the exported program for unit test assertions onnx_program._capture_strategy = capture_strategy # Run the ONNX passes if input_names: _ir_passes.rename_inputs(onnx_program.model, input_names) if output_names: _ir_passes.rename_outputs(onnx_program.model, output_names) export_status.onnx_translation = True verbose_print("Translate the graph into ONNX... ✅") except Exception as e: export_status.onnx_translation = False verbose_print("Translate the graph into ONNX... 
❌") profile_result = _maybe_stop_profiler_and_get_result(profiler) if report: report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) try: assert pre_decomp_unique_ops is not None assert post_decomp_unique_ops is not None # Run the analysis to get the error report _reporting.create_onnx_export_report( report_path, f"{_format_exceptions_for_all_strategies(failed_results)}\n\n{_format_exception(e)}", decomposed_program, decomp_comparison=_reporting.format_decomp_comparison( pre_decomp_unique_ops, post_decomp_unique_ops ), export_status=export_status, profile_result=profile_result, registry=registry, ) verbose_print(f"Export report has been saved to '{report_path}'.") except Exception: logger.exception("Failed to save report due to an error.") else: report_path = None raise _errors.ConversionError( _STEP_THREE_ERROR_MESSAGE + (f"\nError report has been saved to '{report_path}'." if report else "") + _summarize_exception_stack(e) ) from e profile_result = _maybe_stop_profiler_and_get_result(profiler) assert onnx_program.exported_program is not None if not verify: # Return if verification is not requested if report: try: assert pre_decomp_unique_ops is not None assert post_decomp_unique_ops is not None report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) _reporting.create_onnx_export_report( report_path, "No errors" if not failed_results else _format_exceptions_for_all_strategies(failed_results), onnx_program.exported_program, decomp_comparison=_reporting.format_decomp_comparison( pre_decomp_unique_ops, post_decomp_unique_ops ), export_status=export_status, profile_result=profile_result, model=onnx_program.model, registry=registry, ) verbose_print(f"Export report has been saved to '{report_path}'.") except Exception: logger.exception("Failed to save report due to an error.") elif profile and profile_result is not None: verbose_print("Profile result:") verbose_print(profile_result) return 
onnx_program # Step 4: (verify=True) Check the ONNX model with ONNX checker try: verbose_print("Check the ONNX model...") onnxscript_apis.check_model(onnx_program.model) export_status.onnx_checker = True verbose_print("Check the ONNX model... ✅") except Exception as e: export_status.onnx_checker = False verbose_print("Check the ONNX model... ❌") if report: try: assert pre_decomp_unique_ops is not None assert post_decomp_unique_ops is not None report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) _reporting.create_onnx_export_report( report_path, f"{_format_exceptions_for_all_strategies(failed_results)}\n\n{_format_exception(e)}", onnx_program.exported_program, decomp_comparison=_reporting.format_decomp_comparison( pre_decomp_unique_ops, post_decomp_unique_ops ), export_status=export_status, profile_result=profile_result, model=onnx_program.model, registry=registry, ) verbose_print(f"Export report has been saved to '{report_path}'.") except Exception: logger.exception("Failed to save report due to an error.") logger.warning( "Conversion successful but the ONNX model fails ONNX checker. " # noqa: G004 "Please create an issue " f"in the PyTorch GitHub repository against the {_BLUE}*onnx*{_END} component and " "attach the full error stack as well as reproduction scripts. ", exc_info=e, ) return onnx_program # Step 5: (verify=True) Execute the model with ONNX Runtime try: verbose_print("Execute the model with ONNX Runtime...") verification_results = _verification.verify_onnx_program(onnx_program) verbose_print("Execute the model with ONNX Runtime... ✅") export_status.onnx_runtime = True onnx_runtime_error_message = None except Exception as e: verbose_print("Execute the model with ONNX Runtime... 
❌") export_status.onnx_runtime = False onnx_runtime_error_message = _format_exception(e) verification_message = None else: # Step 6: (verify=True) Validate the output values verbose_print("Verify output accuracy...") export_status.output_accuracy = True for verification_result in verification_results: # TODO(justinchuby): The threshold is arbitrary right now if verification_result.max_abs_diff >= 5e-3: logger.warning( "Output '%s' has a large absolute difference of %f. ", verification_result.name, verification_result.max_abs_diff, ) export_status.output_accuracy = False if verification_result.max_rel_diff >= 1e-1: logger.warning( "Output '%s' has a large relative difference of %f. ", verification_result.name, verification_result.max_rel_diff, ) export_status.output_accuracy = False if export_status.output_accuracy: verbose_print("Verify output accuracy... ✅") else: verbose_print("Verify output accuracy... ❌") verification_message = _reporting.format_verification_infos( verification_results ) if report: try: assert pre_decomp_unique_ops is not None assert post_decomp_unique_ops is not None traceback_lines = [] if failed_results: traceback_lines.append( _format_exceptions_for_all_strategies(failed_results) ) if onnx_runtime_error_message: traceback_lines.append("# ⚠️ ONNX Runtime error -----------------------") traceback_lines.append(onnx_runtime_error_message) if not traceback_lines: traceback_lines.append("No errors") report_path = artifacts_dir / _reporting.construct_report_file_name( timestamp, export_status ) _reporting.create_onnx_export_report( report_path, "\n\n".join(traceback_lines), onnx_program.exported_program, profile_result=profile_result, export_status=export_status, decomp_comparison=_reporting.format_decomp_comparison( pre_decomp_unique_ops, post_decomp_unique_ops ), model=onnx_program.model, registry=registry, verification_result=verification_message, ) verbose_print(f"Export report has been saved to '{report_path}'.") except Exception: 
logger.exception("Failed to save report due to an error.") # Release the inference session created during verification onnx_program.release() return onnx_program
TorchTensor
python
chroma-core__chroma
chromadb/telemetry/product/events.py
{ "start": 107, "end": 343 }
class ____(ProductTelemetryEvent): def __init__(self) -> None: super().__init__() # Lazy import to avoid circular imports from chromadb import is_in_colab self.in_colab = is_in_colab()
ClientStartEvent
python
kamyu104__LeetCode-Solutions
Python/substring-with-largest-variance.py
{ "start": 103, "end": 1030 }
class ____(object): def largestVariance(self, s): """ :type s: str :rtype: int """ def modified_kadane(a, x, y): result = curr = 0 lookup = [0]*2 remain = [a.count(x), a.count(y)] for c in a: if c not in (x, y): continue lookup[c != x] = 1 remain[c != x] -= 1 curr += 1 if c == x else -1 if curr < 0 and remain[0] and remain[1]: curr = lookup[0] = lookup[1] = 0 # reset states if the remain has both x, y if lookup[0] and lookup[1]: result = max(result, curr) # update result if x, y both exist return result alphabets = set(s) return max(modified_kadane(s, x, y) for x, y in itertools.permutations(alphabets, 2)) if len(alphabets) >= 2 else 0
Solution
python
bokeh__bokeh
src/bokeh/core/property/serialized.py
{ "start": 1486, "end": 2523 }
class ____(SingleParameterizedProperty[T]): """ A property which state won't be synced with the browser. """ _serialized = False def __init__(self, type_param: TypeOrInst[Property[T]], *, default: Init[T] = Intrinsic, help: str | None = None) -> None: super().__init__(type_param, default=default, help=help) #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- @register_type_link(NotSerialized) def _sphinx_type_link(obj: SingleParameterizedProperty[Any]) -> str: return f"{property_link(obj)}({type_link(obj.type_param)})"
NotSerialized
python
joke2k__faker
faker/providers/date_time/zh_TW/__init__.py
{ "start": 46, "end": 813 }
class ____(DateTimeProvider): MONTH_NAMES = { "01": "一月", "02": "二月", "03": "三月", "04": "四月", "05": "五月", "06": "六月", "07": "七月", "08": "八月", "09": "九月", "10": "十月", "11": "十一月", "12": "十二月", } DAY_NAMES = { "0": "星期日", "1": "星期一", "2": "星期二", "3": "星期三", "4": "星期四", "5": "星期五", "6": "星期六", } def day_of_week(self) -> str: day = self.date("%w") return self.DAY_NAMES[day] def month_name(self) -> str: month = self.month() return self.MONTH_NAMES[month] def minguo_year(self) -> str: year = self.year() return str(int(year) - 1911)
Provider
python
apache__airflow
providers/microsoft/azure/tests/unit/microsoft/azure/transfers/test_local_to_adls.py
{ "start": 1128, "end": 2913 }
class ____: @mock.patch("airflow.providers.microsoft.azure.transfers.local_to_adls.AzureDataLakeHook") def test_execute_success(self, mock_hook): operator = LocalFilesystemToADLSOperator( task_id=TASK_ID, local_path=LOCAL_PATH, remote_path=REMOTE_PATH ) operator.execute(None) mock_hook.return_value.upload_file.assert_called_once_with( local_path=LOCAL_PATH, remote_path=REMOTE_PATH, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304, ) @mock.patch("airflow.providers.microsoft.azure.transfers.local_to_adls.AzureDataLakeHook") def test_execute_raises_for_bad_glob_val(self, mock_hook): operator = LocalFilesystemToADLSOperator( task_id=TASK_ID, local_path=BAD_LOCAL_PATH, remote_path=REMOTE_PATH ) with pytest.raises(AirflowException) as ctx: operator.execute(None) assert str(ctx.value) == "Recursive glob patterns using `**` are not supported" @mock.patch("airflow.providers.microsoft.azure.transfers.local_to_adls.AzureDataLakeHook") def test_extra_options_is_passed(self, mock_hook): operator = LocalFilesystemToADLSOperator( task_id=TASK_ID, local_path=LOCAL_PATH, remote_path=REMOTE_PATH, extra_upload_options={"run": False}, ) operator.execute(None) mock_hook.return_value.upload_file.assert_called_once_with( local_path=LOCAL_PATH, remote_path=REMOTE_PATH, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304, run=False, # extra upload options )
TestADLSUploadOperator
python
numba__numba
numba/np/arrayobj.py
{ "start": 23387, "end": 24990 }
class ____(Indexer): """ Compute indices along an entire array dimension. """ def __init__(self, context, builder, aryty, ary, dim): self.context = context self.builder = builder self.aryty = aryty self.ary = ary self.dim = dim self.ll_intp = self.context.get_value_type(types.intp) def prepare(self): builder = self.builder self.size = builder.extract_value(self.ary.shape, self.dim) self.index = cgutils.alloca_once(builder, self.ll_intp) self.bb_start = builder.append_basic_block() self.bb_end = builder.append_basic_block() def get_size(self): return self.size def get_shape(self): return (self.size,) def get_index_bounds(self): # [0, size) return (self.ll_intp(0), self.size) def loop_head(self): builder = self.builder # Initialize loop variable self.builder.store(Constant(self.ll_intp, 0), self.index) builder.branch(self.bb_start) builder.position_at_end(self.bb_start) cur_index = builder.load(self.index) with builder.if_then(builder.icmp_signed('>=', cur_index, self.size), likely=False): builder.branch(self.bb_end) return cur_index, cur_index def loop_tail(self): builder = self.builder next_index = cgutils.increment_index(builder, builder.load(self.index)) builder.store(next_index, self.index) builder.branch(self.bb_start) builder.position_at_end(self.bb_end)
EntireIndexer
python
marshmallow-code__marshmallow
src/marshmallow/fields.py
{ "start": 32664, "end": 34878 }
class ____(Field[_NumT]): """Base class for number fields. This class should not be used within schemas. :param as_string: If `True`, format the serialized value as a string. :param kwargs: The same keyword arguments that :class:`Field` receives. .. versionchanged:: 3.24.0 `Number <marshmallow.fields.Number>` should no longer be used as a field within a `Schema <marshmallow.Schema>`. Use `Integer <marshmallow.fields.Integer>`, `Float <marshmallow.fields.Float>`, or `Decimal <marshmallow.fields.Decimal>` instead. """ num_type: type[_NumT] #: Default error messages. default_error_messages = { "invalid": "Not a valid number.", "too_large": "Number too large.", } def __init__(self, *, as_string: bool = False, **kwargs: Unpack[_BaseFieldKwargs]): self.as_string = as_string super().__init__(**kwargs) def _format_num(self, value) -> _NumT: """Return the number value for value, given this field's `num_type`.""" return self.num_type(value) # type: ignore[call-arg] def _validated(self, value: typing.Any) -> _NumT: """Format the value or raise a :exc:`ValidationError` if an error occurs.""" # (value is True or value is False) is ~5x faster than isinstance(value, bool) if value is True or value is False: raise self.make_error("invalid", input=value) try: return self._format_num(value) except (TypeError, ValueError) as error: raise self.make_error("invalid", input=value) from error except OverflowError as error: raise self.make_error("too_large", input=value) from error def _to_string(self, value: _NumT) -> str: return str(value) def _serialize(self, value, attr, obj, **kwargs) -> str | _NumT | None: """Return a string if `self.as_string=True`, otherwise return this field's `num_type`.""" if value is None: return None ret: _NumT = self._format_num(value) return self._to_string(ret) if self.as_string else ret def _deserialize(self, value, attr, data, **kwargs) -> _NumT: return self._validated(value)
Number
python
coleifer__peewee
tests/postgres_helpers.py
{ "start": 26, "end": 6013 }
class ____(object): # Subclasses must define these, as well as specifying requires[]. M = None # Json model. N = None # "Normal" model. def test_json_field(self): data = {'k1': ['a1', 'a2'], 'k2': {'k3': 'v3'}} j = self.M.create(data=data) j_db = self.M.get(j._pk_expr()) self.assertEqual(j_db.data, data) def test_joining_on_json_key(self): values = [ {'foo': 'bar', 'baze': {'nugget': 'alpha'}}, {'foo': 'bar', 'baze': {'nugget': 'beta'}}, {'herp': 'derp', 'baze': {'nugget': 'epsilon'}}, {'herp': 'derp', 'bar': {'nuggie': 'alpha'}}, ] for data in values: self.M.create(data=data) for value in ['alpha', 'beta', 'gamma', 'delta']: self.N.create(data=value) query = (self.M .select() .join(self.N, on=( self.N.data == self.M.data['baze']['nugget'])) .order_by(self.M.id)) results = [jm.data for jm in query] self.assertEqual(results, [ {'foo': 'bar', 'baze': {'nugget': 'alpha'}}, {'foo': 'bar', 'baze': {'nugget': 'beta'}}, ]) def test_json_lookup_methods(self): data = { 'gp1': { 'p1': {'c1': 'foo'}, 'p2': {'c2': 'bar'}}, 'gp2': {}} j = self.M.create(data=data) def assertLookup(lookup, expected): query = (self.M .select(lookup) .where(j._pk_expr()) .dicts()) self.assertEqual(query.get(), expected) expr = self.M.data['gp1']['p1'] assertLookup(expr.alias('p1'), {'p1': '{"c1": "foo"}'}) assertLookup(expr.as_json().alias('p2'), {'p2': {'c1': 'foo'}}) expr = self.M.data['gp1']['p1']['c1'] assertLookup(expr.alias('c1'), {'c1': 'foo'}) assertLookup(expr.as_json().alias('c2'), {'c2': 'foo'}) j.data = [ {'i1': ['foo', 'bar', 'baz']}, ['nugget', 'mickey']] j.save() expr = self.M.data[0]['i1'] assertLookup(expr.alias('i1'), {'i1': '["foo", "bar", "baz"]'}) assertLookup(expr.as_json().alias('i2'), {'i2': ['foo', 'bar', 'baz']}) expr = self.M.data[1][1] assertLookup(expr.alias('l1'), {'l1': 'mickey'}) assertLookup(expr.as_json().alias('l2'), {'l2': 'mickey'}) def test_json_cast(self): self.M.create(data={'foo': {'bar': 3}}) self.M.create(data={'foo': {'bar': 5}}) query = (self.M 
.select(Cast(self.M.data['foo']['bar'], 'float') * 1.5) .order_by(self.M.id) .tuples()) self.assertEqual(query[:], [(4.5,), (7.5,)]) def test_json_path(self): data = { 'foo': { 'baz': { 'bar': ['i1', 'i2', 'i3'], 'baze': ['j1', 'j2'], }}} j = self.M.create(data=data) def assertPath(path, expected): query = (self.M .select(path) .where(j._pk_expr()) .dicts()) self.assertEqual(query.get(), expected) expr = self.M.data.path('foo', 'baz', 'bar') assertPath(expr.alias('p1'), {'p1': '["i1", "i2", "i3"]'}) assertPath(expr.as_json().alias('p2'), {'p2': ['i1', 'i2', 'i3']}) expr = self.M.data.path('foo', 'baz', 'baze', 1) assertPath(expr.alias('p1'), {'p1': 'j2'}) assertPath(expr.as_json().alias('p2'), {'p2': 'j2'}) expr = self.M.data['foo'].path('baz', 'bar') assertPath(expr.alias('p1'), {'p1': '["i1", "i2", "i3"]'}) assertPath(expr.as_json().alias('p2'), {'p2': ['i1', 'i2', 'i3']}) def test_json_field_sql(self): j = (self.M .select() .where(self.M.data == {'foo': 'bar'})) table = self.M._meta.table_name self.assertSQL(j, ( 'SELECT "t1"."id", "t1"."data" ' 'FROM "%s" AS "t1" WHERE ("t1"."data" = CAST(? AS %s))') % (table, self.M.data._json_datatype)) j = (self.M .select() .where(self.M.data['foo'] == 'bar')) self.assertSQL(j, ( 'SELECT "t1"."id", "t1"."data" ' 'FROM "%s" AS "t1" WHERE ("t1"."data"->>? = ?)') % table) def assertItems(self, where, *items): query = (self.M .select() .where(where) .order_by(self.M.id)) self.assertEqual( [item.id for item in query], [item.id for item in items]) def test_lookup(self): t1 = self.M.create(data={'k1': 'v1', 'k2': {'k3': 'v3'}}) t2 = self.M.create(data={'k1': 'x1', 'k2': {'k3': 'x3'}}) t3 = self.M.create(data={'k1': 'v1', 'j2': {'j3': 'v3'}}) self.assertItems((self.M.data['k2']['k3'] == 'v3'), t1) self.assertItems((self.M.data['k1'] == 'v1'), t1, t3) # Valid key, no matching value. self.assertItems((self.M.data['k2'] == 'v1')) # Non-existent key. self.assertItems((self.M.data['not-here'] == 'v1')) # Non-existent nested key. 
self.assertItems((self.M.data['not-here']['xxx'] == 'v1')) self.assertItems((self.M.data['k2']['xxx'] == 'v1')) def test_json_bulk_update_top_level_list(self): m1 = self.M.create(data=['a', 'b', 'c']) m2 = self.M.create(data=['d', 'e', 'f']) m1.data = ['g', 'h', 'i'] m2.data = ['j', 'k', 'l'] self.M.bulk_update([m1, m2], fields=[self.M.data]) m1_db = self.M.get(self.M.id == m1.id) m2_db = self.M.get(self.M.id == m2.id) self.assertEqual(m1_db.data, ['g', 'h', 'i']) self.assertEqual(m2_db.data, ['j', 'k', 'l']) # Contains additional test-cases suitable for the JSONB data-type.
BaseJsonFieldTestCase
python
doocs__leetcode
solution/1600-1699/1627.Graph Connectivity With Threshold/Solution.py
{ "start": 563, "end": 899 }
class ____: def areConnected( self, n: int, threshold: int, queries: List[List[int]] ) -> List[bool]: uf = UnionFind(n + 1) for a in range(threshold + 1, n + 1): for b in range(a + a, n + 1, a): uf.union(a, b) return [uf.find(a) == uf.find(b) for a, b in queries]
Solution
python
sqlalchemy__sqlalchemy
lib/sqlalchemy/orm/collections.py
{ "start": 6488, "end": 6689 }
class ____(Protocol): _sa_adapter: CollectionAdapter _sa_appender: Callable[..., Any] _sa_remover: Callable[..., Any] _sa_iterator: Callable[..., Iterable[Any]]
_AdaptedCollectionProtocol
python
coleifer__peewee
playhouse/sqlite_ext.py
{ "start": 7853, "end": 8448 }
class ____(JSONField): field_type = 'JSONB' Path = JSONBPath def db_value(self, value): if value is not None: if not isinstance(value, Node): value = fn.jsonb(self._json_dumps(value)) return value def json(self): return fn.json(self) def extract(self, *paths): paths = [Value(p, converter=False) for p in paths] return fn.jsonb_extract(self, *paths) def remove(self, *paths): if not paths: return self.Path(self).remove() return fn.jsonb_remove(self, *paths)
JSONBField
python
spack__spack
lib/spack/spack/vendor/ruamel/yaml/tokens.py
{ "start": 10185, "end": 10530 }
class ____(Token): __slots__ = 'value', 'plain', 'style' id = '<scalar>' def __init__(self, value, plain, start_mark, end_mark, style=None): # type: (Any, Any, Any, Any, Any) -> None Token.__init__(self, start_mark, end_mark) self.value = value self.plain = plain self.style = style
ScalarToken
python
getsentry__sentry
src/sentry/release_health/tasks.py
{ "start": 9322, "end": 10331 }
class ____(TypedDict): environment: str project_id: int version: str def valid_environment(environment_name: str, environment_session_count: int) -> bool: """An environment is valid if it has a name and has at least one session.""" return bool(environment_name) and environment_session_count > 0 def valid_and_adopted_release( release_name: str, release_session_count: int, environment_session_count: int ) -> bool: """A release is valid if it has the correct name and it has been adopted.""" return Release.is_valid_version(release_name) and has_been_adopted( environment_session_count, release_session_count ) def has_been_adopted(total_sessions: int, total_sessions_for_release: int) -> bool: """If the release's sessions exceed 10% of total sessions it is considered adopted. https://docs.sentry.io/product/releases/health/#adoption-stages """ threshold = total_sessions * 0.1 return total_sessions_for_release >= threshold
AdoptedRelease
python
pandas-dev__pandas
pandas/tests/extension/date/array.py
{ "start": 494, "end": 1222 }
class ____(ExtensionDtype): @property def type(self): return dt.date @property def name(self): return "DateDtype" @classmethod def construct_from_string(cls, string: str): if not isinstance(string, str): raise TypeError( f"'construct_from_string' expects a string, got {type(string)}" ) if string == cls.__name__: return cls() else: raise TypeError(f"Cannot construct a '{cls.__name__}' from '{string}'") def construct_array_type(self): return DateArray @property def na_value(self): return dt.date.min def __repr__(self) -> str: return self.name
DateDtype
python
kubernetes-client__python
kubernetes/client/models/v1_namespace_list.py
{ "start": 383, "end": 7092 }
class ____(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_version': 'str', 'items': 'list[V1Namespace]', 'kind': 'str', 'metadata': 'V1ListMeta' } attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501 """V1NamespaceList - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._items = None self._kind = None self._metadata = None self.discriminator = None if api_version is not None: self.api_version = api_version self.items = items if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata @property def api_version(self): """Gets the api_version of this V1NamespaceList. # noqa: E501 APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501 :return: The api_version of this V1NamespaceList. # noqa: E501 :rtype: str """ return self._api_version @api_version.setter def api_version(self, api_version): """Sets the api_version of this V1NamespaceList. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501 :param api_version: The api_version of this V1NamespaceList. # noqa: E501 :type: str """ self._api_version = api_version @property def items(self): """Gets the items of this V1NamespaceList. # noqa: E501 Items is the list of Namespace objects in the list. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ # noqa: E501 :return: The items of this V1NamespaceList. # noqa: E501 :rtype: list[V1Namespace] """ return self._items @items.setter def items(self, items): """Sets the items of this V1NamespaceList. Items is the list of Namespace objects in the list. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ # noqa: E501 :param items: The items of this V1NamespaceList. # noqa: E501 :type: list[V1Namespace] """ if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501 raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501 self._items = items @property def kind(self): """Gets the kind of this V1NamespaceList. # noqa: E501 Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501 :return: The kind of this V1NamespaceList. # noqa: E501 :rtype: str """ return self._kind @kind.setter def kind(self, kind): """Sets the kind of this V1NamespaceList. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501 :param kind: The kind of this V1NamespaceList. 
# noqa: E501 :type: str """ self._kind = kind @property def metadata(self): """Gets the metadata of this V1NamespaceList. # noqa: E501 :return: The metadata of this V1NamespaceList. # noqa: E501 :rtype: V1ListMeta """ return self._metadata @metadata.setter def metadata(self, metadata): """Sets the metadata of this V1NamespaceList. :param metadata: The metadata of this V1NamespaceList. # noqa: E501 :type: V1ListMeta """ self._metadata = metadata def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1NamespaceList): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1NamespaceList): return True return self.to_dict() != other.to_dict()
V1NamespaceList
python
TheAlgorithms__Python
data_structures/heap/max_heap.py
{ "start": 0, "end": 2413 }
class ____: """ A max-heap implementation in Python >>> binary_heap = BinaryHeap() >>> binary_heap.insert(6) >>> binary_heap.insert(10) >>> binary_heap.insert(15) >>> binary_heap.insert(12) >>> binary_heap.pop() 15 >>> binary_heap.pop() 12 >>> binary_heap.get_list [10, 6] >>> len(binary_heap) 2 """ def __init__(self): self.__heap = [0] self.__size = 0 def __swap_up(self, i: int) -> None: """Swap the element up""" temporary = self.__heap[i] while i // 2 > 0: if self.__heap[i] > self.__heap[i // 2]: self.__heap[i] = self.__heap[i // 2] self.__heap[i // 2] = temporary i //= 2 def insert(self, value: int) -> None: """Insert new element""" self.__heap.append(value) self.__size += 1 self.__swap_up(self.__size) def __swap_down(self, i: int) -> None: """Swap the element down""" while self.__size >= 2 * i: if 2 * i + 1 > self.__size: # noqa: SIM114 bigger_child = 2 * i elif self.__heap[2 * i] > self.__heap[2 * i + 1]: bigger_child = 2 * i else: bigger_child = 2 * i + 1 temporary = self.__heap[i] if self.__heap[i] < self.__heap[bigger_child]: self.__heap[i] = self.__heap[bigger_child] self.__heap[bigger_child] = temporary i = bigger_child def pop(self) -> int: """Pop the root element""" max_value = self.__heap[1] self.__heap[1] = self.__heap[self.__size] self.__size -= 1 self.__heap.pop() self.__swap_down(1) return max_value @property def get_list(self): return self.__heap[1:] def __len__(self): """Length of the array""" return self.__size if __name__ == "__main__": import doctest doctest.testmod() # create an instance of BinaryHeap binary_heap = BinaryHeap() binary_heap.insert(6) binary_heap.insert(10) binary_heap.insert(15) binary_heap.insert(12) # pop root(max-values because it is max heap) print(binary_heap.pop()) # 15 print(binary_heap.pop()) # 12 # get the list and size after operations print(binary_heap.get_list) print(len(binary_heap))
BinaryHeap
python
sphinx-doc__sphinx
tests/roots/test-ext-autodoc/target/autoclass_content.py
{ "start": 169, "end": 284 }
class ____: """A class having __init__, no __new__""" def __init__(self): """__init__ docstring"""
C
python
sqlalchemy__sqlalchemy
test/orm/test_recursive_loaders.py
{ "start": 2080, "end": 4845 }
class ____(_NodeTest, fixtures.MappedTest): @classmethod def insert_data(cls, connection): Node = cls.classes.Node n1 = Node(data="n1") n1.append(Node(data="n11")) n1.append(Node(data="n12")) n1.append(Node(data="n13")) n1.children[0].children = [Node(data="n111"), Node(data="n112")] n1.children[1].append(Node(data="n121")) n1.children[1].append(Node(data="n122")) n1.children[1].append(Node(data="n123")) n2 = Node(data="n2") n2.append(Node(data="n21")) n2.children[0].append(Node(data="n211")) n2.children[0].append(Node(data="n212")) with Session(connection) as sess: sess.add(n1) sess.add(n2) sess.commit() @testing.fixture def data_fixture(self): Node = self.classes.Node def go(sess): n1, n2 = sess.scalars( select(Node) .where(Node.data.in_(["n1", "n2"])) .order_by(Node.id) ).all() return n1, n2 return go def _full_structure(self): Node = self.classes.Node return [ Node( data="n1", children=[ Node(data="n11"), Node( data="n12", children=[ Node(data="n121"), Node(data="n122"), Node(data="n123"), ], ), Node(data="n13"), ], ), Node( data="n2", children=[ Node( data="n21", children=[ Node(data="n211"), Node(data="n212"), ], ) ], ), ] @testing.combinations( (selectinload, 4), (immediateload, 14), argnames="loader,expected_sql_count", ) def test_recursion_depth_opt( self, data_fixture, loader, expected_sql_count ): Node = self.classes.Node sess = fixture_session() n1, n2 = data_fixture(sess) def go(): return ( sess.query(Node) .filter(Node.data.in_(["n1", "n2"])) .options(loader(Node.children, recursion_depth=-1)) .order_by(Node.data) .all() ) result = self.assert_sql_count(testing.db, go, expected_sql_count) sess.close() eq_(result, self._full_structure())
ShallowRecursiveTest
python
dagster-io__dagster
python_modules/dagster/dagster/_core/definitions/asset_selection.py
{ "start": 38955, "end": 39640 }
class ____(AssetSelection): key: str value: str include_sources: bool def resolve_inner( self, asset_graph: BaseAssetGraph, allow_missing: bool ) -> AbstractSet[AssetKey]: base_nodes = { node.key: node for node in asset_graph.asset_nodes if self.include_sources or node.is_materializable } return {key for key, node in base_nodes.items() if node.tags.get(self.key) == self.value} def to_selection_str(self) -> str: if self.value: return f'tag:"{self.key}"="{self.value}"' else: return f'tag:"{self.key}"' @whitelist_for_serdes @record
TagAssetSelection
python
django__django
tests/template_tests/filter_tests/test_last.py
{ "start": 114, "end": 931 }
class ____(SimpleTestCase): @setup({"last01": "{{ a|last }} {{ b|last }}"}) def test_last01(self): output = self.engine.render_to_string( "last01", {"a": ["x", "a&b"], "b": ["x", mark_safe("a&b")]} ) self.assertEqual(output, "a&amp;b a&b") @setup( {"last02": "{% autoescape off %}{{ a|last }} {{ b|last }}{% endautoescape %}"} ) def test_last02(self): output = self.engine.render_to_string( "last02", {"a": ["x", "a&b"], "b": ["x", mark_safe("a&b")]} ) self.assertEqual(output, "a&b a&b") @setup({"empty_list": "{% autoescape off %}{{ a|last }}{% endautoescape %}"}) def test_empty_list(self): output = self.engine.render_to_string("empty_list", {"a": []}) self.assertEqual(output, "")
LastTests
python
django__django
tests/auth_tests/models/with_custom_email_field.py
{ "start": 145, "end": 423 }
class ____(BaseUserManager): def create_user(self, username, password, email): user = self.model(username=username) user.set_password(password) user.email_address = email user.save(using=self._db) return user
CustomEmailFieldUserManager
python
lazyprogrammer__machine_learning_examples
rl2/mountaincar/pg_theano.py
{ "start": 1813, "end": 4164 }
class ____: def __init__(self, D, ft, hidden_layer_sizes=[]): self.ft = ft ##### hidden layers ##### M1 = D self.hidden_layers = [] for M2 in hidden_layer_sizes: layer = HiddenLayer(M1, M2) self.hidden_layers.append(layer) M1 = M2 # final layer mean self.mean_layer = HiddenLayer(M1, 1, lambda x: x, use_bias=False, zeros=True) # final layer variance self.var_layer = HiddenLayer(M1, 1, T.nnet.softplus, use_bias=False, zeros=False) # get all params for gradient later params = self.mean_layer.params + self.var_layer.params for layer in self.hidden_layers: params += layer.params # inputs and targets X = T.matrix('X') actions = T.vector('actions') advantages = T.vector('advantages') target_value = T.vector('target_value') # get final hidden layer Z = X for layer in self.hidden_layers: Z = layer.forward(Z) mean = self.mean_layer.forward(Z).flatten() var = self.var_layer.forward(Z).flatten() + 1e-5 # smoothing # can't find Theano log pdf, we will make it def log_pdf(actions, mean, var): k1 = T.log(2*np.pi*var) k2 = (actions - mean)**2 / var return -0.5*(k1 + k2) def entropy(var): return 0.5*T.log(2*np.pi*np.e*var) log_probs = log_pdf(actions, mean, var) cost = -T.sum(advantages * log_probs + 0.1*entropy(var)) updates = adam(cost, params) # compile functions self.train_op = theano.function( inputs=[X, actions, advantages], updates=updates, allow_input_downcast=True ) # alternatively, we could create a RandomStream and sample from # the Gaussian using Theano code self.predict_op = theano.function( inputs=[X], outputs=[mean, var], allow_input_downcast=True ) def partial_fit(self, X, actions, advantages): X = np.atleast_2d(X) X = self.ft.transform(X) actions = np.atleast_1d(actions) advantages = np.atleast_1d(advantages) self.train_op(X, actions, advantages) def predict(self, X): X = np.atleast_2d(X) X = self.ft.transform(X) return self.predict_op(X) def sample_action(self, X): pred = self.predict(X) mu = pred[0][0] v = pred[1][0] a = np.random.randn()*np.sqrt(v) + mu return 
min(max(a, -1), 1) # approximates V(s)
PolicyModel
python
pytorch__pytorch
torch/_dynamo/variables/ctx_manager.py
{ "start": 17320, "end": 18945 }
class ____(ContextWrappingVariable): """Delay a call to warnings.catch_warnings""" @staticmethod def create( tx: "InstructionTranslator", catch_warnings_args: dict[str, VariableTracker] ) -> "CatchWarningsCtxManagerVariable": return CatchWarningsCtxManagerVariable( catch_warnings_args=catch_warnings_args, target_values=None, initial_values=None, ) def __init__( self, catch_warnings_args: dict[str, VariableTracker], target_values: Optional[Any] = None, initial_values: Optional[Any] = None, **kwargs: Any, ) -> None: assert isinstance(catch_warnings_args, dict), catch_warnings_args super().__init__( target_values=target_values, initial_values=initial_values, **kwargs ) self.catch_warnings_args = catch_warnings_args def enter(self, tx: "InstructionTranslator") -> VariableTracker: kwargs = { k: v.as_python_constant() for k, v in self.catch_warnings_args.items() } ctx_val = warnings.catch_warnings(**kwargs) self.set_cleanup_hook(tx, lambda: ctx_val.__exit__(None, None, None)) return variables.ConstantVariable.create(ctx_val.__enter__()) def reconstruct(self, cg: "PyCodegen") -> None: cg.add_push_null(lambda: cg.load_import_from("warnings", "catch_warnings")) cg.foreach(self.catch_warnings_args.values()) keys = tuple(self.catch_warnings_args.keys()) cg.extend_output(cg.create_call_function_kw(len(keys), keys, False))
CatchWarningsCtxManagerVariable
python
huggingface__transformers
src/transformers/models/visual_bert/modeling_visual_bert.py
{ "start": 14524, "end": 16149 }
class ____(nn.Module): def __init__(self, config): super().__init__() self.config = config self.layer = nn.ModuleList([VisualBertLayer(config) for _ in range(config.num_hidden_layers)]) self.gradient_checkpointing = False def forward( self, hidden_states, attention_mask=None, output_attentions=False, output_hidden_states=False, return_dict=True, ): all_hidden_states = () if output_hidden_states else None all_self_attentions = () if output_attentions else None for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) layer_outputs = layer_module(hidden_states, attention_mask, output_attentions) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple( v for v in [ hidden_states, all_hidden_states, all_self_attentions, ] if v is not None ) return BaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions ) # Copied from transformers.models.bert.modeling_bert.BertPooler with Bert->VisualBert
VisualBertEncoder
python
pallets__jinja
src/jinja2/visitor.py
{ "start": 319, "end": 1733 }
class ____: """Walks the abstract syntax tree and call visitor functions for every node found. The visitor functions may return values which will be forwarded by the `visit` method. Per default the visitor functions for the nodes are ``'visit_'`` + class name of the node. So a `TryFinally` node visit function would be `visit_TryFinally`. This behavior can be changed by overriding the `get_visitor` function. If no visitor function exists for a node (return value `None`) the `generic_visit` visitor is used instead. """ def get_visitor(self, node: Node) -> "VisitCallable | None": """Return the visitor function for this node or `None` if no visitor exists for this node. In that case the generic visit function is used instead. """ return getattr(self, f"visit_{type(node).__name__}", None) def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: """Visit a node.""" f = self.get_visitor(node) if f is not None: return f(node, *args, **kwargs) return self.generic_visit(node, *args, **kwargs) def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: """Called if no explicit visitor function exists for a node.""" for child_node in node.iter_child_nodes(): self.visit(child_node, *args, **kwargs)
NodeVisitor
python
weaviate__weaviate-python-client
journey_tests/journeys.py
{ "start": 1255, "end": 2371 }
class ____: def __init__(self, client: WeaviateAsyncClient) -> None: self.__client = client @classmethod async def use(cls) -> "AsyncJourneys": client = use_async_with_local(port=8090, grpc_port=50061) await client.connect() return cls(client) async def close(self) -> None: await self.__client.close() async def simple(self) -> List[dict]: name = "FastAPIAsyncTestingCollection" if await self.__client.collections.exists(name): await self.__client.collections.delete(name) collection = await self.__client.collections.create( name=name, properties=[ Property(name="name", data_type=DataType.TEXT), Property(name="age", data_type=DataType.INT), ], ) await collection.data.insert_many([{"name": f"Person {i}", "age": i} for i in range(100)]) res = await collection.query.fetch_objects(limit=1000) await self.__client.collections.delete(name) return [cast(dict, obj.properties) for obj in res.objects]
AsyncJourneys
python
huggingface__transformers
src/transformers/models/gemma3n/modeling_gemma3n.py
{ "start": 96778, "end": 110638 }
class ____(Gemma3nPreTrainedModel): _checkpoint_conversion_mapping = {} # we are filtering the logits/labels so we shouldn't divide the loss based on num_items_in_batch accepts_loss_kwargs = False def __init__(self, config: Gemma3nConfig): super().__init__(config) self.vision_tower = AutoModel.from_config(config=config.vision_config) self.vocab_size = config.text_config.vocab_size language_model = AutoModel.from_config(config=config.text_config) self.language_model = language_model self.pad_token_id = self.config.pad_token_id if self.config.pad_token_id is not None else -1 self.vocab_size_per_layer_input = config.text_config.vocab_size_per_layer_input self.audio_tower = AutoModel.from_config(config.audio_config) self.embed_vision = Gemma3nMultimodalEmbedder(config.vision_config, config.text_config) self.embed_audio = Gemma3nMultimodalEmbedder(config.audio_config, config.text_config) self.post_init() def get_input_embeddings(self): return self.language_model.get_input_embeddings() def set_input_embeddings(self, value): self.language_model.set_input_embeddings(value) def get_image_features(self, pixel_values: torch.Tensor) -> torch.Tensor: """ Projects the last hidden state from the vision model into language model space. Args: pixel_values (`torch.FloatTensor]` of shape `(batch_size, channels, height, width)`) The tensors corresponding to the input images. Returns: image_features (`torch.Tensor`): Image feature tensor of shape `(num_images, image_length, embed_dim)`). """ vision_outputs = self.vision_tower( pixel_values=pixel_values, do_pooling=False, return_dict=True ).last_hidden_state # Convert from (batch, channels, height, width) to (batch, height * width, channels) where: # height == width and height * width == Gemma3nConfig.vision_soft_tokens_per_image. 
vision_outputs = vision_outputs.reshape( vision_outputs.shape[0], self.config.vision_config.hidden_size, self.config.vision_soft_tokens_per_image, ).permute(0, 2, 1) # Normalize and embed the soft tokens into language model space. vision_outputs *= self.config.vision_config.hidden_size**0.5 return self.embed_vision(inputs_embeds=vision_outputs) def get_placeholder_mask( self, input_ids: Optional[torch.LongTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, image_features: Optional[torch.FloatTensor] = None, audio_features: Optional[torch.FloatTensor] = None, ): """ Obtains multimodal placeholder mask from `input_ids` or `inputs_embeds`, and checks that the placeholder token count is equal to the length of multimodal features. If the lengths are different, an error is raised. """ if input_ids is None: special_image_mask = inputs_embeds == self.get_input_embeddings()( torch.tensor(self.config.image_token_id, dtype=torch.long, device=inputs_embeds.device) ) special_image_mask = special_image_mask.all(-1) special_audio_mask = ( inputs_embeds == self.get_input_embeddings()( torch.tensor(self.config.audio_token_id, dtype=torch.long, device=inputs_embeds.device) ) ).all(-1) else: special_image_mask = input_ids == self.config.image_token_id special_audio_mask = input_ids == self.config.audio_token_id n_image_tokens = special_image_mask.sum() special_image_mask = special_image_mask.unsqueeze(-1).expand_as(inputs_embeds).to(inputs_embeds.device) if image_features is not None and inputs_embeds[special_image_mask].numel() != image_features.numel(): raise ValueError( f"Image features and image tokens do not match: tokens: {n_image_tokens}, features {image_features.shape[0] * image_features.shape[1]}" ) n_audio_tokens = special_audio_mask.sum() special_audio_mask = special_audio_mask.unsqueeze(-1).expand_as(inputs_embeds).to(inputs_embeds.device) if audio_features is not None and inputs_embeds[special_audio_mask].numel() != audio_features.numel(): raise 
ValueError( f"Audio features and image tokens do not match: tokens: {n_audio_tokens}, features {audio_features.shape[0] * audio_features.shape[1]}" ) return special_image_mask, special_audio_mask @can_return_tuple def forward( self, input_ids: Optional[torch.LongTensor] = None, # text inputs pixel_values: Optional[torch.FloatTensor] = None, # vision inputs input_features: Optional[torch.FloatTensor] = None, # audio inputs attention_mask: Optional[torch.Tensor] = None, input_features_mask: Optional[torch.Tensor] = None, position_ids: Optional[torch.LongTensor] = None, past_key_values: Optional[Cache] = None, token_type_ids: Optional[torch.LongTensor] = None, cache_position: Optional[torch.LongTensor] = None, inputs_embeds: Optional[torch.FloatTensor] = None, labels: Optional[torch.LongTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, **lm_kwargs, ) -> Gemma3nCausalLMOutputWithPast: r""" labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.text_config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.text_config.vocab_size]`. Example: ```python >>> from PIL import Image >>> import requests >>> from transformers import AutoProcessor, Gemma3nForConditionalGeneration >>> model = Gemma3nForConditionalGeneration.from_pretrained("google/gemma3n2-3b-mix-224") >>> processor = AutoProcessor.from_pretrained("google/gemma3n2-3b-mix-224") >>> prompt = "Where is the cat standing?" 
>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/pipeline-cat-chonk.jpeg" >>> image = Image.open(requests.get(url, stream=True).raw) >>> inputs = processor(images=image, text=prompt, return_tensors="pt") >>> # Generate >>> generate_ids = model.generate(**inputs,) >>> processor.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] "Where is the cat standing?\nsnow" ``` """ if (input_ids is None) ^ (inputs_embeds is not None): raise ValueError("You must specify exactly one of input_ids or inputs_embeds") output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) if input_ids is not None: inputs_embeds = self.get_input_embeddings()(input_ids) # Prepare per-layer inputs from inputs_ids per_layer_inputs_mask = torch.logical_and(input_ids >= 0, input_ids < self.vocab_size_per_layer_input) per_layer_inputs_tokens = torch.where(per_layer_inputs_mask, input_ids, torch.zeros_like(input_ids)) per_layer_inputs = self.language_model.get_per_layer_inputs(per_layer_inputs_tokens) # Handle vision tokens (>= embed_vision.vocab_offset and < embed_audio.vocab_offset) vision_mask = torch.logical_and( input_ids >= self.embed_vision.vocab_offset, input_ids < self.embed_audio.vocab_offset ) dummy_vision_token_id = self.embed_vision.vocab_offset + self.embed_vision.vocab_size - 1 vision_input_ids = torch.where(vision_mask, input_ids, dummy_vision_token_id).to(inputs_embeds.device) vision_embeds = self.embed_vision(input_ids=vision_input_ids) vision_embeds = vision_embeds.to(inputs_embeds.device, inputs_embeds.dtype) expanded_vision_mask = vision_mask.unsqueeze(-1).expand_as(inputs_embeds) inputs_embeds = torch.where(expanded_vision_mask, vision_embeds, inputs_embeds) # Handle audio tokens (>= embed_audio.vocab_offset) audio_mask = input_ids >= 
self.embed_audio.vocab_offset dummy_audio_token_id = self.embed_audio.vocab_offset + self.embed_audio.vocab_size - 1 audio_input_ids = torch.where(audio_mask, input_ids, dummy_audio_token_id).to(inputs_embeds.device) audio_embeds = self.embed_audio(input_ids=audio_input_ids) audio_embeds = audio_embeds.to(inputs_embeds.device, inputs_embeds.dtype) expanded_audio_mask = audio_mask.unsqueeze(-1).expand_as(inputs_embeds) inputs_embeds = torch.where(expanded_audio_mask, audio_embeds, inputs_embeds) else: per_layer_inputs = None # Merge text and images if pixel_values is not None: image_features = self.get_image_features(pixel_values) image_features = image_features.to(inputs_embeds.device, inputs_embeds.dtype) special_image_mask, _ = self.get_placeholder_mask( input_ids, inputs_embeds=inputs_embeds, image_features=image_features ) inputs_embeds = inputs_embeds.masked_scatter(special_image_mask, image_features) # Merge text and audio if input_features is not None and input_features_mask is not None: audio_features, audio_mask = self.get_audio_features(input_features, ~input_features_mask) # The Gemma3nProcessor expects all audio will be 30s in length and inserts 188 audio soft tokens into the # text to account for this. However, the audio preprocessing and encoder do not gurarantee they will # produce 188 soft tokens; they will produce at most that many tokens, but they may produce fewer tokens # depending on the length of the longest audio input in the batch. When we encounter this situation, we pad # the audio feature out to 188 soft tokens with the emebedding of the last token in the embed_audio vocab. 
audio_padding_toks = torch.tensor([[self.vocab_size - 1]], dtype=torch.long, device=audio_features.device) audio_padding_embs = self.embed_audio(input_ids=audio_padding_toks) audio_features = torch.where(audio_mask.unsqueeze(-1), audio_padding_embs, audio_features) audio_batch_size, audio_seq_len, audio_embed_dim = audio_features.shape extra_padding_tokens = self.config.audio_soft_tokens_per_image - audio_seq_len extra_padding_features = audio_padding_embs.expand(audio_batch_size, extra_padding_tokens, audio_embed_dim) audio_features = torch.cat((audio_features, extra_padding_features), dim=1) audio_features = audio_features.to(inputs_embeds.device, inputs_embeds.dtype) _, special_audio_mask = self.get_placeholder_mask( input_ids, inputs_embeds=inputs_embeds, audio_features=audio_features ) inputs_embeds = inputs_embeds.masked_scatter(special_audio_mask, audio_features) outputs = self.language_model( input_ids=None, per_layer_inputs=per_layer_inputs, attention_mask=attention_mask, position_ids=position_ids, past_key_values=past_key_values, inputs_embeds=inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=True, cache_position=cache_position, **lm_kwargs, ) return Gemma3nModelOutputWithPast( last_hidden_state=outputs.last_hidden_state, past_key_values=outputs.past_key_values if use_cache else None, hidden_states=outputs.hidden_states, attentions=outputs.attentions, image_hidden_states=image_features if pixel_values is not None else None, audio_hidden_states=audio_features if input_features is not None else None, ) def get_audio_features( self, input_features: torch.Tensor, input_features_mask: torch.Tensor ) -> tuple[torch.Tensor, torch.Tensor]: """ Projects the last hidden state from the audio encoder into language model space. Args: input_features (`torch.FloatTensor]` of shape `(num_images, seq_length, num_features)`): The tensors corresponding to the input audio. 
input_features_mask (`torch.FloatTensor]` of shape `(num_images, seq_length)`): The attention mask for the input audio. Returns: audio_features (`torch.Tensor`): Audio feature tensor of shape `(num_images, audio_length, embed_dim)`). """ audio_outputs, audio_mask = self.audio_tower(input_features, input_features_mask) return self.embed_audio(inputs_embeds=audio_outputs), audio_mask @auto_docstring( custom_intro=""" The base Gemma 3n model comprising a vision backbone, an audio backbone, a language model, and a language modeling head. """ )
Gemma3nModel
python
ray-project__ray
python/ray/util/state/common.py
{ "start": 28416, "end": 33049 }
class ____(StateSchema): """Task State""" #: The id of the task. task_id: str = state_column(filterable=True) #: The attempt (retry) number of the task. attempt_number: int = state_column(filterable=True) #: The name of the task if it is given by the name argument. name: str = state_column(filterable=True) #: The state of the task. #: #: Refer to src/ray/protobuf/common.proto for a detailed explanation of the state #: breakdowns and typical state transition flow. #: state: TypeTaskStatus = state_column(filterable=True) #: The job id of this task. job_id: str = state_column(filterable=True) #: The actor id that's associated with this task. #: It is empty if there's no relevant actors. actor_id: Optional[str] = state_column(filterable=True) #: The type of the task. #: #: - NORMAL_TASK: Tasks created by `func.remote()`` #: - ACTOR_CREATION_TASK: Actors created by `class.remote()` #: - ACTOR_TASK: Actor tasks submitted by `actor.method.remote()` #: - DRIVER_TASK: Driver (A script that calls `ray.init`). type: TypeTaskType = state_column(filterable=True) #: The name of the task. If is the name of the function #: if the type is a task or an actor task. #: It is the name of the class if it is a actor scheduling task. func_or_class_name: str = state_column(filterable=True) #: The parent task id. If the parent is a normal task, it will be the task's id. #: If the parent runs in a concurrent actor (async actor or threaded actor), #: it will be the actor's creation task id. parent_task_id: str = state_column(filterable=True) #: Id of the node that runs the task. If the task is retried, it could #: contain the node id of the previous executed task. #: If empty, it means the task hasn't been scheduled yet. node_id: Optional[str] = state_column(filterable=True) #: The worker id that's associated with this task. worker_id: Optional[str] = state_column(filterable=True) #: The worker's pid that's associated with this task. 
worker_pid: Optional[int] = state_column(filterable=True) #: Task error type. error_type: Optional[str] = state_column(filterable=True) #: The language of the task. E.g., Python, Java, or Cpp. language: Optional[str] = state_column(detail=True, filterable=True) #: The required resources to execute the task. required_resources: Optional[dict] = state_column(detail=True, filterable=False) #: The runtime environment information for the task. runtime_env_info: Optional[dict] = state_column(detail=True, filterable=False) #: The placement group id that's associated with this task. placement_group_id: Optional[str] = state_column(detail=True, filterable=True) #: The list of events of the given task. #: Refer to src/ray/protobuf/common.proto for a detailed explanation of the state #: breakdowns and typical state transition flow. events: Optional[List[dict]] = state_column( detail=True, filterable=False, format_fn=Humanify.events ) #: The list of profile events of the given task. profiling_data: Optional[dict] = state_column(detail=True, filterable=False) #: The time when the task is created. A Unix timestamp in ms. creation_time_ms: Optional[int] = state_column( detail=True, filterable=False, format_fn=Humanify.timestamp, ) #: The time when the task starts to run. A Unix timestamp in ms. start_time_ms: Optional[int] = state_column( detail=True, filterable=False, format_fn=Humanify.timestamp, ) #: The time when the task is finished or failed. A Unix timestamp in ms. end_time_ms: Optional[int] = state_column( detail=True, filterable=False, format_fn=Humanify.timestamp ) #: The task logs info, e.g. offset into the worker log file when the task #: starts/finishes. #: None if the task is from a concurrent actor (e.g. async actor or threaded actor) task_log_info: Optional[dict] = state_column(detail=True, filterable=False) #: Task error detail info. 
error_message: Optional[str] = state_column(detail=True, filterable=False) # Is task paused by the debugger is_debugger_paused: Optional[bool] = state_column(detail=True, filterable=True) #: The call site of the task. call_site: Optional[str] = state_column(detail=True, filterable=False) #: The label selector for the task. label_selector: Optional[dict] = state_column(detail=True, filterable=False) @dataclass(init=not IS_PYDANTIC_2)
TaskState
python
getsentry__sentry
src/sentry/models/tombstone.py
{ "start": 1956, "end": 2282 }
class ____(TombstoneBase): class Meta: app_label = "sentry" db_table = "sentry_controltombstone" indexes = [ models.Index( fields=["table_name", "object_identifier"], ) ] __repr__ = sane_repr("id", "table_name", "object_identifier")
ControlTombstone
python
python__mypy
mypy/subtypes.py
{ "start": 14876, "end": 100576 }
class ____(TypeVisitor[bool]): __slots__ = ( "right", "orig_right", "proper_subtype", "subtype_context", "options", "_subtype_kind", ) def __init__(self, right: Type, subtype_context: SubtypeContext, proper_subtype: bool) -> None: self.right = get_proper_type(right) self.orig_right = right self.proper_subtype = proper_subtype self.subtype_context = subtype_context self.options = subtype_context.options self._subtype_kind = SubtypeVisitor.build_subtype_kind(subtype_context, proper_subtype) @staticmethod def build_subtype_kind(subtype_context: SubtypeContext, proper_subtype: bool) -> SubtypeKind: return ( state.strict_optional, proper_subtype, subtype_context.ignore_type_params, subtype_context.ignore_pos_arg_names, subtype_context.ignore_declared_variance, subtype_context.always_covariant, subtype_context.ignore_promotions, subtype_context.erase_instances, subtype_context.keep_erased_types, ) def _is_subtype(self, left: Type, right: Type) -> bool: if self.proper_subtype: return is_proper_subtype(left, right, subtype_context=self.subtype_context) return is_subtype(left, right, subtype_context=self.subtype_context) def _all_subtypes(self, lefts: Iterable[Type], rights: Iterable[Type]) -> bool: return all(self._is_subtype(li, ri) for (li, ri) in zip(lefts, rights)) # visit_x(left) means: is left (which is an instance of X) a subtype of right? def visit_unbound_type(self, left: UnboundType) -> bool: # This can be called if there is a bad type annotation. The result probably # doesn't matter much but by returning True we simplify these bad types away # from unions, which could filter out some bogus messages. 
return True def visit_any(self, left: AnyType) -> bool: return isinstance(self.right, AnyType) if self.proper_subtype else True def visit_none_type(self, left: NoneType) -> bool: if state.strict_optional: if isinstance(self.right, NoneType) or is_named_instance( self.right, "builtins.object" ): return True if isinstance(self.right, Instance) and self.right.type.is_protocol: members = self.right.type.protocol_members # None is compatible with Hashable (and other similar protocols). This is # slightly sloppy since we don't check the signature of "__hash__". # None is also compatible with `SupportsStr` protocol. return not members or all(member in ("__hash__", "__str__") for member in members) return False else: return True def visit_uninhabited_type(self, left: UninhabitedType) -> bool: return True def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. The result probably doesn't # matter much. # TODO: it actually does matter, figure out more principled logic about this. return not self.subtype_context.keep_erased_types def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any and not self.proper_subtype: # NOTE: `None` is a *non-subclassable* singleton, therefore no class # can by a subtype of it, even with an `Any` fallback. # This special case is needed to treat descriptors in classes with # dynamic base classes correctly, see #5456. return not isinstance(self.right, NoneType) right = self.right if isinstance(right, TupleType) and right.partial_fallback.type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) if isinstance(right, TupleType): if len(right.items) == 1: # Non-normalized Tuple type (may be left after semantic analysis # because semanal_typearg visitor is not a type translator). 
item = right.items[0] if isinstance(item, UnpackType): unpacked = get_proper_type(item.type) if isinstance(unpacked, Instance): return self._is_subtype(left, unpacked) if left.type.has_base(right.partial_fallback.type.fullname): if not self.proper_subtype: # Special cases to consider: # * Plain tuple[Any, ...] instance is a subtype of all tuple types. # * Foo[*tuple[Any, ...]] (normalized) instance is a subtype of all # tuples with fallback to Foo (e.g. for variadic NamedTuples). mapped = map_instance_to_supertype(left, right.partial_fallback.type) if is_erased_instance(mapped): if ( mapped.type.fullname == "builtins.tuple" or mapped.type.has_type_var_tuple_type ): return True return False if isinstance(right, TypeVarTupleType): # tuple[Any, ...] is like Any in the world of tuples (see special case above). if left.type.has_base("builtins.tuple"): mapped = map_instance_to_supertype(left, right.tuple_fallback.type) if isinstance(get_proper_type(mapped.args[0]), AnyType): return not self.proper_subtype if isinstance(right, Instance): if type_state.is_cached_subtype_check(self._subtype_kind, left, right): return True if type_state.is_cached_negative_subtype_check(self._subtype_kind, left, right): return False if not self.subtype_context.ignore_promotions and not right.type.is_protocol: for base in left.type.mro: if base._promote and any( self._is_subtype(p, self.right) for p in base._promote ): type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return True # Special case: Low-level integer types are compatible with 'int'. We can't # use promotions, since 'int' is already promoted to low-level integer types, # and we can't have circular promotions. if left.type.alt_promote and left.type.alt_promote.type is right.type: return True rname = right.type.fullname # Always try a nominal check if possible, # there might be errors that a user wants to silence *once*. 
# NamedTuples are a special case, because `NamedTuple` is not listed # in `TypeInfo.mro`, so when `(a: NamedTuple) -> None` is used, # we need to check for `is_named_tuple` property if ( left.type.has_base(rname) or rname == "builtins.object" or ( rname in TYPED_NAMEDTUPLE_NAMES and any(l.is_named_tuple for l in left.type.mro) ) ) and not self.subtype_context.ignore_declared_variance: # Map left type to corresponding right instances. t = map_instance_to_supertype(left, right.type) if self.subtype_context.erase_instances: erased = erase_type(t) assert isinstance(erased, Instance) t = erased nominal = True if right.type.has_type_var_tuple_type: # For variadic instances we simply find the correct type argument mappings, # all the heavy lifting is done by the tuple subtyping. assert right.type.type_var_tuple_prefix is not None assert right.type.type_var_tuple_suffix is not None prefix = right.type.type_var_tuple_prefix suffix = right.type.type_var_tuple_suffix tvt = right.type.defn.type_vars[prefix] assert isinstance(tvt, TypeVarTupleType) fallback = tvt.tuple_fallback left_prefix, left_middle, left_suffix = split_with_prefix_and_suffix( t.args, prefix, suffix ) right_prefix, right_middle, right_suffix = split_with_prefix_and_suffix( right.args, prefix, suffix ) left_args = ( left_prefix + (TupleType(list(left_middle), fallback),) + left_suffix ) right_args = ( right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix ) if not self.proper_subtype and is_erased_instance(t): return True if len(left_args) != len(right_args): return False type_params = zip(left_args, right_args, right.type.defn.type_vars) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) if not self.subtype_context.ignore_type_params: tried_infer = False for lefta, righta, tvar in type_params: if isinstance(tvar, TypeVarType): if tvar.variance == VARIANCE_NOT_READY and not tried_infer: infer_class_variances(right.type) tried_infer = True if ( 
self.subtype_context.always_covariant and tvar.variance == INVARIANT ): variance = COVARIANT else: variance = tvar.variance if not check_type_parameter( lefta, righta, variance, self.proper_subtype, self.subtype_context ): nominal = False else: # TODO: everywhere else ParamSpecs are handled as invariant. if not check_type_parameter( lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context ): nominal = False if nominal: type_state.record_subtype_cache_entry(self._subtype_kind, left, right) else: type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( left, right, proper_subtype=self.proper_subtype, options=self.options ): return True # We record negative cache entry here, and not in the protocol check like we do for # positive cache, to avoid accidentally adding a type that is not a structural # subtype, but is a nominal subtype (involving type: ignore override). type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) return False if isinstance(right, TypeType): item = right.item if isinstance(item, TupleType): item = mypy.typeops.tuple_fallback(item) # TODO: this is a bit arbitrary, we should only skip Any-related cases. if not self.proper_subtype: if is_named_instance(left, "builtins.type"): return self._is_subtype(TypeType(AnyType(TypeOfAny.special_form)), right) if left.type.is_metaclass(): if isinstance(item, AnyType): return True if isinstance(item, Instance): return is_named_instance(item, "builtins.object") if isinstance(right, LiteralType) and left.last_known_value is not None: return self._is_subtype(left.last_known_value, right) if isinstance(right, FunctionLike): # Special case: Instance can be a subtype of Callable / Overloaded. 
call = find_member("__call__", left, left, is_operator=True) if call: return self._is_subtype(call, right) return False else: return False def visit_type_var(self, left: TypeVarType) -> bool: right = self.right if isinstance(right, TypeVarType) and left.id == right.id: # Fast path for most common case. if left.upper_bound == right.upper_bound: return True # Corner case for self-types in classes generic in type vars # with value restrictions. if left.id.is_self(): return True return self._is_subtype(left.upper_bound, right.upper_bound) if left.values and self._is_subtype(UnionType.make_union(left.values), right): return True return self._is_subtype(left.upper_bound, self.right) def visit_param_spec(self, left: ParamSpecType) -> bool: right = self.right if ( isinstance(right, ParamSpecType) and right.id == left.id and right.flavor == left.flavor ): return self._is_subtype(left.prefix, right.prefix) if isinstance(right, Parameters) and are_trivial_parameters(right): return True return self._is_subtype(left.upper_bound, self.right) def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: right = self.right if isinstance(right, TypeVarTupleType) and right.id == left.id: return left.min_len >= right.min_len return self._is_subtype(left.upper_bound, self.right) def visit_unpack_type(self, left: UnpackType) -> bool: # TODO: Ideally we should not need this (since it is not a real type). # Instead callers (upper level types) should handle it when it appears in type list. if isinstance(self.right, UnpackType): return self._is_subtype(left.type, self.right.type) if isinstance(self.right, Instance) and self.right.type.fullname == "builtins.object": return True return False def visit_parameters(self, left: Parameters) -> bool: if isinstance(self.right, Parameters): return are_parameters_compatible( left, self.right, is_compat=self._is_subtype, # TODO: this should pass the current value, but then couple tests fail. 
is_proper_subtype=False, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, ) elif isinstance(self.right, Instance): return self.right.type.fullname == "builtins.object" else: return False def visit_callable_type(self, left: CallableType) -> bool: right = self.right if isinstance(right, CallableType): if left.type_guard is not None and right.type_guard is not None: if not self._is_subtype(left.type_guard, right.type_guard): return False elif left.type_is is not None and right.type_is is not None: # For TypeIs we have to check both ways; it is unsafe to pass # a TypeIs[Child] when a TypeIs[Parent] is expected, because # if the narrower returns False, we assume that the narrowed value is # *not* a Parent. if not self._is_subtype(left.type_is, right.type_is) or not self._is_subtype( right.type_is, left.type_is ): return False elif right.type_guard is not None and left.type_guard is None: # This means that one function has `TypeGuard` and other does not. # They are not compatible. See https://github.com/python/mypy/issues/11307 return False elif right.type_is is not None and left.type_is is None: # Similarly, if one function has `TypeIs` and the other does not, # they are not compatible. return False return is_callable_compatible( left, right, is_compat=self._is_subtype, is_proper_subtype=self.proper_subtype, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=( (self.options.extra_checks or self.options.strict_concatenate) if self.options else False ), ) elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items) elif isinstance(right, Instance): if right.type.is_protocol and "__call__" in right.type.protocol_members: # OK, a callable can implement a protocol with a `__call__` member. 
call = find_member("__call__", right, right, is_operator=True) assert call is not None if self._is_subtype(left, call): if len(right.type.protocol_members) == 1: return True if is_protocol_implementation(left.fallback, right, skip=["__call__"]): return True if right.type.is_protocol and left.is_type_obj(): ret_type = get_proper_type(left.ret_type) if isinstance(ret_type, TupleType): ret_type = mypy.typeops.tuple_fallback(ret_type) if isinstance(ret_type, Instance) and is_protocol_implementation( ret_type, right, proper_subtype=self.proper_subtype, class_obj=True ): return True return self._is_subtype(left.fallback, right) elif isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. return left.is_type_obj() and self._is_subtype(left.ret_type, right.item) else: return False def visit_tuple_type(self, left: TupleType) -> bool: right = self.right if isinstance(right, Instance): if is_named_instance(right, "typing.Sized"): return True elif is_named_instance(right, TUPLE_LIKE_INSTANCE_NAMES): if right.args: iter_type = right.args[0] else: if self.proper_subtype: return False iter_type = AnyType(TypeOfAny.special_form) if is_named_instance(right, "builtins.tuple") and isinstance( get_proper_type(iter_type), AnyType ): # TODO: We shouldn't need this special case. This is currently needed # for isinstance(x, tuple), though it's unclear why. return True for li in left.items: if isinstance(li, UnpackType): unpack = get_proper_type(li.type) if isinstance(unpack, TypeVarTupleType): unpack = get_proper_type(unpack.upper_bound) assert ( isinstance(unpack, Instance) and unpack.type.fullname == "builtins.tuple" ) li = unpack.args[0] if not self._is_subtype(li, iter_type): return False return True elif self._is_subtype(left.partial_fallback, right) and self._is_subtype( mypy.typeops.tuple_fallback(left), right ): return True return False elif isinstance(right, TupleType): # If right has a variadic unpack this needs special handling. 
If there is a TypeVarTuple # unpack, item count must coincide. If the left has variadic unpack but right # doesn't have one, we will fall through to False down the line. if self.variadic_tuple_subtype(left, right): return True if len(left.items) != len(right.items): return False if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): return False if is_named_instance(right.partial_fallback, "builtins.tuple"): # No need to verify fallback. This is useful since the calculated fallback # may be inconsistent due to how we calculate joins between unions vs. # non-unions. For example, join(int, str) == object, whereas # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True if is_named_instance(left.partial_fallback, "builtins.tuple"): # Again, no need to verify. At this point we know the right fallback # is a subclass of tuple, so if left is plain tuple, it cannot be a subtype. return False # At this point we know both fallbacks are non-tuple. return self._is_subtype(left.partial_fallback, right.partial_fallback) else: return False def variadic_tuple_subtype(self, left: TupleType, right: TupleType) -> bool: """Check subtyping between two potentially variadic tuples. Most non-trivial cases here are due to variadic unpacks like *tuple[X, ...], we handle such unpacks as infinite unions Tuple[()] | Tuple[X] | Tuple[X, X] | ... Note: the cases where right is fixed or has *Ts unpack should be handled by the caller. """ right_unpack_index = find_unpack_in_list(right.items) if right_unpack_index is None: # This case should be handled by the caller. return False right_unpack = right.items[right_unpack_index] assert isinstance(right_unpack, UnpackType) right_unpacked = get_proper_type(right_unpack.type) if not isinstance(right_unpacked, Instance): # This case should be handled by the caller. 
return False assert right_unpacked.type.fullname == "builtins.tuple" right_item = right_unpacked.args[0] right_prefix = right_unpack_index right_suffix = len(right.items) - right_prefix - 1 left_unpack_index = find_unpack_in_list(left.items) if left_unpack_index is None: # Simple case: left is fixed, simply find correct mapping to the right # (effectively selecting item with matching length from an infinite union). if len(left.items) < right_prefix + right_suffix: return False prefix, middle, suffix = split_with_prefix_and_suffix( tuple(left.items), right_prefix, right_suffix ) if not all( self._is_subtype(li, ri) for li, ri in zip(prefix, right.items[:right_prefix]) ): return False if right_suffix and not all( self._is_subtype(li, ri) for li, ri in zip(suffix, right.items[-right_suffix:]) ): return False return all(self._is_subtype(li, right_item) for li in middle) else: if len(left.items) < len(right.items): # There are some items on the left that will never have a matching length # on the right. return False left_prefix = left_unpack_index left_suffix = len(left.items) - left_prefix - 1 left_unpack = left.items[left_unpack_index] assert isinstance(left_unpack, UnpackType) left_unpacked = get_proper_type(left_unpack.type) if not isinstance(left_unpacked, Instance): # *Ts unpack can't be split, except if it is all mapped to Anys or objects. if self.is_top_type(right_item): right_prefix_types, middle, right_suffix_types = split_with_prefix_and_suffix( tuple(right.items), left_prefix, left_suffix ) if not all( self.is_top_type(ri) or isinstance(ri, UnpackType) for ri in middle ): return False # Also check the tails match as well. 
return self._all_subtypes( left.items[:left_prefix], right_prefix_types ) and self._all_subtypes(left.items[-left_suffix:], right_suffix_types) return False assert left_unpacked.type.fullname == "builtins.tuple" left_item = left_unpacked.args[0] # The most tricky case with two variadic unpacks we handle similar to union # subtyping: *each* item on the left, must be a subtype of *some* item on the right. # For this we first check the "asymptotic case", i.e. that both unpacks a subtypes, # and then check subtyping for all finite overlaps. if not self._is_subtype(left_item, right_item): return False max_overlap = max(0, right_prefix - left_prefix, right_suffix - left_suffix) for overlap in range(max_overlap + 1): repr_items = left.items[:left_prefix] + [left_item] * overlap if left_suffix: repr_items += left.items[-left_suffix:] left_repr = left.copy_modified(items=repr_items) if not self._is_subtype(left_repr, right): return False return True def is_top_type(self, typ: Type) -> bool: if not self.proper_subtype and isinstance(get_proper_type(typ), AnyType): return True return is_named_instance(typ, "builtins.object") def visit_typeddict_type(self, left: TypedDictType) -> bool: right = self.right if isinstance(right, Instance): return self._is_subtype(left.fallback, right) elif isinstance(right, TypedDictType): if left == right: return True # Fast path if not left.names_are_wider_than(right): return False for name, l, r in left.zip(right): # TODO: should we pass on the full subtype_context here and below? right_readonly = name in right.readonly_keys if not right_readonly: if self.proper_subtype: check = is_same_type(l, r) else: check = is_equivalent( l, r, ignore_type_params=self.subtype_context.ignore_type_params, options=self.options, ) else: # Read-only items behave covariantly check = self._is_subtype(l, r) if not check: return False # Non-required key is not compatible with a required key since # indexing may fail unexpectedly if a required key is missing. 
# Required key is not compatible with a non-read-only non-required # key since the prior doesn't support 'del' but the latter should # support it. # Required key is compatible with a read-only non-required key. required_differ = (name in left.required_keys) != (name in right.required_keys) if not right_readonly and required_differ: return False # Readonly fields check: # # A = TypedDict('A', {'x': ReadOnly[int]}) # B = TypedDict('B', {'x': int}) # def reset_x(b: B) -> None: # b['x'] = 0 # # So, `A` cannot be a subtype of `B`, while `B` can be a subtype of `A`, # because you can use `B` everywhere you use `A`, but not the other way around. if name in left.readonly_keys and name not in right.readonly_keys: return False # (NOTE: Fallbacks don't matter.) return True else: return False def visit_literal_type(self, left: LiteralType) -> bool: if isinstance(self.right, LiteralType): return left == self.right else: return self._is_subtype(left.fallback, self.right) def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): if right.type.is_protocol and "__call__" in right.type.protocol_members: # same as for CallableType call = find_member("__call__", right, right, is_operator=True) assert call is not None if self._is_subtype(left, call): if len(right.type.protocol_members) == 1: return True if is_protocol_implementation(left.fallback, right, skip=["__call__"]): return True return self._is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items: if self._is_subtype(item, right): return True return False elif isinstance(right, Overloaded): if left == self.right: # When it is the same overload, then the types are equal. return True # Ensure each overload on the right side (the supertype) is accounted for. 
previous_match_left_index = -1 matched_overloads = set() for right_item in right.items: found_match = False for left_index, left_item in enumerate(left.items): subtype_match = self._is_subtype(left_item, right_item) # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: previous_match_left_index = left_index found_match = True matched_overloads.add(left_index) break else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. strict_concat = ( (self.options.extra_checks or self.options.strict_concatenate) if self.options else False ) if left_index not in matched_overloads and ( is_callable_compatible( left_item, right_item, is_compat=self._is_subtype, is_proper_subtype=self.proper_subtype, ignore_return=True, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=strict_concat, ) or is_callable_compatible( right_item, left_item, is_compat=self._is_subtype, is_proper_subtype=self.proper_subtype, ignore_return=True, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=strict_concat, ) ): return False if not found_match: return False return True elif isinstance(right, UnboundType): return True elif isinstance(right, TypeType): # All the items must have the same type object status, so # it's sufficient to query only (any) one of them. # This is unsound, we don't check all the __init__ signatures. 
return left.is_type_obj() and self._is_subtype(left.items[0], right) else: return False def visit_union_type(self, left: UnionType) -> bool: if isinstance(self.right, Instance): literal_types: set[Instance] = set() # avoid redundant check for union of literals for item in left.relevant_items(): p_item = get_proper_type(item) lit_type = mypy.typeops.simple_literal_type(p_item) if lit_type is not None: if lit_type in literal_types: continue literal_types.add(lit_type) item = lit_type if not self._is_subtype(item, self.orig_right): return False return True elif isinstance(self.right, UnionType): # prune literals early to avoid nasty quadratic behavior which would otherwise arise when checking # subtype relationships between slightly different narrowings of an Enum # we achieve O(N+M) instead of O(N*M) fast_check: set[ProperType] = set() for item in flatten_types(self.right.relevant_items()): p_item = get_proper_type(item) fast_check.add(p_item) if isinstance(p_item, Instance) and p_item.last_known_value is not None: fast_check.add(p_item.last_known_value) for item in left.relevant_items(): p_item = get_proper_type(item) if p_item in fast_check: continue lit_type = mypy.typeops.simple_literal_type(p_item) if lit_type in fast_check: continue if not self._is_subtype(item, self.orig_right): return False return True return all(self._is_subtype(item, self.orig_right) for item in left.items) def visit_partial_type(self, left: PartialType) -> bool: # This is indeterminate as we don't really know the complete type yet. if self.proper_subtype: # TODO: What's the right thing to do here? return False if left.type is None: # Special case, partial `None`. This might happen when defining # class-level attributes with explicit `None`. # We can still recover from this. 
# https://github.com/python/mypy/issues/11105 return self.visit_none_type(NoneType()) raise RuntimeError(f'Partial type "{left}" cannot be checked with "issubtype()"') def visit_type_type(self, left: TypeType) -> bool: right = self.right if left.is_type_form: if isinstance(right, TypeType): if not right.is_type_form: return False return self._is_subtype(left.item, right.item) if isinstance(right, Instance): if right.type.fullname == "builtins.object": return True return False return False else: # not left.is_type_form if isinstance(right, TypeType): return self._is_subtype(left.item, right.item) if isinstance(right, Overloaded) and right.is_type_obj(): # Same as in other direction: if it's a constructor callable, all # items should belong to the same class' constructor, so it's enough # to check one of them. return self._is_subtype(left, right.items[0]) if isinstance(right, CallableType): if self.proper_subtype and not right.is_type_obj(): # We can't accept `Type[X]` as a *proper* subtype of Callable[P, X] # since this will break transitivity of subtyping. return False # This is unsound, we don't check the __init__ signature. return self._is_subtype(left.item, right.ret_type) if isinstance(right, Instance): if right.type.fullname in ["builtins.object", "builtins.type"]: # TODO: Strictly speaking, the type builtins.type is considered equivalent to # Type[Any]. However, this would break the is_proper_subtype check in # conditional_types for cases like isinstance(x, type) when the type # of x is Type[int]. It's unclear what's the right way to address this. 
return True item = left.item if isinstance(item, TypeVarType): item = get_proper_type(item.upper_bound) if isinstance(item, Instance): if right.type.is_protocol and is_protocol_implementation( item, right, proper_subtype=self.proper_subtype, class_obj=True ): return True metaclass = item.type.metaclass_type return metaclass is not None and self._is_subtype(metaclass, right) return False def visit_type_alias_type(self, left: TypeAliasType) -> bool: assert False, f"This should be never called, got {left}" T = TypeVar("T", bound=Type) @contextmanager def pop_on_exit(stack: list[tuple[T, T]], left: T, right: T) -> Iterator[None]: stack.append((left, right)) yield stack.pop() def is_protocol_implementation( left: Instance, right: Instance, proper_subtype: bool = False, class_obj: bool = False, skip: list[str] | None = None, options: Options | None = None, ) -> bool: """Check whether 'left' implements the protocol 'right'. If 'proper_subtype' is True, then check for a proper subtype. Treat recursive protocols by using the 'assuming' structural subtype matrix (in sparse representation, i.e. as a list of pairs (subtype, supertype)), see also comment in nodes.TypeInfo. When we enter a check for classes (A, P), defined as following:: class P(Protocol): def f(self) -> P: ... class A: def f(self) -> A: ... this results in A being a subtype of P without infinite recursion. On every false result, we pop the assumption, thus avoiding an infinite recursion as well. """ assert right.type.is_protocol if skip is None: skip = [] # We need to record this check to generate protocol fine-grained dependencies. 
type_state.record_protocol_subtype_check(left.type, right.type) # nominal subtyping currently ignores '__init__' and '__new__' signatures members_not_to_check = {"__init__", "__new__"} members_not_to_check.update(skip) # Trivial check that circumvents the bug described in issue 9771: if left.type.is_protocol: members_right = set(right.type.protocol_members) - members_not_to_check members_left = set(left.type.protocol_members) - members_not_to_check if not members_right.issubset(members_left): return False assuming = right.type.assuming_proper if proper_subtype else right.type.assuming for l, r in reversed(assuming): if l == left and r == right: return True with pop_on_exit(assuming, left, right): for member in right.type.protocol_members: if member in members_not_to_check: continue ignore_names = member != "__call__" # __call__ can be passed kwargs # The third argument below indicates to what self type is bound. # We always bind self to the subtype. (Similarly to nominal types). supertype = find_member(member, right, left) assert supertype is not None subtype = mypy.typeops.get_protocol_member(left, member, class_obj) # Useful for debugging: # print(member, 'of', left, 'has type', subtype) # print(member, 'of', right, 'has type', supertype) if not subtype: return False if not proper_subtype: # Nominal check currently ignores arg names # NOTE: If we ever change this, be sure to also change the call to # SubtypeVisitor.build_subtype_kind(...) down below. 
is_compat = is_subtype( subtype, supertype, ignore_pos_arg_names=ignore_names, options=options ) else: is_compat = is_proper_subtype(subtype, supertype) if not is_compat: return False if isinstance(get_proper_type(subtype), NoneType) and isinstance( get_proper_type(supertype), CallableType ): # We want __hash__ = None idiom to work even without --strict-optional return False subflags = get_member_flags(member, left, class_obj=class_obj) superflags = get_member_flags(member, right) if IS_SETTABLE in superflags: # Check opposite direction for settable attributes. if IS_EXPLICIT_SETTER in superflags: supertype = find_member(member, right, left, is_lvalue=True) if IS_EXPLICIT_SETTER in subflags: subtype = mypy.typeops.get_protocol_member( left, member, class_obj, is_lvalue=True ) # At this point we know attribute is present on subtype, otherwise we # would return False above. assert supertype is not None and subtype is not None if not is_subtype(supertype, subtype, options=options): return False if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: return False if not class_obj: if IS_SETTABLE not in superflags: if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: return False elif (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): return False else: if IS_VAR in superflags and IS_CLASSVAR not in subflags: # Only class variables are allowed for class object access. return False if IS_CLASSVAR in superflags: # This can be never matched by a class object. 
return False # This rule is copied from nominal check in checker.py if IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags: return False if not proper_subtype: # Nominal check currently ignores arg names, but __call__ is special for protocols ignore_names = right.type.protocol_members != ["__call__"] else: ignore_names = False subtype_kind = SubtypeVisitor.build_subtype_kind( subtype_context=SubtypeContext(ignore_pos_arg_names=ignore_names), proper_subtype=proper_subtype, ) type_state.record_subtype_cache_entry(subtype_kind, left, right) return True def find_member( name: str, itype: Instance, subtype: Type, *, is_operator: bool = False, class_obj: bool = False, is_lvalue: bool = False, ) -> Type | None: type_checker = checker_state.type_checker if type_checker is None: # Unfortunately, there are many scenarios where someone calls is_subtype() before # type checking phase. In this case we fallback to old (incomplete) logic. # TODO: reduce number of such cases (e.g. semanal_typeargs, post-semanal plugins). return find_member_simple( name, itype, subtype, is_operator=is_operator, class_obj=class_obj, is_lvalue=is_lvalue ) # We don't use ATTR_DEFINED error code below (since missing attributes can cause various # other error codes), instead we perform quick node lookup with all the fallbacks. 
info = itype.type sym = info.get(name) node = sym.node if sym else None if not node: name_not_found = True if ( name not in ["__getattr__", "__setattr__", "__getattribute__"] and not is_operator and not class_obj and itype.extra_attrs is None # skip ModuleType.__getattr__ ): for method_name in ("__getattribute__", "__getattr__"): method = info.get_method(method_name) if method and method.info.fullname != "builtins.object": name_not_found = False break if name_not_found: if info.fallback_to_any or class_obj and info.meta_fallback_to_any: return AnyType(TypeOfAny.special_form) if itype.extra_attrs and name in itype.extra_attrs.attrs: return itype.extra_attrs.attrs[name] return None from mypy.checkmember import ( MemberContext, analyze_class_attribute_access, analyze_instance_member_access, ) mx = MemberContext( is_lvalue=is_lvalue, is_super=False, is_operator=is_operator, original_type=TypeType.make_normalized(itype) if class_obj else itype, self_type=TypeType.make_normalized(subtype) if class_obj else subtype, context=Context(), # all errors are filtered, but this is a required argument chk=type_checker, suppress_errors=True, # This is needed to avoid infinite recursion in situations involving protocols like # class P(Protocol[T]): # def combine(self, other: P[S]) -> P[Tuple[T, S]]: ... # Normally we call freshen_all_functions_type_vars() during attribute access, # to avoid type variable id collisions, but for protocols this means we can't # use the assumption stack, that will grow indefinitely. # TODO: find a cleaner solution that doesn't involve massive perf impact. 
preserve_type_var_ids=True, ) with type_checker.msg.filter_errors(filter_deprecated=True): if class_obj: fallback = itype.type.metaclass_type or mx.named_type("builtins.type") return analyze_class_attribute_access(itype, name, mx, mcs_fallback=fallback) else: return analyze_instance_member_access(name, itype, mx, info) def find_member_simple( name: str, itype: Instance, subtype: Type, *, is_operator: bool = False, class_obj: bool = False, is_lvalue: bool = False, ) -> Type | None: """Find the type of member by 'name' in 'itype's TypeInfo. Find the member type after applying type arguments from 'itype', and binding 'self' to 'subtype'. Return None if member was not found. """ info = itype.type method = info.get_method(name) if method: if isinstance(method, Decorator): return find_node_type(method.var, itype, subtype, class_obj=class_obj) if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) # Pass on is_lvalue flag as this may be a property with different setter type. return find_node_type( dec.var, itype, subtype, class_obj=class_obj, is_lvalue=is_lvalue ) return find_node_type(method, itype, subtype, class_obj=class_obj) else: # don't have such method, maybe variable or decorator? node = info.get(name) v = node.node if node else None if isinstance(v, Var): return find_node_type(v, itype, subtype, class_obj=class_obj) if ( not v and name not in ["__getattr__", "__setattr__", "__getattribute__"] and not is_operator and not class_obj and itype.extra_attrs is None # skip ModuleType.__getattr__ ): for method_name in ("__getattribute__", "__getattr__"): # Normally, mypy assumes that instances that define __getattr__ have all # attributes with the corresponding return type. If this will produce # many false negatives, then this could be prohibited for # structural subtyping. 
method = info.get_method(method_name) if method and method.info.fullname != "builtins.object": if isinstance(method, Decorator): getattr_type = get_proper_type(find_node_type(method.var, itype, subtype)) else: getattr_type = get_proper_type(find_node_type(method, itype, subtype)) if isinstance(getattr_type, CallableType): return getattr_type.ret_type return getattr_type if itype.type.fallback_to_any or class_obj and itype.type.meta_fallback_to_any: return AnyType(TypeOfAny.special_form) if isinstance(v, TypeInfo): # PEP 544 doesn't specify anything about such use cases. So we just try # to do something meaningful (at least we should not crash). return TypeType(fill_typevars_with_any(v)) if itype.extra_attrs and name in itype.extra_attrs.attrs: return itype.extra_attrs.attrs[name] return None def get_member_flags(name: str, itype: Instance, class_obj: bool = False) -> set[int]: """Detect whether a member 'name' is settable, whether it is an instance or class variable, and whether it is class or static method. The flags are defined as following: * IS_SETTABLE: whether this attribute can be set, not set for methods and non-settable properties; * IS_CLASSVAR: set if the variable is annotated as 'x: ClassVar[t]'; * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or with @staticmethod. 
""" info = itype.type method = info.get_method(name) setattr_meth = info.get_method("__setattr__") if method: if isinstance(method, Decorator): if method.var.is_staticmethod or method.var.is_classmethod: return {IS_CLASS_OR_STATIC} elif method.var.is_property: return {IS_VAR} elif method.is_property: # this could be settable property assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) if dec.var.is_settable_property or setattr_meth: flags = {IS_VAR, IS_SETTABLE} if dec.var.setter_type is not None: flags.add(IS_EXPLICIT_SETTER) return flags else: return {IS_VAR} return set() # Just a regular method node = info.get(name) if not node: if setattr_meth: return {IS_SETTABLE} if itype.extra_attrs and name in itype.extra_attrs.attrs: flags = set() if name not in itype.extra_attrs.immutable: flags.add(IS_SETTABLE) return flags return set() v = node.node # just a variable if isinstance(v, Var): if v.is_property: return {IS_VAR} flags = {IS_VAR} if not v.is_final: flags.add(IS_SETTABLE) # TODO: define cleaner rules for class vs instance variables. if v.is_classvar and not is_descriptor(v.type): flags.add(IS_CLASSVAR) if class_obj and v.is_inferred: flags.add(IS_CLASSVAR) return flags return set() def is_descriptor(typ: Type | None) -> bool: typ = get_proper_type(typ) if isinstance(typ, Instance): return typ.type.get("__get__") is not None if isinstance(typ, UnionType): return all(is_descriptor(item) for item in typ.relevant_items()) return False def find_node_type( node: Var | FuncBase, itype: Instance, subtype: Type, class_obj: bool = False, is_lvalue: bool = False, ) -> Type: """Find type of a variable or method 'node' (maybe also a decorated method). Apply type arguments from 'itype', and bind 'self' to 'subtype'. 
""" from mypy.typeops import bind_self if isinstance(node, FuncBase): typ: Type | None = mypy.typeops.function_type( node, fallback=Instance(itype.type.mro[-1], []) ) else: # This part and the one below are simply copies of the logic from checkmember.py. if node.is_settable_property and is_lvalue: typ = node.setter_type if typ is None and node.is_ready: typ = node.type else: typ = node.type if typ is not None: typ = expand_self_type(node, typ, subtype) p_typ = get_proper_type(typ) if typ is None: return AnyType(TypeOfAny.from_error) # We don't need to bind 'self' for static methods, since there is no 'self'. if isinstance(node, FuncBase) or ( isinstance(p_typ, FunctionLike) and node.is_initialized_in_class and not node.is_staticmethod ): assert isinstance(p_typ, FunctionLike) if class_obj and not ( node.is_class if isinstance(node, FuncBase) else node.is_classmethod ): # Don't bind instance methods on class objects. signature = p_typ else: signature = bind_self( p_typ, subtype, is_classmethod=isinstance(node, Var) and node.is_classmethod ) if node.is_property and not class_obj: assert isinstance(signature, CallableType) if ( isinstance(node, Var) and node.is_settable_property and is_lvalue and node.setter_type is not None ): typ = signature.arg_types[0] else: typ = signature.ret_type else: typ = signature itype = map_instance_to_supertype(itype, node.info) typ = expand_type_by_instance(typ, itype) return typ def non_method_protocol_members(tp: TypeInfo) -> list[str]: """Find all non-callable members of a protocol.""" assert tp.is_protocol result: list[str] = [] anytype = AnyType(TypeOfAny.special_form) instance = Instance(tp, [anytype] * len(tp.defn.type_vars)) for member in tp.protocol_members: typ = get_proper_type(find_member(member, instance, instance)) if not isinstance(typ, (Overloaded, CallableType)): result.append(member) return result def is_callable_compatible( left: CallableType, right: CallableType, *, is_compat: Callable[[Type, Type], bool], 
is_proper_subtype: bool, is_compat_return: Callable[[Type, Type], bool] | None = None, ignore_return: bool = False, ignore_pos_arg_names: bool = False, check_args_covariantly: bool = False, allow_partial_overlap: bool = False, strict_concatenate: bool = False, ) -> bool: """Is the left compatible with the right, using the provided compatibility check? is_compat: The check we want to run against the parameters. is_compat_return: The check we want to run against the return type. If None, use the 'is_compat' check. check_args_covariantly: If true, check if the left's args is compatible with the right's instead of the other way around (contravariantly). This function is mostly used to check if the left is a subtype of the right which is why the default is to check the args contravariantly. However, it's occasionally useful to check the args using some other check, so we leave the variance configurable. For example, when checking the validity of overloads, it's useful to see if the first overload alternative has more precise arguments than the second. We would want to check the arguments covariantly in that case. Note! The following two function calls are NOT equivalent: is_callable_compatible(f, g, is_compat=is_subtype, check_args_covariantly=False) is_callable_compatible(g, f, is_compat=is_subtype, check_args_covariantly=True) The two calls are similar in that they both check the function arguments in the same direction: they both run `is_subtype(argument_from_g, argument_from_f)`. However, the two calls differ in which direction they check things like keyword arguments. For example, suppose f and g are defined like so: def f(x: int, *y: int) -> int: ... def g(x: int) -> int: ... In this case, the first call will succeed and the second will fail: f is a valid stand-in for g but not vice-versa. allow_partial_overlap: By default this function returns True if and only if *all* calls to left are also calls to right (with respect to the provided 'is_compat' function). 
If this parameter is set to 'True', we return True if *there exists at least one* call to left that's also a call to right. In other words, we perform an existential check instead of a universal one; we require left to only overlap with right instead of being a subset. For example, suppose we set 'is_compat' to some subtype check and compare following: f(x: float, y: str = "...", *args: bool) -> str g(*args: int) -> str This function would normally return 'False': f is not a subtype of g. However, we would return True if this parameter is set to 'True': the two calls are compatible if the user runs "f_or_g(3)". In the context of that specific call, the two functions effectively have signatures of: f2(float) -> str g2(int) -> str Here, f2 is a valid subtype of g2 so we return True. Specifically, if this parameter is set this function will: - Ignore optional arguments on either the left or right that have no corresponding match. - No longer mandate optional arguments on either side are also optional on the other. - No longer mandate that if right has a *arg or **kwarg that left must also have the same. Note: when this argument is set to True, this function becomes "symmetric" -- the following calls are equivalent: is_callable_compatible(f, g, is_compat=some_check, check_args_covariantly=False, allow_partial_overlap=True) is_callable_compatible(g, f, is_compat=some_check, check_args_covariantly=True, allow_partial_overlap=True) If the 'some_check' function is also symmetric, the two calls would be equivalent whether or not we check the args covariantly. """ # Normalize both types before comparing them. left = left.with_unpacked_kwargs().with_normalized_var_args() right = right.with_unpacked_kwargs().with_normalized_var_args() if is_compat_return is None: is_compat_return = is_compat # If either function is implicitly typed, ignore positional arg names too if left.implicit or right.implicit: ignore_pos_arg_names = True # Non-type cannot be a subtype of type. 
if right.is_type_obj() and not left.is_type_obj() and not allow_partial_overlap: return False # A callable L is a subtype of a generic callable R if L is a # subtype of every type obtained from R by substituting types for # the variables of R. We can check this by simply leaving the # generic variables of R as type variables, effectively varying # over all possible values. # It's okay even if these variables share ids with generic # type variables of L, because generating and solving # constraints for the variables of L to make L a subtype of R # (below) treats type variables on the two sides as independent. if left.variables: # Apply generic type variables away in left via type inference. unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False left = unified # Check return types. if not ignore_return and not is_compat_return(left.ret_type, right.ret_type): return False if check_args_covariantly: is_compat = flip_compat_check(is_compat) if not strict_concatenate and (left.from_concatenate or right.from_concatenate): strict_concatenate_check = False else: strict_concatenate_check = True return are_parameters_compatible( left, right, is_compat=is_compat, is_proper_subtype=is_proper_subtype, ignore_pos_arg_names=ignore_pos_arg_names, allow_partial_overlap=allow_partial_overlap, strict_concatenate_check=strict_concatenate_check, ) def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: param_star = param.var_arg() param_star2 = param.kw_arg() return ( param.arg_kinds == [ARG_STAR, ARG_STAR2] and param_star is not None and isinstance(get_proper_type(param_star.typ), AnyType) and param_star2 is not None and isinstance(get_proper_type(param_star2.typ), AnyType) ) def is_trivial_suffix(param: Parameters | NormalizedCallableType) -> bool: param_star = param.var_arg() param_star2 = param.kw_arg() return ( param.arg_kinds[-2:] == [ARG_STAR, ARG_STAR2] and param_star is not None and 
isinstance(get_proper_type(param_star.typ), AnyType) and param_star2 is not None and isinstance(get_proper_type(param_star2.typ), AnyType) ) def are_parameters_compatible( left: Parameters | NormalizedCallableType, right: Parameters | NormalizedCallableType, *, is_compat: Callable[[Type, Type], bool], is_proper_subtype: bool, ignore_pos_arg_names: bool = False, allow_partial_overlap: bool = False, strict_concatenate_check: bool = False, ) -> bool: """Helper function for is_callable_compatible, used for Parameter compatibility""" if right.is_ellipsis_args and not is_proper_subtype: return True left_star = left.var_arg() left_star2 = left.kw_arg() right_star = right.var_arg() right_star2 = right.kw_arg() # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" if are_trivial_parameters(right) and not is_proper_subtype: return True trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype trivial_vararg_suffix = False if ( right.arg_kinds[-1:] == [ARG_STAR] and isinstance(get_proper_type(right.arg_types[-1]), AnyType) and not is_proper_subtype and all(k.is_positional(star=True) for k in left.arg_kinds) ): # Similar to how (*Any, **Any) is considered a supertype of all callables, we consider # (*Any) a supertype of all callables with positional arguments. This is needed in # particular because we often refuse to try type inference if actual type is not # a subtype of erased template type. trivial_vararg_suffix = True # Match up corresponding arguments and check them for compatibility. In # every pair (argL, argR) of corresponding arguments from L and R, argL must # be "more general" than argR if L is to be a subtype of R. # Arguments are corresponding if they either share a name, share a position, # or both. If L's corresponding argument is ambiguous, L is not a subtype of R. 
# If left has one corresponding argument by name and another by position, # consider them to be one "merged" argument (and not ambiguous) if they're # both optional, they're name-only and position-only respectively, and they # have the same type. This rule allows functions with (*args, **kwargs) to # properly stand in for the full domain of formal arguments that they're # used for in practice. # Every argument in R must have a corresponding argument in L, and every # required argument in L must have a corresponding argument in R. # Phase 1: Confirm every argument in R has a corresponding argument in L. # Phase 1a: If left and right can both accept an infinite number of args, # their types must be compatible. # # Furthermore, if we're checking for compatibility in all cases, # we confirm that if R accepts an infinite number of arguments, # L must accept the same. def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | None) -> bool: if right_arg is None: return False if left_arg is None: return not allow_partial_overlap and not trivial_suffix return not is_compat(right_arg.typ, left_arg.typ) if ( _incompatible(left_star, right_star) and not trivial_vararg_suffix or _incompatible(left_star2, right_star2) ): return False # Phase 1b: Check non-star args: for every arg right can accept, left must # also accept. The only exception is if we are allowing partial # overlaps: in that case, we ignore optional args on the right. 
for right_arg in right.formal_arguments(): left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) if left_arg is None: if allow_partial_overlap and not right_arg.required: continue return False if not are_args_compatible( left_arg, right_arg, is_compat, ignore_pos_arg_names=ignore_pos_arg_names, allow_partial_overlap=allow_partial_overlap, allow_imprecise_kinds=right.imprecise_arg_kinds, ): return False if trivial_suffix: # For trivial right suffix we *only* check that every non-star right argument # has a valid match on the left. return True # Phase 1c: Check var args. Right has an infinite series of optional positional # arguments. Get all further positional args of left, and make sure # they're more general than the corresponding member in right. # TODO: handle suffix in UnpackType (i.e. *args: *Tuple[Ts, X, Y]). if right_star is not None and not trivial_vararg_suffix: # Synthesize an anonymous formal argument for the right right_by_position = right.try_synthesizing_arg_from_vararg(None) assert right_by_position is not None i = right_star.pos assert i is not None while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional(): if allow_partial_overlap and left.arg_kinds[i].is_optional(): break left_by_position = left.argument_by_position(i) assert left_by_position is not None if not are_args_compatible( left_by_position, right_by_position, is_compat, ignore_pos_arg_names=ignore_pos_arg_names, allow_partial_overlap=allow_partial_overlap, ): return False i += 1 # Phase 1d: Check kw args. Right has an infinite series of optional named # arguments. Get all further named args of left, and make sure # they're more general than the corresponding member in right. 
if right_star2 is not None: right_names = {name for name in right.arg_names if name is not None} left_only_names = set() for name, kind in zip(left.arg_names, left.arg_kinds): if ( name is None or kind.is_star() or name in right_names or not strict_concatenate_check ): continue left_only_names.add(name) # Synthesize an anonymous formal argument for the right right_by_name = right.try_synthesizing_arg_from_kwarg(None) assert right_by_name is not None for name in left_only_names: left_by_name = left.argument_by_name(name) assert left_by_name is not None if allow_partial_overlap and not left_by_name.required: continue if not are_args_compatible( left_by_name, right_by_name, is_compat, ignore_pos_arg_names=ignore_pos_arg_names, allow_partial_overlap=allow_partial_overlap, ): return False # Phase 2: Left must not impose additional restrictions. # (Every required argument in L must have a corresponding argument in R) # Note: we already checked the *arg and **kwarg arguments in phase 1a. for left_arg in left.formal_arguments(): right_by_name = ( right.argument_by_name(left_arg.name) if left_arg.name is not None else None ) right_by_pos = ( right.argument_by_position(left_arg.pos) if left_arg.pos is not None else None ) # If the left hand argument corresponds to two right-hand arguments, # neither of them can be required. if ( right_by_name is not None and right_by_pos is not None and right_by_name != right_by_pos and (right_by_pos.required or right_by_name.required) and strict_concatenate_check and not right.imprecise_arg_kinds ): return False # All *required* left-hand arguments must have a corresponding # right-hand argument. Optional args do not matter. 
if left_arg.required and right_by_pos is None and right_by_name is None: return False return True def are_args_compatible( left: FormalArgument, right: FormalArgument, is_compat: Callable[[Type, Type], bool], *, ignore_pos_arg_names: bool, allow_partial_overlap: bool, allow_imprecise_kinds: bool = False, ) -> bool: if left.required and right.required: # If both arguments are required allow_partial_overlap has no effect. allow_partial_overlap = False def is_different( left_item: object | None, right_item: object | None, allow_overlap: bool ) -> bool: """Checks if the left and right items are different. If the right item is unspecified (e.g. if the right callable doesn't care about what name or position its arg has), we default to returning False. If we're allowing partial overlap, we also default to returning False if the left callable also doesn't care.""" if right_item is None: return False if allow_overlap and left_item is None: return False return left_item != right_item # If right has a specific name it wants this argument to be, left must # have the same. if is_different(left.name, right.name, allow_partial_overlap): # But pay attention to whether we're ignoring positional arg names if not ignore_pos_arg_names or right.pos is None: return False # If right is at a specific position, left must have the same. # TODO: partial overlap logic is flawed for positions. # We disable it to avoid false positives at a cost of few false negatives. if is_different(left.pos, right.pos, allow_overlap=False) and not allow_imprecise_kinds: return False # If right's argument is optional, left's must also be # (unless we're relaxing the checks to allow potential # rather than definite compatibility). 
if not allow_partial_overlap and not right.required and left.required: return False # If we're allowing partial overlaps and neither arg is required, # the types don't actually need to be the same if allow_partial_overlap and not left.required and not right.required: return True # Left must have a more general type return is_compat(right.typ, left.typ) def flip_compat_check(is_compat: Callable[[Type, Type], bool]) -> Callable[[Type, Type], bool]: def new_is_compat(left: Type, right: Type) -> bool: return is_compat(right, left) return new_is_compat def unify_generic_callable( type: NormalizedCallableType, target: NormalizedCallableType, ignore_return: bool, return_constraint_direction: int | None = None, ) -> NormalizedCallableType | None: """Try to unify a generic callable type with another callable type. Return unified CallableType if successful; otherwise, return None. """ import mypy.solve if set(type.type_var_ids()) & {v.id for v in mypy.typeops.get_all_type_vars(target)}: # Overload overlap check does nasty things like unifying in opposite direction. # This can easily create type variable clashes, so we need to refresh. type = freshen_function_type_vars(type) if return_constraint_direction is None: return_constraint_direction = mypy.constraints.SUBTYPE_OF constraints: list[mypy.constraints.Constraint] = [] # There is some special logic for inference in callables, so better use them # as wholes instead of picking separate arguments. 
cs = mypy.constraints.infer_constraints( type.copy_modified(ret_type=UninhabitedType()), target.copy_modified(ret_type=UninhabitedType()), mypy.constraints.SUBTYPE_OF, skip_neg_op=True, ) constraints.extend(cs) if not ignore_return: c = mypy.constraints.infer_constraints( type.ret_type, target.ret_type, return_constraint_direction ) constraints.extend(c) inferred_vars, _ = mypy.solve.solve_constraints( type.variables, constraints, allow_polymorphic=True ) if None in inferred_vars: return None non_none_inferred_vars = cast(list[Type], inferred_vars) had_errors = False def report(*args: Any) -> None: nonlocal had_errors had_errors = True # This function may be called by the solver, so we need to allow erased types here. # We anyway allow checking subtyping between other types containing <Erased> # (probably also because solver needs subtyping). See also comment in # ExpandTypeVisitor.visit_erased_type(). applied = mypy.applytype.apply_generic_arguments( type, non_none_inferred_vars, report, context=target ) if had_errors: return None return cast(NormalizedCallableType, applied) def try_restrict_literal_union(t: UnionType, s: Type) -> list[Type] | None: """Return the items of t, excluding any occurrence of s, if and only if - t only contains simple literals - s is a simple literal Otherwise, returns None """ ps = get_proper_type(s) if not mypy.typeops.is_simple_literal(ps): return None new_items: list[Type] = [] for i in t.relevant_items(): pi = get_proper_type(i) if not mypy.typeops.is_simple_literal(pi): return None if pi != ps: new_items.append(i) return new_items def restrict_subtype_away(t: Type, s: Type, *, consider_runtime_isinstance: bool = True) -> Type: """Return t minus s for runtime type assertions. If we can't determine a precise result, return a supertype of the ideal result (just t is a valid result). This is used for type inference of runtime type checks such as isinstance(). Currently, this just removes elements of a union type. 
""" p_t = get_proper_type(t) if isinstance(p_t, UnionType): new_items = try_restrict_literal_union(p_t, s) if new_items is None: new_items = [ restrict_subtype_away( item, s, consider_runtime_isinstance=consider_runtime_isinstance ) for item in p_t.relevant_items() ] return UnionType.make_union( [item for item in new_items if not isinstance(get_proper_type(item), UninhabitedType)] ) elif isinstance(p_t, TypeVarType): return p_t.copy_modified(upper_bound=restrict_subtype_away(p_t.upper_bound, s)) if consider_runtime_isinstance: if covers_at_runtime(t, s): return UninhabitedType() else: return t else: if is_proper_subtype(t, s, ignore_promotions=True): return UninhabitedType() if is_proper_subtype(t, s, ignore_promotions=True, erase_instances=True): return UninhabitedType() return t def covers_at_runtime(item: Type, supertype: Type) -> bool: """Will isinstance(item, supertype) always return True at runtime?""" item = get_proper_type(item) supertype = get_proper_type(supertype) # Since runtime type checks will ignore type arguments, erase the types. if not (isinstance(supertype, FunctionLike) and supertype.is_type_obj()): supertype = erase_type(supertype) if is_proper_subtype( erase_type(item), supertype, ignore_promotions=True, erase_instances=True ): return True if isinstance(supertype, Instance): if supertype.type.is_protocol: # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. if is_proper_subtype(item, supertype, ignore_promotions=True): return True if isinstance(item, TypedDictType): # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). 
if supertype.type.fullname == "builtins.dict": return True elif isinstance(item, TypeVarType): if is_proper_subtype(item.upper_bound, supertype, ignore_promotions=True): return True elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": # "int" covers all native int types if item.type.fullname in MYPYC_NATIVE_INT_NAMES: return True # TODO: Add more special cases. return False def is_more_precise(left: Type, right: Type, *, ignore_promotions: bool = False) -> bool: """Check if left is a more precise type than right. A left is a proper subtype of right, left is also more precise than right. Also, if right is Any, left is more precise than right, for any left. """ # TODO Should List[int] be more precise than List[Any]? right = get_proper_type(right) if isinstance(right, AnyType): return True return is_proper_subtype(left, right, ignore_promotions=ignore_promotions) def all_non_object_members(info: TypeInfo) -> set[str]: members = set(info.names) for base in info.mro[1:-1]: members.update(base.names) return members def infer_variance(info: TypeInfo, i: int) -> bool: """Infer the variance of the ith type variable of a generic class. Return True if successful. This can fail if some inferred types aren't ready. 
""" object_type = Instance(info.mro[-1], []) for variance in COVARIANT, CONTRAVARIANT, INVARIANT: tv = info.defn.type_vars[i] assert isinstance(tv, TypeVarType) if tv.variance != VARIANCE_NOT_READY: continue tv.variance = variance co = True contra = True tvar = info.defn.type_vars[i] self_type = fill_typevars(info) for member in all_non_object_members(info): # __mypy-replace is an implementation detail of the dataclass plugin if member in ("__init__", "__new__", "__mypy-replace"): continue if isinstance(self_type, TupleType): self_type = mypy.typeops.tuple_fallback(self_type) flags = get_member_flags(member, self_type) settable = IS_SETTABLE in flags node = info[member].node if isinstance(node, Var): if node.type is None: tv.variance = VARIANCE_NOT_READY return False if has_underscore_prefix(member): # Special case to avoid false positives (and to pass conformance tests) settable = False # TODO: handle settable properties with setter type different from getter. typ = find_member(member, self_type, self_type) if typ: # It's okay for a method in a generic class with a contravariant type # variable to return a generic instance of the class, if it doesn't involve # variance (i.e. values of type variables are propagated). Our normal rules # would disallow this. Replace such return types with 'Any' to allow this. # # This could probably be more lenient (e.g. allow self type be nested, don't # require all type arguments to be identical to self_type), but this will # hopefully cover the vast majority of such cases, including Self. 
typ = erase_return_self_types(typ, self_type) typ2 = expand_type(typ, {tvar.id: object_type}) if not is_subtype(typ, typ2): co = False if not is_subtype(typ2, typ): contra = False if settable: co = False # Infer variance from base classes, in case they have explicit variances for base in info.bases: base2 = expand_type(base, {tvar.id: object_type}) if not is_subtype(base, base2): co = False if not is_subtype(base2, base): contra = False if co: v = COVARIANT elif contra: v = CONTRAVARIANT else: v = INVARIANT if v == variance: break tv.variance = VARIANCE_NOT_READY return True def has_underscore_prefix(name: str) -> bool: return name.startswith("_") and not (name.startswith("__") and name.endswith("__")) def infer_class_variances(info: TypeInfo) -> bool: if not info.defn.type_args: return True tvs = info.defn.type_vars success = True for i, tv in enumerate(tvs): if isinstance(tv, TypeVarType) and tv.variance == VARIANCE_NOT_READY: if not infer_variance(info, i): success = False return success def erase_return_self_types(typ: Type, self_type: Instance) -> Type: """If a typ is function-like and returns self_type, replace return type with Any.""" proper_type = get_proper_type(typ) if isinstance(proper_type, CallableType): ret = get_proper_type(proper_type.ret_type) if isinstance(ret, Instance) and ret == self_type: return proper_type.copy_modified(ret_type=AnyType(TypeOfAny.implementation_artifact)) elif isinstance(proper_type, Overloaded): return Overloaded( [ cast(CallableType, erase_return_self_types(it, self_type)) for it in proper_type.items ] ) return typ def is_erased_instance(t: Instance) -> bool: """Is this an instance where all args are Any types?""" if not t.args: return False for arg in t.args: if isinstance(arg, UnpackType): unpacked = get_proper_type(arg.type) if not isinstance(unpacked, Instance): return False assert unpacked.type.fullname == "builtins.tuple" if not isinstance(get_proper_type(unpacked.args[0]), AnyType): return False elif not 
isinstance(get_proper_type(arg), AnyType): return False return True
SubtypeVisitor
python
huggingface__transformers
src/transformers/models/musicgen/modeling_musicgen.py
{ "start": 30173, "end": 34784 }
class ____(MusicgenPreTrainedModel): def __init__(self, config: MusicgenDecoderConfig): super().__init__(config) self.decoder = MusicgenDecoder(config) # Initialize weights and apply final processing self.post_init() def get_input_embeddings(self): return self.decoder.embed_tokens def set_input_embeddings(self, value): self.decoder.embed_tokens = value @auto_docstring def forward( self, input_ids: Optional[torch.LongTensor] = None, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.FloatTensor] = None, encoder_attention_mask: Optional[torch.LongTensor] = None, past_key_values: Optional[Cache] = None, inputs_embeds: Optional[torch.FloatTensor] = None, use_cache: Optional[bool] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, cache_position: Optional[torch.Tensor] = None, ) -> Union[tuple, BaseModelOutputWithPastAndCrossAttentions]: r""" input_ids (`torch.LongTensor` of shape `(batch_size * num_codebooks, sequence_length)`): Indices of input sequence tokens in the vocabulary, corresponding to the sequence of audio codes. Indices can be obtained by encoding an audio prompt with an audio encoder model to predict audio codes, such as with the [`EncodecModel`]. See [`EncodecModel.encode`] for details. [What are input IDs?](../glossary#input-ids) <Tip warning={true}> The `input_ids` will automatically be converted from shape `(batch_size * num_codebooks, target_sequence_length)` to `(batch_size, num_codebooks, target_sequence_length)` in the forward pass. If you obtain audio codes from an audio encoding model, such as [`EncodecModel`], ensure that the number of frames is equal to 1, and that you reshape the audio codes from `(frames, batch_size, num_codebooks, target_sequence_length)` to `(batch_size * num_codebooks, target_sequence_length)` prior to passing them as `input_ids`. 
</Tip> encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, encoder_sequence_length)`, *optional*): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) use_cache = use_cache if use_cache is not None else self.config.use_cache return_dict = return_dict if return_dict is not None else self.config.use_return_dict # decoder outputs consists of (dec_features, past_key_values, dec_hidden, dec_attn) decoder_outputs = self.decoder( input_ids=input_ids, attention_mask=attention_mask, encoder_attention_mask=encoder_attention_mask, encoder_hidden_states=encoder_hidden_states, past_key_values=past_key_values, inputs_embeds=inputs_embeds, use_cache=use_cache, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, cache_position=cache_position, ) if not return_dict: return decoder_outputs return BaseModelOutputWithPastAndCrossAttentions( last_hidden_state=decoder_outputs.last_hidden_state, past_key_values=decoder_outputs.past_key_values, hidden_states=decoder_outputs.hidden_states, attentions=decoder_outputs.attentions, cross_attentions=decoder_outputs.cross_attentions, ) @auto_docstring( custom_intro=""" The MusicGen decoder model with a language modelling head on top. """ )
MusicgenModel
python
openai__openai-python
src/openai/types/webhooks/realtime_call_incoming_webhook_event.py
{ "start": 401, "end": 558 }
class ____(BaseModel): call_id: str """The unique ID of this call.""" sip_headers: List[DataSipHeader] """Headers from the SIP Invite."""
Data
python
ansible__ansible
lib/ansible/_internal/_testing.py
{ "start": 230, "end": 825 }
class ____: @staticmethod def check(value: object, msg: str | None = 'Value is not truthy.') -> None: """Raise an `AssertionError` if the given `value` is not truthy.""" if not value: raise AssertionError(msg) @contextlib.contextmanager def hard_fail_context(msg: str) -> t.Generator[_Checker]: """Enter a context which converts all exceptions to `BaseException` and provides a `Checker` instance for making assertions.""" try: yield _Checker() except BaseException as ex: raise BaseException(f"Hard failure: {msg}") from ex
_Checker
python
run-llama__llama_index
llama-index-integrations/storage/chat_store/llama-index-storage-chat-store-sqlite/llama_index/storage/chat_store/sqlite/base.py
{ "start": 1609, "end": 3729 }
class ____(Table): """A table protocol class for typing.""" id: Column key: Column # type: ignore value: Column if SQLALCHEMY_1_4_0_PLUS: def get_data_model( base: DeclarativeBase, index_name: str, ) -> TableProtocol: """ This part create a dynamic sqlalchemy model with a new table. """ class_name = f"Data{index_name}" # dynamic class name class AbstractData(base): # type: ignore __tablename__ = f"data_{index_name}" # dynamic table name __abstract__ = True # this line is necessary id = Column( Integer(), primary_key=True, autoincrement=True, index=True, ) # Add primary key key = Column( String(), nullable=False, index=True, ) value = Column(JSON()) return type( class_name, (AbstractData,), {}, ) # type: ignore elif SQLALCHEMY_2_0_0_PLUS: from sqlalchemy.orm import Mapped, mapped_column def get_data_model( base: DeclarativeBase, index_name: str, ) -> TableProtocol: """ This part create a dynamic sqlalchemy model with a new table. """ class_name = f"Data{index_name}" # dynamic class name class AbstractData(base): # type: ignore __tablename__ = f"data_{index_name}" # dynamic table name __abstract__ = True # this line is necessary id: Mapped[int] = mapped_column( Integer(), primary_key=True, autoincrement=True, index=True, ) # Add primary key key: Mapped[str] = mapped_column( String(), nullable=False, index=True, ) value: Mapped[str] = mapped_column(JSON()) return type( class_name, (AbstractData,), {}, ) # type: ignore
TableProtocol
python
PyCQA__pyflakes
pyflakes/messages.py
{ "start": 3319, "end": 3557 }
class ____(Message): message = 'dictionary key %r repeated with different values' def __init__(self, filename, loc, key): Message.__init__(self, filename, loc) self.message_args = (key,)
MultiValueRepeatedKeyLiteral
python
dask__dask
dask/dataframe/dask_expr/_groupby.py
{ "start": 18164, "end": 18608 }
class ____(SingleAggregation): groupby_chunk = M.unique groupby_aggregate = staticmethod(_unique_aggregate) @functools.cached_property def aggregate_kwargs(self) -> dict: # type: ignore[override] kwargs = super().aggregate_kwargs meta = self.frame._meta if meta.ndim == 1: name = meta.name else: name = meta[self._slice].name return {**kwargs, "name": name}
Unique
python
apache__airflow
docker-tests/tests/docker_tests/test_prod_image.py
{ "start": 8925, "end": 9756 }
class ____: def test_execute_airflow_as_root(self, default_docker_image): run_cmd_in_docker( cmd=["airflow", "info"], user=0, envs={"PYTHONDONTWRITEBYTECODE": "true"}, image=default_docker_image, ) def test_run_custom_python_packages_as_root(self, tmp_path, default_docker_image): (tmp_path / "__init__.py").write_text("") (tmp_path / "awesome.py").write_text('print("Awesome")') output = run_cmd_in_docker( envs={"PYTHONPATH": "/custom/mount", "PYTHONDONTWRITEBYTECODE": "true"}, volumes=[(tmp_path.as_posix(), "/custom/mount")], user=0, cmd=["python", "-c", "import awesome"], image=default_docker_image, ) assert output.strip() == "Awesome"
TestExecuteAsRoot
python
tox-dev__tox
tests/execute/local_subprocess/test_local_subprocess.py
{ "start": 901, "end": 16579 }
class ____: def __init__(self) -> None: self.out_err = ( TextIOWrapper(NamedBytesIO("out"), encoding=locale.getpreferredencoding(False)), TextIOWrapper(NamedBytesIO("err"), encoding=locale.getpreferredencoding(False)), ) def read_out_err(self) -> tuple[str, str]: out_got = self.out_err[0].buffer.getvalue().decode(self.out_err[0].encoding) err_got = self.out_err[1].buffer.getvalue().decode(self.out_err[1].encoding) return out_got, err_got @pytest.mark.parametrize("color", [True, False], ids=["color", "no_color"]) @pytest.mark.parametrize(("out", "err"), [("out", "err"), ("", "")], ids=["simple", "nothing"]) @pytest.mark.parametrize("show", [True, False], ids=["show", "no_show"]) @pytest.mark.parametrize( "stderr_color", ["RED", "YELLOW", "RESET"], ids=["stderr_color_default", "stderr_color_yellow", "stderr_color_reset"], ) def test_local_execute_basic_pass( # noqa: PLR0913 caplog: LogCaptureFixture, os_env: dict[str, str], out: str, err: str, show: bool, color: bool, stderr_color: str, ) -> None: caplog.set_level(logging.NOTSET) executor = LocalSubProcessExecutor(colored=color) tox_env = MagicMock() tox_env.conf._conf.options.stderr_color = stderr_color # noqa: SLF001 code = f"import sys; print({out!r}, end=''); print({err!r}, end='', file=sys.stderr)" request = ExecuteRequest(cmd=[sys.executable, "-c", code], cwd=Path(), env=os_env, stdin=StdinSource.OFF, run_id="") out_err = FakeOutErr() with executor.call(request, show=show, out_err=out_err.out_err, env=tox_env) as status: while status.exit_code is None: # pragma: no branch status.wait() assert status.out == out.encode() assert status.err == err.encode() outcome = status.outcome assert outcome is not None assert bool(outcome) is True, outcome assert outcome.exit_code == Outcome.OK assert outcome.err == err assert outcome.out == out assert outcome.request == request out_got, err_got = out_err.read_out_err() if show: assert out_got == out expected = f"{getattr(Fore, stderr_color)}{err}{Fore.RESET}" if color and err 
else err assert err_got == expected else: assert not out_got assert not err_got assert not caplog.records def test_local_execute_basic_pass_show_on_standard_newline_flush(caplog: LogCaptureFixture) -> None: caplog.set_level(logging.NOTSET) executor = LocalSubProcessExecutor(colored=False) request = ExecuteRequest( cmd=[sys.executable, "-c", "import sys; print('out'); print('yay')"], cwd=Path(), env=os.environ.copy(), stdin=StdinSource.OFF, run_id="", ) out_err = FakeOutErr() with executor.call(request, show=True, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert repr(outcome) assert bool(outcome) is True, outcome assert outcome.exit_code == Outcome.OK assert not outcome.err assert outcome.out == f"out{os.linesep}yay{os.linesep}" out, err = out_err.read_out_err() assert out == f"out{os.linesep}yay{os.linesep}" assert not err assert not caplog.records def test_local_execute_write_a_lot(os_env: dict[str, str]) -> None: count = 10_000 executor = LocalSubProcessExecutor(colored=False) request = ExecuteRequest( cmd=[ sys.executable, "-c", ( "import sys; import time; from datetime import datetime; import os;" f"print('e' * {count}, file=sys.stderr);" f"print('o' * {count}, file=sys.stdout);" "time.sleep(0.5);" f"print('a' * {count}, file=sys.stderr);" f"print('b' * {count}, file=sys.stdout);" ), ], cwd=Path(), env=os_env, stdin=StdinSource.OFF, run_id="", ) out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert bool(outcome), outcome expected_out = f"{'o' * count}{os.linesep}{'b' * count}{os.linesep}" assert outcome.out == expected_out, expected_out[len(outcome.out) :] expected_err = f"{'e' * count}{os.linesep}{'a' * count}{os.linesep}" assert outcome.err == 
expected_err, expected_err[len(outcome.err) :] @pytest.mark.skipif(sys.platform == "win32", reason="Unix terminal size test") def test_local_execute_terminal_size(os_env: dict[str, str], monkeypatch: MonkeyPatch) -> None: """Regression test for #2999 - check terminal size is set correctly in tox subprocess.""" import pty # noqa: PLC0415 terminal_size = os.terminal_size((84, 42)) main, child = pty.openpty() # type: ignore[attr-defined, unused-ignore] # Use ReadViaThreadUnix to help with debugging the test itself. pipe_out = ReadViaThreadUnix(main, sys.stdout.buffer.write, name="testout", drain=True) # type: ignore[arg-type] with ( pipe_out, monkeypatch.context() as monkey, open( # noqa: PTH123 child, "w", encoding=locale.getpreferredencoding(False) ) as stdout_mock, ): # Switch stdout with test pty monkey.setattr(sys, "stdout", stdout_mock) monkey.setenv("COLUMNS", "84") monkey.setenv("LINES", "42") executor = LocalSubProcessExecutor(colored=False) request = ExecuteRequest( cmd=[sys.executable, "-c", "import os; print(os.get_terminal_size())"], cwd=Path(), env=os_env, stdin=StdinSource.OFF, run_id="", ) out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert bool(outcome), outcome expected_out = f"{terminal_size!r}\r\n" assert outcome.out == expected_out, expected_out[len(outcome.out) :] assert not outcome.err def test_local_execute_basic_fail(capsys: CaptureFixture, caplog: LogCaptureFixture, monkeypatch: MonkeyPatch) -> None: monkeypatch.chdir(Path(__file__).parents[3]) caplog.set_level(logging.NOTSET) executor = LocalSubProcessExecutor(colored=False) cwd = Path().absolute() cmd = [ sys.executable, "-c", "import sys; print('out', end=''); print('err', file=sys.stderr, end=''); sys.exit(3)", ] request = ExecuteRequest(cmd=cmd, cwd=cwd, env=os.environ.copy(), stdin=StdinSource.OFF, 
run_id="") # run test out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert repr(outcome) # assert no output, no logs out, err = out_err.read_out_err() assert not out assert not err assert not caplog.records # assert return object assert bool(outcome) is False, outcome assert outcome.exit_code == 3 assert outcome.err == "err" assert outcome.out == "out" assert outcome.request == request # asset fail with pytest.raises(SystemExit) as context: outcome.assert_success() # asset fail assert context.value.code == 3 out, err = capsys.readouterr() assert out == "out\n" expected = f"{Fore.RED}err{Fore.RESET}\n" assert err == expected assert len(caplog.records) == 1 record = caplog.records[0] assert record.levelno == logging.CRITICAL assert record.msg == "exit %s (%.2f seconds) %s> %s%s" assert record.args is not None code, duration, cwd_, cmd_, metadata = record.args assert code == 3 assert cwd_ == cwd assert cmd_ == request.shell_cmd assert isinstance(duration, float) assert duration > 0 assert isinstance(metadata, str) assert metadata.startswith(" pid=") def test_command_does_not_exist(caplog: LogCaptureFixture, os_env: dict[str, str]) -> None: caplog.set_level(logging.NOTSET) executor = LocalSubProcessExecutor(colored=False) request = ExecuteRequest( cmd=["sys-must-be-missing"], cwd=Path().absolute(), env=os_env, stdin=StdinSource.OFF, run_id="", ) out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() # pragma: no cover outcome = status.outcome assert outcome is not None assert bool(outcome) is False, outcome assert outcome.exit_code != Outcome.OK assert not outcome.out assert not outcome.err assert len(caplog.records) == 1 assert caplog.records[0].levelname == 
"ERROR" assert re.match( r".*(No such file or directory|The system cannot find the file specified).*", caplog.records[0].message ) @pytest.mark.skipif(sys.platform == "win32", reason="You need a conhost shell for keyboard interrupt") @pytest.mark.flaky(max_runs=3, min_passes=1) def test_command_keyboard_interrupt(tmp_path: Path, monkeypatch: MonkeyPatch, capfd: CaptureFixture) -> None: monkeypatch.chdir(tmp_path) process_up_signal = tmp_path / "signal" cmd = [sys.executable, str(Path(__file__).parent / "local_subprocess_sigint.py"), str(process_up_signal)] process = subprocess.Popen(cmd) while not process_up_signal.exists(): assert process.poll() is None root = process.pid try: child = next(iter(psutil.Process(pid=root).children())).pid except AccessDenied as exc: # pragma: no cover # on termux for example pytest.skip(str(exc)) # pragma: no cover raise # pragma: no cover print(f"test running in {os.getpid()} and sending CTRL+C to {process.pid}", file=sys.stderr) # noqa: T201 process.send_signal(SIG_INTERRUPT) try: process.communicate(timeout=3) except subprocess.TimeoutExpired: # pragma: no cover process.kill() raise out, err = capfd.readouterr() assert f"W requested interrupt of {child} from {root}, activate in 0.01" in err, err assert f"W send signal SIGINT(2) to {child} from {root} with timeout 0.05" in err, err assert f"W send signal SIGTERM(15) to {child} from {root} with timeout 0.07" in err, err assert f"W send signal SIGKILL(9) to {child} from {root}" in err, err outs = out.split("\n") exit_code = int(outs[0]) assert exit_code == -9 assert float(outs[3]) > 0 # duration assert "how about no signal 2" in outs[1], outs[1] # 2 - Interrupt assert "how about no signal 15" in outs[1], outs[1] # 15 - Terminated @pytest.mark.parametrize("tty_mode", ["on", "off"]) def test_local_subprocess_tty(monkeypatch: MonkeyPatch, mocker: MockerFixture, tty_mode: str) -> None: monkeypatch.setenv("COLUMNS", "100") monkeypatch.setenv("LINES", "100") tty = tty_mode == "on" 
mocker.patch("sys.stdout.isatty", return_value=tty) mocker.patch("sys.stderr.isatty", return_value=tty) try: import termios # noqa: F401, PLC0415 except ImportError: exp_tty = False # platforms without tty support at all else: # to avoid trying (and failing) to copy mode bits exp_tty = tty mocker.patch("termios.tcgetattr") mocker.patch("termios.tcsetattr") executor = LocalSubProcessExecutor(colored=False) cmd: list[str] = [sys.executable, str(Path(__file__).parent / "tty_check.py")] request = ExecuteRequest(cmd=cmd, stdin=StdinSource.API, cwd=Path.cwd(), env=dict(os.environ), run_id="") out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert outcome info = json.loads(outcome.out) assert info == { "stdout": exp_tty, "stderr": exp_tty, "stdin": False, "terminal": [100, 100], } @pytest.mark.parametrize("mode", ["stem", "full", "stem-pattern", "full-pattern", "all"]) def test_allow_list_external_ok(fake_exe_on_path: Path, mode: str) -> None: exe = f"{fake_exe_on_path}{'.EXE' if sys.platform == 'win32' else ''}" allow = exe if "full" in mode else fake_exe_on_path.stem allow = f"{allow[:-2]}*" if "pattern" in mode else allow allow = "*" if mode == "all" else allow request = ExecuteRequest( cmd=[fake_exe_on_path.stem], cwd=Path.cwd(), env={"PATH": os.environ["PATH"]}, stdin=StdinSource.OFF, run_id="run-id", allow=[allow], ) inst = LocalSubProcessExecuteInstance(request, MagicMock(), out=SyncWrite("out", None), err=SyncWrite("err", None)) assert inst.cmd == [exe] def test_shebang_limited_on(tmp_path: Path) -> None: exe, script, instance = _create_shebang_test(tmp_path, env={"TOX_LIMITED_SHEBANG": "1"}) if sys.platform == "win32": # pragma: win32 cover assert instance.cmd == [str(script), "--magic"] else: assert instance.cmd == [exe, "-s", str(script), "--magic"] 
@pytest.mark.parametrize("env", [{}, {"TOX_LIMITED_SHEBANG": ""}]) def test_shebang_limited_off(tmp_path: Path, env: dict[str, str]) -> None: _, script, instance = _create_shebang_test(tmp_path, env=env) assert instance.cmd == [str(script), "--magic"] def test_shebang_failed_to_parse(tmp_path: Path) -> None: _, script, instance = _create_shebang_test(tmp_path, env={"TOX_LIMITED_SHEBANG": "yes"}) script.write_text("") assert instance.cmd == [str(script), "--magic"] def _create_shebang_test(tmp_path: Path, env: dict[str, str]) -> tuple[str, Path, LocalSubProcessExecuteInstance]: exe = shutil.which("python") assert exe is not None script = tmp_path / f"s{'.EXE' if sys.platform == 'win32' else ''}" script.write_text(f"#!{exe} -s") script.chmod(script.stat().st_mode | stat.S_IEXEC) # mark it executable env["PATH"] = str(script.parent) request = create_autospec(ExecuteRequest, cmd=["s", "--magic"], env=env, allow=None) writer = create_autospec(SyncWrite) instance = LocalSubProcessExecuteInstance(request, create_autospec(ExecuteOptions), writer, writer) return exe, script, instance @pytest.mark.parametrize("key", ["COLUMNS", "ROWS"]) def test_local_execute_does_not_overwrite(key: str, mocker: MockerFixture) -> None: mocker.patch("shutil.get_terminal_size", return_value=(101, 102)) env = dict(os.environ) env[key] = key executor = LocalSubProcessExecutor(colored=False) cmd = [sys.executable, "-c", f"import os; print(os.environ['{key}'], end='')"] request = ExecuteRequest(cmd=cmd, stdin=StdinSource.API, cwd=Path.cwd(), env=env, run_id="") out_err = FakeOutErr() with executor.call(request, show=False, out_err=out_err.out_err, env=MagicMock()) as status: while status.exit_code is None: # pragma: no branch status.wait() outcome = status.outcome assert outcome is not None assert outcome.out == key
FakeOutErr
python
pandas-dev__pandas
asv_bench/benchmarks/categoricals.py
{ "start": 4607, "end": 4754 }
class ____: def setup(self): self.sel = pd.Series(["s1234"]).astype("category") def time_rendering(self): str(self.sel)
Repr
python
sqlalchemy__sqlalchemy
test/orm/test_relationships.py
{ "start": 90944, "end": 94542 }
class ____(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table( "t1", metadata, Column( "id", Integer, primary_key=True, test_needs_autoincrement=True ), Column("data", String(40)), ) Table( "t2", metadata, Column( "id", Integer, primary_key=True, test_needs_autoincrement=True ), Column("data", String(40)), Column("t1id", Integer, ForeignKey("t1.id")), ) def _assert_fk(self, a1, b1, is_set): s = Session(testing.db) s.add_all([a1, b1]) s.flush() if is_set: eq_(b1.t1id, a1.id) else: eq_(b1.t1id, None) return s def test_o2m_viewonly_oneside(self): class A(ComparableEntity): pass class B(ComparableEntity): pass self.mapper_registry.map_imperatively( A, self.tables.t1, properties={ "bs": relationship( B, viewonly=True, backref=backref("a", viewonly=False) ) }, ) self.mapper_registry.map_imperatively(B, self.tables.t2) configure_mappers() a1 = A() b1 = B() a1.bs.append(b1) assert b1.a is None assert not inspect(a1).attrs.bs.history.has_changes() assert not inspect(b1).attrs.a.history.has_changes() sess = self._assert_fk(a1, b1, False) a1.bs.remove(b1) assert a1 not in sess.dirty assert b1 not in sess.dirty def test_m2o_viewonly_oneside(self): class A(ComparableEntity): pass class B(ComparableEntity): pass self.mapper_registry.map_imperatively( A, self.tables.t1, properties={ "bs": relationship( B, viewonly=False, backref=backref("a", viewonly=True) ) }, ) self.mapper_registry.map_imperatively(B, self.tables.t2) configure_mappers() a1 = A() b1 = B() b1.a = a1 assert b1 not in a1.bs assert not inspect(a1).attrs.bs.history.has_changes() assert not inspect(b1).attrs.a.history.has_changes() sess = self._assert_fk(a1, b1, False) b1.a = None assert a1 not in sess.dirty assert b1 not in sess.dirty def test_o2m_viewonly_only(self): class A(ComparableEntity): pass class B(ComparableEntity): pass self.mapper_registry.map_imperatively( A, self.tables.t1, properties={"bs": relationship(B, viewonly=True)}, ) self.mapper_registry.map_imperatively(B, 
self.tables.t2) a1 = A() b1 = B() a1.bs.append(b1) assert not inspect(a1).attrs.bs.history.has_changes() self._assert_fk(a1, b1, False) def test_m2o_viewonly_only(self): class A(ComparableEntity): pass class B(ComparableEntity): pass self.mapper_registry.map_imperatively(A, self.tables.t1) self.mapper_registry.map_imperatively( B, self.tables.t2, properties={"a": relationship(A, viewonly=True)} ) a1 = A() b1 = B() b1.a = a1 assert not inspect(b1).attrs.a.history.has_changes() self._assert_fk(a1, b1, False)
ViewOnlyHistoryTest
python
kubernetes-client__python
kubernetes/client/api/custom_objects_api.py
{ "start": 543, "end": 334797 }
class ____(object): """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_cluster_custom_object(self, group, version, plural, body, **kwargs): # noqa: E501 """create_cluster_custom_object # noqa: E501 Creates a cluster scoped Custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_custom_object(group, version, plural, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param object body: The JSON schema of the Resource to create. (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. 
Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.create_cluster_custom_object_with_http_info(group, version, plural, body, **kwargs) # noqa: E501 def create_cluster_custom_object_with_http_info(self, group, version, plural, body, **kwargs): # noqa: E501 """create_cluster_custom_object # noqa: E501 Creates a cluster scoped Custom object # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_custom_object_with_http_info(group, version, plural, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param object body: The JSON schema of the Resource to create. (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. 
This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'body', 'pretty', 'dry_run', 'field_manager', 'field_validation' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method create_cluster_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `create_cluster_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `create_cluster_custom_object`") # noqa: E501 # verify the required parameter 'plural' 
is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `create_cluster_custom_object`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `body` when calling `create_cluster_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 query_params = [] if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501 query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501 if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return 
self.api_client.call_api( '/apis/{group}/{version}/{plural}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def create_namespaced_custom_object(self, group, version, namespace, plural, body, **kwargs): # noqa: E501 """create_namespaced_custom_object # noqa: E501 Creates a namespace scoped Custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_custom_object(group, version, namespace, plural, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param object body: The JSON schema of the Resource to create. (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. 
The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.create_namespaced_custom_object_with_http_info(group, version, namespace, plural, body, **kwargs) # noqa: E501 def create_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, body, **kwargs): # noqa: E501 """create_namespaced_custom_object # noqa: E501 Creates a namespace scoped Custom object # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_custom_object_with_http_info(group, version, namespace, plural, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param object body: The JSON schema of the Resource to create. (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. 
This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'body', 'pretty', 'dry_run', 'field_manager', 'field_validation' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method create_namespaced_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `create_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `create_namespaced_custom_object`") # noqa: E501 # verify the 
required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `create_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `create_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `body` when calling `create_namespaced_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 query_params = [] if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501 query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501 if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is 
not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def delete_cluster_custom_object(self, group, version, plural, name, **kwargs): # noqa: E501 """delete_cluster_custom_object # noqa: E501 Deletes the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_custom_object(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. 
If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param V1DeleteOptions body: :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.delete_cluster_custom_object_with_http_info(group, version, plural, name, **kwargs) # noqa: E501 def delete_cluster_custom_object_with_http_info(self, group, version, plural, name, **kwargs): # noqa: E501 """delete_cluster_custom_object # noqa: E501 Deletes the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_custom_object_with_http_info(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom object's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param V1DeleteOptions body: :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. 
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy', 'dry_run', 'body' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method delete_cluster_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `delete_cluster_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `delete_cluster_custom_object`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `delete_cluster_custom_object`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise 
ApiValueError("Missing the required parameter `name` when calling `delete_cluster_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'grace_period_seconds' in local_var_params and local_var_params['grace_period_seconds'] is not None: # noqa: E501 query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds'])) # noqa: E501 if 'orphan_dependents' in local_var_params and local_var_params['orphan_dependents'] is not None: # noqa: E501 query_params.append(('orphanDependents', local_var_params['orphan_dependents'])) # noqa: E501 if 'propagation_policy' in local_var_params and local_var_params['propagation_policy'] is not None: # noqa: E501 query_params.append(('propagationPolicy', local_var_params['propagation_policy'])) # noqa: E501 if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), 
            _return_http_data_only=local_var_params.get('_return_http_data_only'),
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_collection_cluster_custom_object(self, group, version, plural, **kwargs):
        """Delete a collection of cluster scoped custom objects.

        Synchronous by default; pass ``async_req=True`` to get back a thread
        whose ``get()`` returns the result instead.

        :param str group: the custom resource's group name (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom resource's plural name; for TPRs this is
            the lowercase plural kind (required)
        :param str pretty: if 'true', the output is pretty printed
        :param str label_selector: label selector restricting the returned
            objects; defaults to everything
        :param int grace_period_seconds: non-negative seconds before the
            objects are deleted; zero means delete immediately
        :param bool orphan_dependents: deprecated in favour of
            propagation_policy; toggles the "orphan" finalizer on dependents
        :param str propagation_policy: how garbage collection of dependents is
            performed; mutually exclusive with orphan_dependents
        :param str dry_run: 'All' processes every dry-run stage without
            persisting the request
        :param V1DeleteOptions body: optional delete options
        :param _preload_content: if False, return the raw urllib3.HTTPResponse
            without reading/decoding the body (default True)
        :param _request_timeout: total timeout, or a (connect, read) pair
        :return: object, or the request thread when async_req=True
        """
        # Thin wrapper: force the "response data only" form of the low-level
        # call so callers get the deserialized object, not the full tuple.
        kwargs['_return_http_data_only'] = True
        return self.delete_collection_cluster_custom_object_with_http_info(group, version, plural, **kwargs)

    def delete_collection_cluster_custom_object_with_http_info(self, group, version, plural, **kwargs):
        """Delete a collection of cluster scoped custom objects (raw form).

        Same request as ``delete_collection_cluster_custom_object`` in this
        block, but returns ``tuple(object, status_code(int),
        headers(HTTPHeaderDict))`` unless ``_return_http_data_only`` is set.
        Accepts the same keyword arguments, plus
        ``_return_http_data_only``.
        """
        # locals() is captured first, so it holds exactly the declared
        # positional args plus the raw **kwargs dict; validated kwargs are
        # merged into it below for uniform lookup.
        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'plural',
            'pretty',
            'label_selector',
            'grace_period_seconds',
            'orphan_dependents',
            'propagation_policy',
            'dry_run',
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject unknown keyword arguments, then flatten the known ones into
        # local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_collection_cluster_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Client-side required-parameter validation (can be disabled via the
        # api_client configuration).
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or
                                                       local_var_params['group'] is None):
            raise ApiValueError("Missing the required parameter `group` when calling `delete_collection_cluster_custom_object`")
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or
                                                       local_var_params['version'] is None):
            raise ApiValueError("Missing the required parameter `version` when calling `delete_collection_cluster_custom_object`")
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or
                                                       local_var_params['plural'] is None):
            raise ApiValueError("Missing the required parameter `plural` when calling `delete_collection_cluster_custom_object`")

        collection_formats = {}

        # URL template substitutions.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']

        # Optional query-string parameters (snake_case -> camelCase keys).
        query_params = []
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:
            query_params.append(('pretty', local_var_params['pretty']))
        if 'label_selector' in local_var_params and local_var_params['label_selector'] is not None:
            query_params.append(('labelSelector', local_var_params['label_selector']))
        if 'grace_period_seconds' in local_var_params and local_var_params['grace_period_seconds'] is not None:
            query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds']))
        if 'orphan_dependents' in local_var_params and local_var_params['orphan_dependents'] is not None:
            query_params.append(('orphanDependents', local_var_params['orphan_dependents']))
        if 'propagation_policy' in local_var_params and local_var_params['propagation_policy'] is not None:
            query_params.append(('propagationPolicy', local_var_params['propagation_policy']))
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:
            query_params.append(('dryRun', local_var_params['dry_run']))

        header_params = {}

        form_params = []
        local_var_files = {}

        # Optional V1DeleteOptions request body.
        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(
            '/apis/{group}/{version}/{plural}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_collection_namespaced_custom_object(self,
                                                   group, version, namespace, plural, **kwargs):
        """Delete a collection of namespace scoped custom objects.

        Synchronous by default; pass ``async_req=True`` to get back a thread
        whose ``get()`` returns the result instead.

        :param str group: the custom resource's group name (required)
        :param str version: the custom resource's version (required)
        :param str namespace: the custom resource's namespace (required)
        :param str plural: the custom resource's plural name; for TPRs this is
            the lowercase plural kind (required)
        :param str pretty: if 'true', the output is pretty printed
        :param str label_selector: label selector restricting the returned
            objects; defaults to everything
        :param int grace_period_seconds: non-negative seconds before the
            objects are deleted; zero means delete immediately
        :param bool orphan_dependents: deprecated in favour of
            propagation_policy; toggles the "orphan" finalizer on dependents
        :param str propagation_policy: how garbage collection of dependents is
            performed; mutually exclusive with orphan_dependents
        :param str dry_run: 'All' processes every dry-run stage without
            persisting the request
        :param str field_selector: field selector restricting the returned
            objects; defaults to everything
        :param V1DeleteOptions body: optional delete options
        :param _preload_content: if False, return the raw urllib3.HTTPResponse
            without reading/decoding the body (default True)
        :param _request_timeout: total timeout, or a (connect, read) pair
        :return: object, or the request thread when async_req=True
        """
        # Thin wrapper: force the "response data only" form of the low-level
        # call so callers get the deserialized object, not the full tuple.
        kwargs['_return_http_data_only'] = True
        return self.delete_collection_namespaced_custom_object_with_http_info(group, version, namespace, plural, **kwargs)

    def delete_collection_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, **kwargs):
        """Delete a collection of namespace scoped custom objects (raw form).

        Same request as ``delete_collection_namespaced_custom_object`` in this
        block, but returns ``tuple(object, status_code(int),
        headers(HTTPHeaderDict))`` unless ``_return_http_data_only`` is set.
        Accepts the same keyword arguments, plus ``_return_http_data_only``.
        """
        # locals() is captured first, so it holds exactly the declared
        # positional args plus the raw **kwargs dict.
        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'pretty',
            'label_selector',
            'grace_period_seconds',
            'orphan_dependents',
            'propagation_policy',
            'dry_run',
            'field_selector',
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject unknown keyword arguments, then flatten the known ones into
        # local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_collection_namespaced_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Client-side required-parameter validation (can be disabled via the
        # api_client configuration).
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or
                                                       local_var_params['group'] is None):
            raise ApiValueError("Missing the required parameter `group` when calling `delete_collection_namespaced_custom_object`")
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or
                                                       local_var_params['version'] is None):
            raise ApiValueError("Missing the required parameter `version` when calling `delete_collection_namespaced_custom_object`")
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or
                                                       local_var_params['namespace'] is None):
            raise ApiValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_custom_object`")
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or
                                                       local_var_params['plural'] is None):
            raise ApiValueError("Missing the required parameter `plural` when calling `delete_collection_namespaced_custom_object`")

        collection_formats = {}

        # URL template substitutions.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']

        # Optional query-string parameters (snake_case -> camelCase keys).
        query_params = []
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:
            query_params.append(('pretty', local_var_params['pretty']))
        if 'label_selector' in local_var_params and local_var_params['label_selector'] is not None:
            query_params.append(('labelSelector', local_var_params['label_selector']))
        if 'grace_period_seconds' in local_var_params and local_var_params['grace_period_seconds'] is not None:
            query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds']))
        if 'orphan_dependents' in local_var_params and local_var_params['orphan_dependents'] is not None:
            query_params.append(('orphanDependents', local_var_params['orphan_dependents']))
        if 'propagation_policy' in local_var_params and local_var_params['propagation_policy'] is not None:
            query_params.append(('propagationPolicy', local_var_params['propagation_policy']))
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:
            query_params.append(('dryRun', local_var_params['dry_run']))
        if 'field_selector' in local_var_params and local_var_params['field_selector'] is not None:
            query_params.append(('fieldSelector', local_var_params['field_selector']))

        header_params = {}

        form_params = []
        local_var_files = {}

        # Optional V1DeleteOptions request body.
        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_namespaced_custom_object(self, group, version, namespace, plural, name, **kwargs):
        """Delete the specified namespace scoped custom object.

        Synchronous by default; pass ``async_req=True`` to get back a thread
        whose ``get()`` returns the result instead.

        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: the custom resource's namespace (required)
        :param str plural: the custom resource's plural name; for TPRs this is
            the lowercase plural kind (required)
        :param str name: the custom object's name (required)
        :param int grace_period_seconds: non-negative seconds before the
            object is deleted; zero means delete immediately
        :param bool orphan_dependents: deprecated in favour of
            propagation_policy; toggles the "orphan" finalizer on dependents
        :param str propagation_policy: how garbage collection of dependents is
            performed; mutually exclusive with orphan_dependents
        :param str dry_run: 'All' processes every dry-run stage without
            persisting the request
        :param V1DeleteOptions body: optional delete options
        :param _preload_content: if False, return the raw urllib3.HTTPResponse
            without reading/decoding the body (default True)
        :param _request_timeout: total timeout, or a (connect, read) pair
        :return: object, or the request thread when async_req=True
        """
        # Thin wrapper: force the "response data only" form of the low-level
        # call so callers get the deserialized object, not the full tuple.
        kwargs['_return_http_data_only'] = True
        return self.delete_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, **kwargs)

    def delete_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, **kwargs):
        """Delete the specified namespace scoped custom object (raw form).

        Same request as ``delete_namespaced_custom_object`` in this block, but
        returns ``tuple(object, status_code(int), headers(HTTPHeaderDict))``
        unless ``_return_http_data_only`` is set. Accepts the same keyword
        arguments, plus ``_return_http_data_only``.
        """
        # locals() is captured first, so it holds exactly the declared
        # positional args plus the raw **kwargs dict.
        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'name',
            'grace_period_seconds',
            'orphan_dependents',
            'propagation_policy',
            'dry_run',
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject unknown keyword arguments, then flatten the known ones into
        # local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_namespaced_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Client-side required-parameter validation (can be disabled via the
        # api_client configuration).
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or
                                                       local_var_params['group'] is None):
            raise ApiValueError("Missing the required parameter `group` when calling `delete_namespaced_custom_object`")
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or
                                                       local_var_params['version'] is None):
            raise ApiValueError("Missing the required parameter `version` when calling `delete_namespaced_custom_object`")
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or
                                                       local_var_params['namespace'] is None):
            raise ApiValueError("Missing the required parameter `namespace` when calling `delete_namespaced_custom_object`")
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or
                                                       local_var_params['plural'] is None):
            raise ApiValueError("Missing the required parameter `plural` when calling `delete_namespaced_custom_object`")
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or
                                                       local_var_params['name'] is None):
            raise ApiValueError("Missing the required parameter `name` when calling `delete_namespaced_custom_object`")

        collection_formats = {}

        # URL template substitutions.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']

        # Optional query-string parameters (snake_case -> camelCase keys).
        query_params = []
        if 'grace_period_seconds' in local_var_params and local_var_params['grace_period_seconds'] is not None:
            query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds']))
        if 'orphan_dependents' in local_var_params and local_var_params['orphan_dependents'] is not None:
            query_params.append(('orphanDependents', local_var_params['orphan_dependents']))
        if 'propagation_policy' in local_var_params and local_var_params['propagation_policy'] is not None:
            query_params.append(('propagationPolicy', local_var_params['propagation_policy']))
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:
            query_params.append(('dryRun', local_var_params['dry_run']))

        header_params = {}

        form_params = []
        local_var_files = {}

        # Optional V1DeleteOptions request body.
        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_api_resources(self, group, version, **kwargs):
        """Get the available resources for the given group/version.

        Synchronous by default; pass ``async_req=True`` to get back a thread
        whose ``get()`` returns the result instead.

        :param str group: the custom resource's group name (required)
        :param str version: the custom resource's version (required)
        :param _preload_content: if False, return the raw urllib3.HTTPResponse
            without reading/decoding the body (default True)
        :param _request_timeout: total timeout, or a (connect, read) pair
        :return: V1APIResourceList, or the request thread when async_req=True
        """
        # Thin wrapper: force the "response data only" form of the low-level
        # call so callers get the deserialized object, not the full tuple.
        kwargs['_return_http_data_only'] = True
        return self.get_api_resources_with_http_info(group, version, **kwargs)

    def get_api_resources_with_http_info(self, group, version, **kwargs):
        """Get the available resources for the given group/version (raw form).

        Same request as ``get_api_resources`` in this block, but returns
        ``tuple(V1APIResourceList, status_code(int),
        headers(HTTPHeaderDict))`` unless ``_return_http_data_only`` is set.
        Accepts the same keyword arguments, plus ``_return_http_data_only``.
        """
        # locals() is captured first, so it holds exactly the declared
        # positional args plus the raw **kwargs dict.
        local_var_params = locals()

        all_params = [
            'group',
            'version'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject unknown keyword arguments, then flatten the known ones into
        # local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_api_resources" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Client-side required-parameter validation (can be disabled via the
        # api_client configuration).
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or
                                                       local_var_params['group'] is None):
            raise ApiValueError("Missing the required parameter `group` when calling `get_api_resources`")
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or
                                                       local_var_params['version'] is None):
            raise ApiValueError("Missing the required parameter `version` when calling `get_api_resources`")

        collection_formats = {}

        # URL template substitutions.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(
            '/apis/{group}/{version}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1APIResourceList',
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_cluster_custom_object(self, group, version, plural, name, **kwargs):
        """Return a cluster scoped custom object.

        Synchronous by default; pass ``async_req=True`` to get back a thread
        whose ``get()`` returns the result instead.

        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom object's plural name; for TPRs this is
            the lowercase plural kind (required)
        :param str name: the custom object's name (required)
        :param _preload_content: if False, return the raw urllib3.HTTPResponse
            without reading/decoding the body (default True)
        :param _request_timeout: total timeout, or a (connect, read) pair
        :return: object, or the request thread when async_req=True
        """
        # Thin wrapper: force the "response data only" form of the low-level
        # call so callers get the deserialized object, not the full tuple.
        kwargs['_return_http_data_only'] = True
        return self.get_cluster_custom_object_with_http_info(group, version, plural, name, **kwargs)

    def get_cluster_custom_object_with_http_info(self, group, version, plural, name, **kwargs):
        """Return a cluster scoped custom object (raw form).

        Same request as ``get_cluster_custom_object`` in this block, but
        returns ``tuple(object, status_code(int), headers(HTTPHeaderDict))``
        unless ``_return_http_data_only`` is set. Accepts the same keyword
        arguments, plus ``_return_http_data_only``.
        """
        # locals() is captured first, so it holds exactly the declared
        # positional args plus the raw **kwargs dict.
        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'plural',
            'name'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject unknown keyword arguments, then flatten the known ones into
        # local_var_params.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_cluster_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # Client-side required-parameter validation (can be disabled via the
        # api_client configuration).
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or
                                                       local_var_params['group'] is None):
            raise ApiValueError("Missing the required parameter `group` when calling `get_cluster_custom_object`")
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or
                                                       local_var_params['version'] is None):
            raise ApiValueError("Missing the required parameter `version` when calling `get_cluster_custom_object`")
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or
                                                       local_var_params['plural'] is None):
            raise ApiValueError("Missing the required parameter `plural` when calling `get_cluster_custom_object`")
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or
                                                       local_var_params['name'] is None):
            raise ApiValueError("Missing the required parameter `name` when calling `get_cluster_custom_object`")

        collection_formats = {}

        # URL template substitutions.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  #
noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_cluster_custom_object_scale(self, group, version, plural, name, **kwargs): # noqa: E501 """get_cluster_custom_object_scale # noqa: E501 read scale of the specified custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_cluster_custom_object_scale(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.get_cluster_custom_object_scale_with_http_info(group, version, plural, name, **kwargs) # noqa: E501 def get_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, **kwargs): # noqa: E501 """get_cluster_custom_object_scale # noqa: E501 read scale of the specified custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_cluster_custom_object_scale_with_http_info(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_cluster_custom_object_scale" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `get_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `get_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `get_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `get_cluster_custom_object_scale`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = 
local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}/scale', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_cluster_custom_object_status(self, group, version, plural, name, **kwargs): # noqa: E501 """get_cluster_custom_object_status # noqa: E501 read status of the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_cluster_custom_object_status(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. 
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.get_cluster_custom_object_status_with_http_info(group, version, plural, name, **kwargs) # noqa: E501 def get_cluster_custom_object_status_with_http_info(self, group, version, plural, name, **kwargs): # noqa: E501 """get_cluster_custom_object_status # noqa: E501 read status of the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_cluster_custom_object_status_with_http_info(group, version, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_cluster_custom_object_status" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `get_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `get_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `get_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `get_cluster_custom_object_status`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: 
path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}/status', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_namespaced_custom_object(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object # noqa: E501 Returns a namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. 
(required) :param str name: the custom object's name (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.get_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, **kwargs) # noqa: E501 def get_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object # noqa: E501 Returns a namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_namespaced_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `get_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 
local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `get_namespaced_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_namespaced_custom_object_scale(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object_scale # noqa: E501 read scale of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object_scale(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.get_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, **kwargs) # noqa: E501 def get_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object_scale # noqa: E501 read scale of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. 
For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_namespaced_custom_object_scale" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object_scale`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object_scale`") # noqa: E501 # verify the required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required 
parameter `namespace` when calling `get_namespaced_custom_object_scale`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object_scale`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `get_namespaced_custom_object_scale`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # 
noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_namespaced_custom_object_status(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object_status # noqa: E501 read status of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object_status(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.get_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, **kwargs) # noqa: E501 def get_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, **kwargs): # noqa: E501 """get_namespaced_custom_object_status # noqa: E501 read status of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'name' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_namespaced_custom_object_status" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `get_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `get_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `get_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `get_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required 
parameter `name` when calling `get_namespaced_custom_object_status`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def list_cluster_custom_object(self, group, version, plural, **kwargs): # noqa: E501 """list_cluster_custom_object # noqa: E501 list or watch cluster scoped custom objects # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_custom_object(group, version, plural, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str pretty: If 'true', then the output is pretty printed. :param bool allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True return self.list_cluster_custom_object_with_http_info(group, version, plural, **kwargs) # noqa: E501 def list_cluster_custom_object_with_http_info(self, group, version, plural, **kwargs): # noqa: E501 """list_cluster_custom_object # noqa: E501 list or watch cluster scoped custom objects # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_custom_object_with_http_info(group, version, plural, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str pretty: If 'true', then the output is pretty printed. :param bool allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. 
If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. """ local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'pretty', 'allow_watch_bookmarks', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'resource_version_match', 'timeout_seconds', 'watch' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method list_cluster_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `list_cluster_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `list_cluster_custom_object`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `list_cluster_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = 
local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 query_params = [] if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501 query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501 if 'allow_watch_bookmarks' in local_var_params and local_var_params['allow_watch_bookmarks'] is not None: # noqa: E501 query_params.append(('allowWatchBookmarks', local_var_params['allow_watch_bookmarks'])) # noqa: E501 if '_continue' in local_var_params and local_var_params['_continue'] is not None: # noqa: E501 query_params.append(('continue', local_var_params['_continue'])) # noqa: E501 if 'field_selector' in local_var_params and local_var_params['field_selector'] is not None: # noqa: E501 query_params.append(('fieldSelector', local_var_params['field_selector'])) # noqa: E501 if 'label_selector' in local_var_params and local_var_params['label_selector'] is not None: # noqa: E501 query_params.append(('labelSelector', local_var_params['label_selector'])) # noqa: E501 if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501 query_params.append(('limit', local_var_params['limit'])) # noqa: E501 if 'resource_version' in local_var_params and local_var_params['resource_version'] is not None: # noqa: E501 query_params.append(('resourceVersion', local_var_params['resource_version'])) # noqa: E501 if 'resource_version_match' in local_var_params and local_var_params['resource_version_match'] is not None: # noqa: E501 query_params.append(('resourceVersionMatch', local_var_params['resource_version_match'])) # noqa: E501 if 'timeout_seconds' in local_var_params and local_var_params['timeout_seconds'] is not None: # noqa: E501 query_params.append(('timeoutSeconds', local_var_params['timeout_seconds'])) # noqa: E501 if 'watch' in local_var_params and local_var_params['watch'] is not None: # noqa: E501 query_params.append(('watch', 
local_var_params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/json;stream=watch']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def list_custom_object_for_all_namespaces(self, group, version, resource_plural, **kwargs): # noqa: E501 """list_custom_object_for_all_namespaces # noqa: E501 list or watch namespace scoped custom objects # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_custom_object_for_all_namespaces(group, version, resource_plural, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str resource_plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str pretty: If 'true', then the output is pretty printed. :param bool allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. 
Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. 
If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.list_custom_object_for_all_namespaces_with_http_info(group, version, resource_plural, **kwargs) # noqa: E501 def list_custom_object_for_all_namespaces_with_http_info(self, group, version, resource_plural, **kwargs): # noqa: E501 """list_custom_object_for_all_namespaces # noqa: E501 list or watch namespace scoped custom objects # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_custom_object_for_all_namespaces_with_http_info(group, version, resource_plural, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: The custom resource's group name (required) :param str version: The custom resource's version (required) :param str resource_plural: The custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str pretty: If 'true', then the output is pretty printed. 
:param bool allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. 
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. 
It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'resource_plural', 'pretty', 'allow_watch_bookmarks', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'resource_version_match', 'timeout_seconds', 'watch' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method list_custom_object_for_all_namespaces" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `list_custom_object_for_all_namespaces`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `list_custom_object_for_all_namespaces`") # noqa: E501 # verify the required parameter 'resource_plural' is set if self.api_client.client_side_validation and ('resource_plural' not in local_var_params or # noqa: E501 local_var_params['resource_plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `resource_plural` when calling `list_custom_object_for_all_namespaces`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'resource_plural' in local_var_params: path_params['resource_plural'] = local_var_params['resource_plural'] # noqa: E501 query_params 
= [] if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501 query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501 if 'allow_watch_bookmarks' in local_var_params and local_var_params['allow_watch_bookmarks'] is not None: # noqa: E501 query_params.append(('allowWatchBookmarks', local_var_params['allow_watch_bookmarks'])) # noqa: E501 if '_continue' in local_var_params and local_var_params['_continue'] is not None: # noqa: E501 query_params.append(('continue', local_var_params['_continue'])) # noqa: E501 if 'field_selector' in local_var_params and local_var_params['field_selector'] is not None: # noqa: E501 query_params.append(('fieldSelector', local_var_params['field_selector'])) # noqa: E501 if 'label_selector' in local_var_params and local_var_params['label_selector'] is not None: # noqa: E501 query_params.append(('labelSelector', local_var_params['label_selector'])) # noqa: E501 if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501 query_params.append(('limit', local_var_params['limit'])) # noqa: E501 if 'resource_version' in local_var_params and local_var_params['resource_version'] is not None: # noqa: E501 query_params.append(('resourceVersion', local_var_params['resource_version'])) # noqa: E501 if 'resource_version_match' in local_var_params and local_var_params['resource_version_match'] is not None: # noqa: E501 query_params.append(('resourceVersionMatch', local_var_params['resource_version_match'])) # noqa: E501 if 'timeout_seconds' in local_var_params and local_var_params['timeout_seconds'] is not None: # noqa: E501 query_params.append(('timeoutSeconds', local_var_params['timeout_seconds'])) # noqa: E501 if 'watch' in local_var_params and local_var_params['watch'] is not None: # noqa: E501 query_params.append(('watch', local_var_params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` 
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/json;stream=watch'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/{resource_plural}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def list_namespaced_custom_object(self, group, version, namespace, plural, **kwargs):  # noqa: E501
        """list_namespaced_custom_object  # noqa: E501

        list or watch namespace scoped custom objects  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_namespaced_custom_object(group, version, namespace, plural, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: The custom resource's group name (required)
        :param str version: The custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: The custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :param bool allow_watch_bookmarks: request watch events with type
            \"BOOKMARK\"; servers that do not implement bookmarks may ignore
            this flag.
        :param str _continue: opaque continue token from a previous paginated
            list result; an expired token yields a 410 ResourceExpired error.
            Not supported when watch is true.
        :param str field_selector: selector restricting returned objects by
            their fields. Defaults to everything.
        :param str label_selector: selector restricting returned objects by
            their labels. Defaults to everything.
        :param int limit: maximum number of responses for a list call; when
            more items exist the server sets `continue` on the list metadata
            for retrieving the next page. Not supported if watch is true.
        :param str resource_version: resourceVersion the list/watch should
            start from; semantics depend on resource_version_match.
        :param str resource_version_match: how resourceVersion is applied to
            list calls. See
            https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions
            for details. Defaults to unset.
        :param int timeout_seconds: timeout for the list/watch call,
            regardless of any activity or inactivity.
        :param bool watch: watch for changes and return them as a stream of
            add, update, and remove notifications.
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # return only the deserialized body (drop status code and headers).
        kwargs['_return_http_data_only'] = True
        return self.list_namespaced_custom_object_with_http_info(group, version, namespace, plural, **kwargs)  # noqa: E501

    def list_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, **kwargs):  # noqa: E501
        """list_namespaced_custom_object  # noqa: E501

        list or watch namespace scoped custom objects  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_namespaced_custom_object_with_http_info(group, version, namespace, plural, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: The custom resource's group name (required)
        :param str version: The custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: The custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :param bool allow_watch_bookmarks: request watch events with type
            \"BOOKMARK\"; servers that do not implement bookmarks may ignore
            this flag.
        :param str _continue: opaque continue token from a previous paginated
            list result; an expired token yields a 410 ResourceExpired error.
            Not supported when watch is true.
        :param str field_selector: selector restricting returned objects by
            their fields. Defaults to everything.
        :param str label_selector: selector restricting returned objects by
            their labels. Defaults to everything.
        :param int limit: maximum number of responses for a list call; when
            more items exist the server sets `continue` on the list metadata
            for retrieving the next page. Not supported if watch is true.
        :param str resource_version: resourceVersion the list/watch should
            start from; semantics depend on resource_version_match.
        :param str resource_version_match: how resourceVersion is applied to
            list calls. See
            https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions
            for details. Defaults to unset.
        :param int timeout_seconds: timeout for the list/watch call,
            regardless of any activity or inactivity.
        :param bool watch: watch for changes and return them as a stream of
            add, update, and remove notifications.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        # Every argument this endpoint understands; anything else passed in
        # kwargs is a caller error and is rejected below.
        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'pretty',
            'allow_watch_bookmarks',
            '_continue',
            'field_selector',
            'label_selector',
            'limit',
            'resource_version',
            'resource_version_match',
            'timeout_seconds',
            'watch'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_namespaced_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                        local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `list_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                        local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `list_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                        local_var_params['namespace'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `namespace` when calling `list_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                        local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `list_namespaced_custom_object`")  # noqa: E501

        collection_formats = {}

        # Path template substitutions for /apis/{group}/{version}/namespaces/{namespace}/{plural}
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501

        # Translate snake_case python arguments to the camelCase query
        # parameter names the Kubernetes API expects.
        query_params = []
        if 'pretty' in local_var_params and local_var_params['pretty'] is not None:  # noqa: E501
            query_params.append(('pretty', local_var_params['pretty']))  # noqa: E501
        if 'allow_watch_bookmarks' in local_var_params and local_var_params['allow_watch_bookmarks'] is not None:  # noqa: E501
            query_params.append(('allowWatchBookmarks', local_var_params['allow_watch_bookmarks']))  # noqa: E501
        if '_continue' in local_var_params and local_var_params['_continue'] is not None:  # noqa: E501
            query_params.append(('continue', local_var_params['_continue']))  # noqa: E501
        if 'field_selector' in local_var_params and local_var_params['field_selector'] is not None:  # noqa: E501
            query_params.append(('fieldSelector', local_var_params['field_selector']))  # noqa: E501
        if 'label_selector' in local_var_params and local_var_params['label_selector'] is not None:  # noqa: E501
            query_params.append(('labelSelector', local_var_params['label_selector']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'resource_version' in local_var_params and local_var_params['resource_version'] is not None:  # noqa: E501
            query_params.append(('resourceVersion', local_var_params['resource_version']))  # noqa: E501
        if 'resource_version_match' in local_var_params and local_var_params['resource_version_match'] is not None:  # noqa: E501
            query_params.append(('resourceVersionMatch', local_var_params['resource_version_match']))  # noqa: E501
        if 'timeout_seconds' in local_var_params and local_var_params['timeout_seconds'] is not None:  # noqa: E501
            query_params.append(('timeoutSeconds', local_var_params['timeout_seconds']))  # noqa: E501
        if 'watch' in local_var_params and local_var_params['watch'] is not None:  # noqa: E501
            query_params.append(('watch', local_var_params['watch']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/json;stream=watch'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def patch_cluster_custom_object(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """patch_cluster_custom_object  # noqa: E501

        patch the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.patch_cluster_custom_object(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom object's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to patch.
            (required)
        :param str dry_run: when present, modifications are not persisted.
            Valid values are: - All: all dry run stages will be processed
        :param str field_manager: name associated with the actor or entity
            making these changes; required for apply requests
            (application/apply-patch), optional for other patch types.
        :param str field_validation: how the server handles unknown or
            duplicate fields in the request body. Valid values are:
            Ignore, Warn (default in v1.23+), or Strict. (optional)
        :param bool force: \"force\" Apply requests, re-acquiring conflicting
            fields owned by other managers; must be unset for non-apply
            patch requests.
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # return only the deserialized body (drop status code and headers).
        kwargs['_return_http_data_only'] = True
        return self.patch_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)  # noqa: E501

    def patch_cluster_custom_object_with_http_info(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """patch_cluster_custom_object  # noqa: E501

        patch the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.patch_cluster_custom_object_with_http_info(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom object's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to patch.
            (required)
        :param str dry_run: when present, modifications are not persisted.
            Valid values are: - All: all dry run stages will be processed
        :param str field_manager: name associated with the actor or entity
            making these changes; required for apply requests
            (application/apply-patch), optional for other patch types.
        :param str field_validation: how the server handles unknown or
            duplicate fields in the request body. Valid values are:
            Ignore, Warn (default in v1.23+), or Strict. (optional)
        :param bool force: \"force\" Apply requests, re-acquiring conflicting
            fields owned by other managers; must be unset for non-apply
            patch requests.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        # Every argument this endpoint understands; anything else passed in
        # kwargs is a caller error and is rejected below.
        all_params = [
            'group',
            'version',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation',
            'force'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method patch_cluster_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                        local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                        local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                        local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                        local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                        local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `patch_cluster_custom_object`")  # noqa: E501

        collection_formats = {}

        # Path template substitutions for /apis/{group}/{version}/{plural}/{name}
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        # Translate snake_case python arguments to the camelCase query
        # parameter names the Kubernetes API expects.
        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501
        if 'force' in local_var_params and local_var_params['force'] is not None:  # noqa: E501
            query_params.append(('force', local_var_params['force']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/merge-patch+json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/{plural}/{name}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def patch_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """patch_cluster_custom_object_scale  # noqa: E501

        partially update scale of the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.patch_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: when present, modifications are not persisted.
            Valid values are: - All: all dry run stages will be processed
        :param str field_manager: name associated with the actor or entity
            making these changes; required for apply requests
            (application/apply-patch), optional for other patch types.
        :param str field_validation: how the server handles unknown or
            duplicate fields in the request body. Valid values are:
            Ignore, Warn (default in v1.23+), or Strict. (optional)
        :param bool force: \"force\" Apply requests, re-acquiring conflicting
            fields owned by other managers; must be unset for non-apply
            patch requests.
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # return only the deserialized body (drop status code and headers).
        kwargs['_return_http_data_only'] = True
        return self.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs)  # noqa: E501

    def patch_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """patch_cluster_custom_object_scale  # noqa: E501

        partially update scale of the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default.
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. 
This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name', 'body', 'dry_run', 'field_manager', 'field_validation', 'force' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method patch_cluster_custom_object_scale" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing 
the required parameter `body` when calling `patch_cluster_custom_object_scale`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 if 'force' in local_var_params and local_var_params['force'] is not None: # noqa: E501 query_params.append(('force', local_var_params['force'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}/scale', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, 
files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def patch_cluster_custom_object_status(self, group, version, plural, name, body, **kwargs): # noqa: E501 """patch_cluster_custom_object_status # noqa: E501 partially update status of the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_custom_object_status(group, version, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). 
:param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True return self.patch_cluster_custom_object_status_with_http_info(group, version, plural, name, body, **kwargs) # noqa: E501 def patch_cluster_custom_object_status_with_http_info(self, group, version, plural, name, body, **kwargs): # noqa: E501 """patch_cluster_custom_object_status # noqa: E501 partially update status of the specified cluster scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_custom_object_status_with_http_info(group, version, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. 
Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name', 'body', 'dry_run', 'field_manager', 'field_validation', 'force' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method patch_cluster_custom_object_status" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `patch_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `patch_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `patch_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `patch_cluster_custom_object_status`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise 
ApiValueError("Missing the required parameter `body` when calling `patch_cluster_custom_object_status`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 if 'force' in local_var_params and local_var_params['force'] is not None: # noqa: E501 query_params.append(('force', local_var_params['force'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}/status', 'PATCH', path_params, query_params, header_params, body=body_params, 
post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def patch_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs): # noqa: E501 """patch_namespaced_custom_object # noqa: E501 patch the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: The JSON schema of the Resource to patch. (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. 
This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True return self.patch_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, **kwargs) # noqa: E501 def patch_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, body, **kwargs): # noqa: E501 """patch_namespaced_custom_object # noqa: E501 patch the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: The JSON schema of the Resource to patch. (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). 
:param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'name', 'body', 'dry_run', 'field_manager', 'field_validation', 'force' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method patch_namespaced_custom_object" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise 
ApiValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 if 'force' in local_var_params and local_var_params['force'] is not None: # noqa: E501 query_params.append(('force', local_var_params['force'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` 
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def patch_namespaced_custom_object_scale(self, group, version, namespace, plural, name, body, **kwargs): # noqa: E501 """patch_namespaced_custom_object_scale # noqa: E501 partially update scale of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_custom_object_scale(group, version, namespace, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the decoded response payload; status code and headers
        # are stripped by the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        return self.patch_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, **kwargs)  # noqa: E501

    def patch_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """patch_namespaced_custom_object_scale  # noqa: E501

        partially update scale of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.patch_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param bool force: Force is going to \"force\" Apply requests. Force
            flag must be unset for non-apply patch requests.
        :param _return_http_data_only: response data without head status code
            and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation',
            'force'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject keyword arguments the generated API does not know about.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method patch_namespaced_custom_object_scale" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                       local_var_params['namespace'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object_scale`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501
        if 'force' in local_var_params and local_var_params['force'] is not None:  # noqa: E501
            query_params.append(('force', local_var_params['force']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/merge-patch+json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def patch_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """patch_namespaced_custom_object_status  # noqa: E501

        partially update status of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.patch_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param bool force: Force is going to \"force\" Apply requests. Force
            flag must be unset for non-apply patch requests.
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the decoded response payload; status code and headers
        # are stripped by the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        return self.patch_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs)  # noqa: E501

    def patch_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """patch_namespaced_custom_object_status  # noqa: E501

        partially update status of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.patch_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param bool force: Force is going to \"force\" Apply requests. Force
            flag must be unset for non-apply patch requests.
        :param _return_http_data_only: response data without head status code
            and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation',
            'force'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject keyword arguments the generated API does not know about.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method patch_namespaced_custom_object_status" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `patch_namespaced_custom_object_status`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `patch_namespaced_custom_object_status`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                       local_var_params['namespace'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `namespace` when calling `patch_namespaced_custom_object_status`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `patch_namespaced_custom_object_status`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `patch_namespaced_custom_object_status`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `patch_namespaced_custom_object_status`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501
        if 'force' in local_var_params and local_var_params['force'] is not None:  # noqa: E501
            query_params.append(('force', local_var_params['force']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/merge-patch+json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def replace_cluster_custom_object(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """replace_cluster_custom_object  # noqa: E501

        replace the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.replace_cluster_custom_object(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom object's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to replace.
            (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the decoded response payload; status code and headers
        # are stripped by the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        return self.replace_cluster_custom_object_with_http_info(group, version, plural, name, body, **kwargs)  # noqa: E501

    def replace_cluster_custom_object_with_http_info(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """replace_cluster_custom_object  # noqa: E501

        replace the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.replace_cluster_custom_object_with_http_info(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom object's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to replace.
            (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param _return_http_data_only: response data without head status code
            and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject keyword arguments the generated API does not know about.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_cluster_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/{plural}/{name}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def replace_cluster_custom_object_scale(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """replace_cluster_custom_object_scale  # noqa: E501

        replace scale of the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.replace_cluster_custom_object_scale(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the decoded response payload; status code and headers
        # are stripped by the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        return self.replace_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, **kwargs)  # noqa: E501

    def replace_cluster_custom_object_scale_with_http_info(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """replace_cluster_custom_object_scale  # noqa: E501

        replace scale of the specified cluster scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.replace_cluster_custom_object_scale_with_http_info(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom resource's plural name. For TPRs this
            would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should
            not be persisted. Valid values are: - All: all dry run stages will
            be processed
        :param str field_manager: fieldManager is a name associated with the
            actor or entity that is making these changes.
        :param str field_validation: fieldValidation instructs the server on
            how to handle unknown or duplicate fields in the request: Ignore,
            Warn or Strict. (optional)
        :param _return_http_data_only: response data without head status code
            and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data. Default
            is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be
            a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
""" local_var_params = locals() all_params = [ 'group', 'version', 'plural', 'name', 'body', 'dry_run', 'field_manager', 'field_validation' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method replace_cluster_custom_object_scale" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object_scale`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise 
ApiValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object_scale`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/{group}/{version}/{plural}/{name}/scale', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), 
_request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def replace_cluster_custom_object_status(self, group, version, plural, name, body, **kwargs): # noqa: E501 """replace_cluster_custom_object_status # noqa: E501 replace status of the cluster scoped specified custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_cluster_custom_object_status(group, version, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. 
- Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate to the *_with_http_info variant and return
        # only the deserialized response body (not status/headers).
        kwargs['_return_http_data_only'] = True
        return self.replace_cluster_custom_object_status_with_http_info(group, version, plural, name, body, **kwargs)  # noqa: E501

    def replace_cluster_custom_object_status_with_http_info(self, group, version, plural, name, body, **kwargs):  # noqa: E501
        """replace_cluster_custom_object_status  # noqa: E501

        replace status of the cluster scoped specified custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_cluster_custom_object_status_with_http_info(group, version, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
        :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _return_http_data_only: response data without head status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument this endpoint does not define.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_cluster_custom_object_status" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `replace_cluster_custom_object_status`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `replace_cluster_custom_object_status`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `replace_cluster_custom_object_status`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `replace_cluster_custom_object_status`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `replace_cluster_custom_object_status`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the templated URL path.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        # Optional query-string parameters (only sent when provided).
        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/{plural}/{name}/status', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def replace_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """replace_namespaced_custom_object  # noqa: E501

        replace the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to replace. (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
        :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. 
Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate to the *_with_http_info variant and return
        # only the deserialized response body (not status/headers).
        kwargs['_return_http_data_only'] = True
        return self.replace_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, **kwargs)  # noqa: E501

    def replace_namespaced_custom_object_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """replace_namespaced_custom_object  # noqa: E501

        replace the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_namespaced_custom_object_with_http_info(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: The JSON schema of the Resource to replace. (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
        :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _return_http_data_only: response data without head status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument this endpoint does not define.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_custom_object" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                       local_var_params['namespace'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the templated URL path.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        # Optional query-string parameters (only sent when provided).
        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def replace_namespaced_custom_object_scale(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """replace_namespaced_custom_object_scale  # noqa: E501

        replace scale of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_namespaced_custom_object_scale(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
        :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate to the *_with_http_info variant and return
        # only the deserialized response body (not status/headers).
        kwargs['_return_http_data_only'] = True
        return self.replace_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, **kwargs)  # noqa: E501

    def replace_namespaced_custom_object_scale_with_http_info(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """replace_namespaced_custom_object_scale  # noqa: E501

        replace scale of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_namespaced_custom_object_scale_with_http_info(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
        :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional)
        :param _return_http_data_only: response data without head status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts.
        :return: tuple(object, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        all_params = [
            'group',
            'version',
            'namespace',
            'plural',
            'name',
            'body',
            'dry_run',
            'field_manager',
            'field_validation'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument this endpoint does not define.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_namespaced_custom_object_scale" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'group' is set
        if self.api_client.client_side_validation and ('group' not in local_var_params or  # noqa: E501
                                                       local_var_params['group'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in local_var_params or  # noqa: E501
                                                       local_var_params['version'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'namespace' is set
        if self.api_client.client_side_validation and ('namespace' not in local_var_params or  # noqa: E501
                                                       local_var_params['namespace'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'plural' is set
        if self.api_client.client_side_validation and ('plural' not in local_var_params or  # noqa: E501
                                                       local_var_params['plural'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in local_var_params or  # noqa: E501
                                                       local_var_params['name'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object_scale`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the templated URL path.
        path_params = {}
        if 'group' in local_var_params:
            path_params['group'] = local_var_params['group']  # noqa: E501
        if 'version' in local_var_params:
            path_params['version'] = local_var_params['version']  # noqa: E501
        if 'namespace' in local_var_params:
            path_params['namespace'] = local_var_params['namespace']  # noqa: E501
        if 'plural' in local_var_params:
            path_params['plural'] = local_var_params['plural']  # noqa: E501
        if 'name' in local_var_params:
            path_params['name'] = local_var_params['name']  # noqa: E501

        # Optional query-string parameters (only sent when provided).
        query_params = []
        if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None:  # noqa: E501
            query_params.append(('dryRun', local_var_params['dry_run']))  # noqa: E501
        if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None:  # noqa: E501
            query_params.append(('fieldManager', local_var_params['field_manager']))  # noqa: E501
        if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None:  # noqa: E501
            query_params.append(('fieldValidation', local_var_params['field_validation']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])  # noqa: E501

        # Authentication setting
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/scale', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def replace_namespaced_custom_object_status(self, group, version, namespace, plural, name, body, **kwargs):  # noqa: E501
        """replace_namespaced_custom_object_status  # noqa: E501

        replace status of the specified namespace scoped custom object  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_namespaced_custom_object_status(group, version, namespace, plural, name, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str group: the custom resource's group (required)
        :param str version: the custom resource's version (required)
        :param str namespace: The custom resource's namespace (required)
        :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
        :param str name: the custom object's name (required)
        :param object body: (required)
        :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
        :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. 
The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: object If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True return self.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, **kwargs) # noqa: E501 def replace_namespaced_custom_object_status_with_http_info(self, group, version, namespace, plural, name, body, **kwargs): # noqa: E501 """replace_namespaced_custom_object_status # noqa: E501 replace status of the specified namespace scoped custom object # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_custom_object_status_with_http_info(group, version, namespace, plural, name, body, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str group: the custom resource's group (required) :param str version: the custom resource's version (required) :param str namespace: The custom resource's namespace (required) :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required) :param str name: the custom object's name (required) :param object body: (required) :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :param str field_validation: fieldValidation instructs the server on how to handle objects in the request (POST/PUT/PATCH) containing unknown or duplicate fields. 
Valid values are: - Ignore: This will ignore any unknown fields that are silently dropped from the object, and will ignore all but the last duplicate field that the decoder encounters. This is the default behavior prior to v1.23. - Warn: This will send a warning via the standard warning response header for each unknown field that is dropped from the object, and for each duplicate field that is encountered. The request will still succeed if there are no other errors, and will only persist the last of any duplicate fields. This is the default in v1.23+ - Strict: This will fail the request with a BadRequest error if any unknown fields would be dropped from the object, or if any duplicate fields are present. The error returned from the server will contain all unknown and duplicate fields encountered. (optional) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(object, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread. 
""" local_var_params = locals() all_params = [ 'group', 'version', 'namespace', 'plural', 'name', 'body', 'dry_run', 'field_manager', 'field_validation' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method replace_namespaced_custom_object_status" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'group' is set if self.api_client.client_side_validation and ('group' not in local_var_params or # noqa: E501 local_var_params['group'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `group` when calling `replace_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'version' is set if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 local_var_params['version'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `version` when calling `replace_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'namespace' is set if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `replace_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'plural' is set if self.api_client.client_side_validation and ('plural' not in local_var_params or # noqa: E501 local_var_params['plural'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `plural` when calling `replace_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 
local_var_params['name'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `name` when calling `replace_namespaced_custom_object_status`") # noqa: E501 # verify the required parameter 'body' is set if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 local_var_params['body'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `body` when calling `replace_namespaced_custom_object_status`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in local_var_params: path_params['group'] = local_var_params['group'] # noqa: E501 if 'version' in local_var_params: path_params['version'] = local_var_params['version'] # noqa: E501 if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] # noqa: E501 if 'plural' in local_var_params: path_params['plural'] = local_var_params['plural'] # noqa: E501 if 'name' in local_var_params: path_params['name'] = local_var_params['name'] # noqa: E501 query_params = [] if 'dry_run' in local_var_params and local_var_params['dry_run'] is not None: # noqa: E501 query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501 if 'field_manager' in local_var_params and local_var_params['field_manager'] is not None: # noqa: E501 query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501 if 'field_validation' in local_var_params and local_var_params['field_validation'] is not None: # noqa: E501 query_params.append(('fieldValidation', local_var_params['field_validation'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return 
self.api_client.call_api( '/apis/{group}/{version}/namespaces/{namespace}/{plural}/{name}/status', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
CustomObjectsApi
python
lepture__authlib
authlib/oauth2/rfc6750/token.py
{ "start": 0, "end": 3505 }
class ____: """Bearer token generator which can create the payload for token response by OAuth 2 server. A typical token response would be: .. code-block:: http HTTP/1.1 200 OK Content-Type: application/json;charset=UTF-8 Cache-Control: no-store Pragma: no-cache { "access_token":"mF_9.B5f-4.1JqM", "token_type":"Bearer", "expires_in":3600, "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA" } """ #: default expires_in value DEFAULT_EXPIRES_IN = 3600 #: default expires_in value differentiate by grant_type GRANT_TYPES_EXPIRES_IN = { "authorization_code": 864000, "implicit": 3600, "password": 864000, "client_credentials": 864000, } def __init__( self, access_token_generator, refresh_token_generator=None, expires_generator=None, ): self.access_token_generator = access_token_generator self.refresh_token_generator = refresh_token_generator self.expires_generator = expires_generator def _get_expires_in(self, client, grant_type): if self.expires_generator is None: expires_in = self.GRANT_TYPES_EXPIRES_IN.get( grant_type, self.DEFAULT_EXPIRES_IN ) elif callable(self.expires_generator): expires_in = self.expires_generator(client, grant_type) elif isinstance(self.expires_generator, int): expires_in = self.expires_generator else: expires_in = self.DEFAULT_EXPIRES_IN return expires_in @staticmethod def get_allowed_scope(client, scope): if scope: scope = client.get_allowed_scope(scope) return scope def generate( self, grant_type, client, user=None, scope=None, expires_in=None, include_refresh_token=True, ): """Generate a bearer token for OAuth 2.0 authorization token endpoint. :param client: the client that making the request. :param grant_type: current requested grant_type. :param user: current authorized user. :param expires_in: if provided, use this value as expires_in. :param scope: current requested scope. :param include_refresh_token: should refresh_token be included. 
:return: Token dict """ scope = self.get_allowed_scope(client, scope) access_token = self.access_token_generator( client=client, grant_type=grant_type, user=user, scope=scope ) if expires_in is None: expires_in = self._get_expires_in(client, grant_type) token = { "token_type": "Bearer", "access_token": access_token, } if expires_in: token["expires_in"] = expires_in if include_refresh_token and self.refresh_token_generator: token["refresh_token"] = self.refresh_token_generator( client=client, grant_type=grant_type, user=user, scope=scope ) if scope: token["scope"] = scope return token def __call__( self, grant_type, client, user=None, scope=None, expires_in=None, include_refresh_token=True, ): return self.generate( grant_type, client, user, scope, expires_in, include_refresh_token )
BearerTokenGenerator
python
django__django
tests/generic_views/views.py
{ "start": 1339, "end": 1450 }
class ____(generic.DetailView): queryset = Page.objects.all() template_name_field = "template"
PageDetail
python
walkccc__LeetCode
solutions/3466. Maximum Coin Collection/3466.py
{ "start": 0, "end": 497 }
class ____: def maxCoins(self, lane1: list[int], lane2: list[int]) -> int: n = len(lane1) # dp[i][k] := the maximum number of coins at i-th mile with k switches dp = [[-math.inf] * 3 for _ in range(n)] dp[0][0] = lane1[0] dp[0][1] = lane2[0] for i in range(1, n): dp[i][0] = max(0, dp[i - 1][0]) + lane1[i] dp[i][1] = max(0, dp[i - 1][0], dp[i - 1][1]) + lane2[i] dp[i][2] = max(dp[i - 1][1], dp[i - 1][2]) + lane1[i] return max(map(max, dp))
Solution
python
ray-project__ray
python/ray/_private/thirdparty/pynvml/pynvml.py
{ "start": 66717, "end": 67324 }
class ____(_PrintableStructure): _fields_ = [ ('year', c_uint32), ('month', c_uint16), ('day', c_uint16), ('hour', c_uint16), ('min', c_uint16), ('sec', c_uint16), ('status', c_uint8), ] NVML_GRID_LICENSE_STATE_UNKNOWN = 0 NVML_GRID_LICENSE_STATE_UNINITIALIZED = 1 NVML_GRID_LICENSE_STATE_UNLICENSED_UNRESTRICTED = 2 NVML_GRID_LICENSE_STATE_UNLICENSED_RESTRICTED = 3 NVML_GRID_LICENSE_STATE_UNLICENSED = 4 NVML_GRID_LICENSE_STATE_LICENSED = 5
c_nvmlVgpuLicenseExpiry_t
python
ray-project__ray
python/ray/dashboard/modules/tests/test_dashboard_sdk.py
{ "start": 143, "end": 3257 }
class ____: @pytest.mark.skipif( sys.platform == "win32", reason="File path incorrect on Windows." ) def test_runtime_env_valid(self): config_file_name = os.path.join( os.path.dirname(__file__), "test_config_files", "basic_runtime_env.yaml" ) assert parse_runtime_env_args(runtime_env=config_file_name) == { "py_modules": ["pm1", "pm2"], "working_dir": "wd", } def test_runtime_env_json_valid(self): runtime_env = '{"py_modules": ["pm1", "pm2"], "working_dir": "wd"}' assert parse_runtime_env_args(runtime_env_json=runtime_env) == { "py_modules": ["pm1", "pm2"], "working_dir": "wd", } @pytest.mark.skipif( sys.platform == "win32", reason="File path incorrect on Windows." ) def test_runtime_env_and_json(self): config_file_name = os.path.join( os.path.dirname(__file__), "test_config_files", "basic_runtime_env.yaml" ) runtime_env_json = '{"py_modules": ["pm1", "pm2"], "working_dir": "wd"}' with pytest.raises(ValueError): parse_runtime_env_args( runtime_env=config_file_name, runtime_env_json=runtime_env_json ) def test_working_dir_valid(self): assert parse_runtime_env_args(working_dir="wd") == {"working_dir": "wd"} @pytest.mark.skipif( sys.platform == "win32", reason="File path incorrect on Windows." 
) def test_working_dir_override(self): config_file_name = os.path.join( os.path.dirname(__file__), "test_config_files", "basic_runtime_env.yaml" ) assert parse_runtime_env_args( runtime_env=config_file_name, working_dir="wd2" ) == {"py_modules": ["pm1", "pm2"], "working_dir": "wd2"} runtime_env = '{"py_modules": ["pm1", "pm2"], "working_dir": "wd2"}' assert parse_runtime_env_args( runtime_env_json=runtime_env, working_dir="wd2" ) == {"py_modules": ["pm1", "pm2"], "working_dir": "wd2"} def test_all_none(self): assert parse_runtime_env_args() == {} def test_get_job_submission_client_cluster_info(): # Test that the name for get_job_submission_client_cluster_info stays the # same from ray.dashboard.modules.dashboard_sdk import ( # noqa: F401 get_job_submission_client_cluster_info, ) def test_parse_cluster_address_validation(): """Test that parse_cluster_info validates address schemes.""" # Check that "auto" is rejected with pytest.raises(ValueError): parse_cluster_info("auto") # Check that invalid schemes raise a ValueError invalid_schemes = ["ray"] for scheme in invalid_schemes: with pytest.raises(ValueError): parse_cluster_info(f"{scheme}://localhost:10001") # Check that valid schemes are OK valid_schemes = ["http", "https"] for scheme in valid_schemes: parse_cluster_info(f"{scheme}://localhost:10001") if __name__ == "__main__": sys.exit(pytest.main(["-v", __file__]))
TestParseRuntimeEnvArgs
python
readthedocs__readthedocs.org
readthedocs/builds/tests/test_views.py
{ "start": 3996, "end": 4074 }
class ____(CancelBuildViewTests): pass
CancelBuildViewWithOrganizationsTests
python
huggingface__transformers
src/transformers/models/swin2sr/modeling_swin2sr.py
{ "start": 16110, "end": 16647 }
class ____(nn.Module): def __init__(self, config, dim): super().__init__() self.dense = nn.Linear(dim, dim) self.dropout = nn.Dropout(config.attention_probs_dropout_prob) def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor: hidden_states = self.dense(hidden_states) hidden_states = self.dropout(hidden_states) return hidden_states # Copied from transformers.models.swinv2.modeling_swinv2.Swinv2Attention with Swinv2->Swin2SR
Swin2SRSelfOutput
python
pyca__cryptography
tests/hazmat/primitives/test_ciphers.py
{ "start": 2548, "end": 2725 }
class ____: @pytest.mark.parametrize("size", [7, 129]) def test_gcm_min_max(self, size): with pytest.raises(ValueError): modes.GCM(b"0" * size)
TestGCM
python
pydata__xarray
xarray/tests/test_datatree.py
{ "start": 63642, "end": 65573 }
class ____: def test_noop(self, create_test_datatree: Callable[[], DataTree]) -> None: dt = create_test_datatree() actual = dt.pipe(lambda tree: tree) assert actual.identical(dt) def test_args(self, create_test_datatree: Callable[[], DataTree]) -> None: dt = create_test_datatree() def f(tree: DataTree, x: int, y: int) -> DataTree: return tree.assign( arr_with_attrs=xr.Variable("dim0", [], attrs=dict(x=x, y=y)) ) actual = dt.pipe(f, 1, 2) assert actual["arr_with_attrs"].attrs == dict(x=1, y=2) def test_kwargs(self, create_test_datatree: Callable[[], DataTree]) -> None: dt = create_test_datatree() def f(tree: DataTree, *, x: int, y: int, z: int) -> DataTree: return tree.assign( arr_with_attrs=xr.Variable("dim0", [], attrs=dict(x=x, y=y, z=z)) ) attrs = {"x": 1, "y": 2, "z": 3} actual = dt.pipe(f, **attrs) assert actual["arr_with_attrs"].attrs == attrs def test_args_kwargs(self, create_test_datatree: Callable[[], DataTree]) -> None: dt = create_test_datatree() def f(tree: DataTree, x: int, *, y: int, z: int) -> DataTree: return tree.assign( arr_with_attrs=xr.Variable("dim0", [], attrs=dict(x=x, y=y, z=z)) ) attrs = {"x": 1, "y": 2, "z": 3} actual = dt.pipe(f, attrs["x"], y=attrs["y"], z=attrs["z"]) assert actual["arr_with_attrs"].attrs == attrs def test_named_self(self, create_test_datatree: Callable[[], DataTree]) -> None: dt = create_test_datatree() def f(x: int, tree: DataTree, y: int): tree.attrs.update({"x": x, "y": y}) return tree attrs = {"x": 1, "y": 2} actual = dt.pipe((f, "tree"), **attrs) assert actual is dt and actual.attrs == attrs
TestPipe
python
dagster-io__dagster
python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
{ "start": 20705, "end": 39319 }
class ____: """This class exposes methods on top of the Fivetran REST API.""" def __init__( self, api_key: str, api_secret: str, request_max_retries: int, request_retry_delay: float, disable_schedule_on_trigger: bool, ): self.api_key = api_key self.api_secret = api_secret self.request_max_retries = request_max_retries self.request_retry_delay = request_retry_delay self.disable_schedule_on_trigger = disable_schedule_on_trigger @property def _auth(self) -> HTTPBasicAuth: return HTTPBasicAuth(self.api_key, self.api_secret) @property @cached_method def _log(self) -> logging.Logger: return get_dagster_logger() @property def api_base_url(self) -> str: return f"{FIVETRAN_API_BASE}/{FIVETRAN_API_VERSION}" @property def api_connector_url(self) -> str: return f"{self.api_base_url}/{FIVETRAN_CONNECTOR_ENDPOINT}" def _make_connector_request( self, method: str, endpoint: str, data: Optional[str] = None ) -> Mapping[str, Any]: return self._make_and_handle_request( method, f"{FIVETRAN_CONNECTOR_ENDPOINT}/{endpoint}", data ) def _make_and_handle_request( self, method: str, endpoint: str, data: Optional[str] = None, params: Optional[Mapping[str, Any]] = None, ) -> Mapping[str, Any]: """Creates, sends and handles a request to the desired Fivetran API endpoint. Args: method (str): The http method to use for this request (e.g. "POST", "GET", "PATCH"). endpoint (str): The Fivetran API endpoint to send this request to. data (Optional[str]): JSON-formatted data string to be included in the request. params (Optional[Dict[str, Any]]): JSON-formatted query params to be included in the request. Returns: Dict[str, Any]: Parsed json data from the response to this request. 
""" response = self._make_request(method=method, endpoint=endpoint, data=data, params=params) try: response.raise_for_status() resp_dict = response.json() return resp_dict["data"] if "data" in resp_dict else resp_dict except RequestException as e: raise Failure( f"Max retries ({self.request_max_retries}) exceeded with url: {response.url}. Caused by {e}" ) def _make_request( self, method: str, endpoint: str, data: Optional[str] = None, params: Optional[Mapping[str, Any]] = None, ) -> requests.Response: """Creates and sends a request to the desired Fivetran API endpoint. Args: method (str): The http method to use for this request (e.g. "POST", "GET", "PATCH"). endpoint (str): The Fivetran API endpoint to send this request to. data (Optional[str]): JSON-formatted data string to be included in the request. params (Optional[Dict[str, Any]]): JSON-formatted query params to be included in the request. Returns: Optional[requests.Response]: The `requests.Response` object for the request. """ url = f"{self.api_base_url}/{endpoint}" headers = { "User-Agent": f"dagster-fivetran/{__version__}", "Content-Type": "application/json;version=2", } num_retries = 0 while True: try: response = requests.request( method=method, url=url, headers=headers, auth=self._auth, data=data, params=params, timeout=int(os.getenv("DAGSTER_FIVETRAN_API_REQUEST_TIMEOUT", "60")), ) response.raise_for_status() return response except RequestException as e: self._log.error("Request to Fivetran API failed: %s", e) if num_retries == self.request_max_retries: return response # type: ignore num_retries += 1 time.sleep(self.request_retry_delay) def get_connector_details(self, connector_id: str) -> Mapping[str, Any]: """Gets details about a given connector from the Fivetran API. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. Returns: Dict[str, Any]: Parsed json data from the response to this request. 
""" return self._make_connector_request(method="GET", endpoint=connector_id) def list_connectors_for_group(self, group_id: str) -> Sequence[Mapping[str, Any]]: """Fetches a list of all connectors for a given group from the Fivetran API. Args: group_id (str): The Fivetran Group ID. Returns: List[Dict[str, Any]]: A List of parsed json data from the response to this request. """ results = [] cursor = None while True: data = self._make_and_handle_request( method="GET", endpoint=f"groups/{group_id}/connectors", params={ "limit": DAGSTER_FIVETRAN_LIST_CONNECTIONS_FOR_GROUP_INDIVIDUAL_REQUEST_LIMIT, **({"cursor": cursor} if cursor else {}), }, ) connectors = data["items"] cursor = data.get("nextCursor") results.extend(connectors) if not cursor: break return results def get_schema_config_for_connector( self, connector_id: str, raise_on_not_found_error: bool = True ) -> Mapping[str, Any]: """Fetches the connector schema config for a given connector from the Fivetran API. Args: connector_id (str): The Fivetran Connector ID. raise_on_not_found_error (bool): Whether to raise an exception if a 404 error is encountered. Defaults to True. Returns: Dict[str, Any]: Parsed json data from the response to this request. """ response = self._make_request("GET", f"connectors/{connector_id}/schemas") try: response.raise_for_status() resp_dict = response.json() return resp_dict["data"] if "data" in resp_dict else resp_dict except RequestException as e: # In some cases, the schema config doesn't exist, # even if the connector is connected and the schema status is ready. # The Fivetran API request fails with a 404 error in that case. if ( not raise_on_not_found_error and e.response is not None and e.response.status_code == 404 ): self._log.warning( f"Schema config was not found for connector with ID {connector_id}." ) return {} else: # If the conditions are not met, we raise the error as we do for other endpoints. 
raise Failure( f"Max retries ({self.request_max_retries}) exceeded with url: {response.url}. Caused by {e}" ) def get_destination_details(self, destination_id: str) -> Mapping[str, Any]: """Fetches details about a given destination from the Fivetran API. Args: destination_id (str): The Fivetran Destination ID. Returns: Dict[str, Any]: Parsed json data from the response to this request. """ return self._make_and_handle_request("GET", f"destinations/{destination_id}") def get_groups(self) -> Mapping[str, Any]: """Fetches all groups from the Fivetran API. Returns: Dict[str, Any]: Parsed json data from the response to this request. """ return self._make_and_handle_request("GET", "groups") def update_schedule_type_for_connector( self, connector_id: str, schedule_type: str ) -> Mapping[str, Any]: """Updates the schedule type property of the connector to either "auto" or "manual". Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. schedule_type (str): Either "auto" (to turn the schedule on) or "manual" (to turn it off). Returns: Dict[str, Any]: Parsed json data representing the API response. """ schedule_types = {s for s in FivetranConnectorScheduleType} if schedule_type not in schedule_types: check.failed( f"The schedule_type for connector {connector_id} must be in {schedule_types}: " f"got '{schedule_type}'" ) return self._make_connector_request( method="PATCH", endpoint=connector_id, data=json.dumps({"schedule_type": schedule_type}) ) def get_columns_config_for_table( self, connector_id: str, schema_name: str, table_name: str ) -> Mapping[str, Any]: """Fetches the source table columns config for a given table from the Fivetran API. Args: connector_id (str): The Fivetran Connector ID. schema_name (str): The Fivetran Schema name. table_name (str): The Fivetran Table name. Returns: Dict[str, Any]: Parsed json data from the response to this request. 
""" return self._make_connector_request( method="GET", endpoint=f"{connector_id}/schemas/{schema_name}/tables/{table_name}/columns", ) def start_sync(self, connector_id: str) -> None: """Initiates a sync of a Fivetran connector. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. """ request_fn = partial( self._make_connector_request, method="POST", endpoint=f"{connector_id}/force" ) self._start_sync(request_fn=request_fn, connector_id=connector_id) def start_resync( self, connector_id: str, resync_parameters: Optional[Mapping[str, Sequence[str]]] = None ) -> None: """Initiates a historical sync of all data for multiple schema tables within a Fivetran connector. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. resync_parameters (Optional[Dict[str, List[str]]]): Optional resync parameters to send to the Fivetran API. An example payload can be found here: https://fivetran.com/docs/rest-api/connectors#request_7 """ request_fn = partial( self._make_connector_request, method="POST", endpoint=( f"{connector_id}/schemas/tables/resync" if resync_parameters is not None else f"{connector_id}/resync" ), data=json.dumps(resync_parameters) if resync_parameters is not None else None, ) self._start_sync(request_fn=request_fn, connector_id=connector_id) def _start_sync(self, request_fn: Callable[[], Mapping[str, Any]], connector_id: str) -> None: connector = FivetranConnector.from_connector_details( connector_details=self.get_connector_details(connector_id) ) connector.validate_syncable() if self.disable_schedule_on_trigger: self._log.info(f"Disabling Fivetran sync schedule for connector {connector_id}.") self.update_schedule_type_for_connector(connector_id, "manual") request_fn() self._log.info( f"Sync initialized for connector {connector_id}. 
View this sync in the Fivetran" " UI: " + connector.url ) def poll_sync( self, connector_id: str, previous_sync_completed_at: datetime, poll_interval: float = DEFAULT_POLL_INTERVAL, poll_timeout: Optional[float] = None, ) -> Mapping[str, Any]: """Given a Fivetran connector and the timestamp at which the previous sync completed, poll until the next sync completes. The previous sync completion time is necessary because the only way to tell when a sync completes is when this value changes. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. previous_sync_completed_at (datetime.datetime): The datetime of the previous completed sync (successful or otherwise) for this connector, prior to running this method. poll_interval (float): The time (in seconds) that will be waited between successive polls. poll_timeout (float): The maximum time that will wait before this operation is timed out. By default, this will never time out. Returns: Dict[str, Any]: Parsed json data representing the API response. """ poll_start = datetime.now() while True: connector_details = self.get_connector_details(connector_id) connector = FivetranConnector.from_connector_details( connector_details=connector_details ) self._log.info(f"Polled '{connector_id}'. Status: [{connector.sync_state}]") if connector.last_sync_completed_at > previous_sync_completed_at: break if poll_timeout and datetime.now() > poll_start + timedelta(seconds=poll_timeout): raise Failure( f"Sync for connector '{connector_id}' timed out after " f"{datetime.now() - poll_start}." ) # Sleep for the configured time interval before polling again. 
time.sleep(poll_interval) if not connector.is_last_sync_successful: raise Failure( f"Sync for connector '{connector_id}' failed!", metadata={ "connector_details": MetadataValue.json(connector_details), "log_url": MetadataValue.url(connector.url), }, ) return connector_details def sync_and_poll( self, connector_id: str, poll_interval: float = DEFAULT_POLL_INTERVAL, poll_timeout: Optional[float] = None, ) -> Optional[FivetranOutput]: """Initializes a sync operation for the given connector, and polls until it completes. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. poll_interval (float): The time (in seconds) that will be waited between successive polls. poll_timeout (float): The maximum time that will wait before this operation is timed out. By default, this will never time out. Returns: Optional[FivetranOutput]: Returns a :py:class:`~FivetranOutput` object containing details about the connector and the tables it synced. If the connector is not synced, None is returned. """ return self._sync_and_poll( sync_fn=self.start_sync, connector_id=connector_id, poll_interval=poll_interval, poll_timeout=poll_timeout, ) def resync_and_poll( self, connector_id: str, poll_interval: float = DEFAULT_POLL_INTERVAL, poll_timeout: Optional[float] = None, resync_parameters: Optional[Mapping[str, Sequence[str]]] = None, ) -> Optional[FivetranOutput]: """Initializes a historical resync operation for the given connector, and polls until it completes. Args: connector_id (str): The Fivetran Connector ID. You can retrieve this value from the "Setup" tab of a given connector in the Fivetran UI. resync_parameters (Dict[str, List[str]]): The payload to send to the Fivetran API. This should be a dictionary with schema names as the keys and a list of tables to resync as the values. poll_interval (float): The time (in seconds) that will be waited between successive polls. 
poll_timeout (float): The maximum time that will wait before this operation is timed out. By default, this will never time out. Returns: Optional[FivetranOutput]: Returns a :py:class:`~FivetranOutput` object containing details about the connector and the tables it synced. If the connector is not synced, None is returned. """ return self._sync_and_poll( sync_fn=partial(self.start_resync, resync_parameters=resync_parameters), connector_id=connector_id, poll_interval=poll_interval, poll_timeout=poll_timeout, ) def _sync_and_poll( self, sync_fn: Callable, connector_id: str, poll_interval: float = DEFAULT_POLL_INTERVAL, poll_timeout: Optional[float] = None, ) -> Optional[FivetranOutput]: schema_config_details = self.get_schema_config_for_connector(connector_id) connector = FivetranConnector.from_connector_details( connector_details=self.get_connector_details(connector_id) ) if connector.paused: self._log.warning( f"Cannot sync connector {connector.name} with ID {connector.id} because the connector is paused. " "Make sure the connector is enabled before syncing it." ) return None sync_fn(connector_id=connector_id) final_details = self.poll_sync( connector_id=connector_id, previous_sync_completed_at=connector.last_sync_completed_at, poll_interval=poll_interval, poll_timeout=poll_timeout, ) return FivetranOutput(connector_details=final_details, schema_config=schema_config_details)
FivetranClient
python
scrapy__scrapy
scrapy/http/response/text.py
{ "start": 10485, "end": 11212 }
class ____(ValueError): """ Raised when a URL cannot be obtained from a Selector """ def _url_from_selector(sel: parsel.Selector) -> str: if isinstance(sel.root, str): # e.g. ::attr(href) result return strip_html5_whitespace(sel.root) if not hasattr(sel.root, "tag"): raise _InvalidSelector(f"Unsupported selector: {sel}") if sel.root.tag not in ("a", "link"): raise _InvalidSelector( f"Only <a> and <link> elements are supported; got <{sel.root.tag}>" ) href = sel.root.get("href") if href is None: raise _InvalidSelector(f"<{sel.root.tag}> element has no href attribute: {sel}") return strip_html5_whitespace(href)
_InvalidSelector
python
gevent__gevent
src/gevent/events.py
{ "start": 5853, "end": 6316 }
class ____(object): """ The implementation of :class:`IPeriodicMonitorThreadStartedEvent`. .. versionchanged:: 24.11.1 Now actually implements the promised interface. """ #: The name of the setuptools entry point that is called when this #: event is emitted. ENTRY_POINT_NAME = 'gevent.plugins.hub.periodic_monitor_thread_started' def __init__(self, monitor): self.monitor = monitor
PeriodicMonitorThreadStartedEvent
python
django-haystack__django-haystack
test_haystack/elasticsearch5_tests/test_backend.py
{ "start": 6277, "end": 6719 }
class ____( indexes.SearchIndex, indexes.Indexable ): text = indexes.CharField(model_attr="foo", document=True) name = indexes.CharField(model_attr="author") pub_date = indexes.DateTimeField(model_attr="pub_date") text_auto = indexes.EdgeNgramField(model_attr="foo") name_auto = indexes.EdgeNgramField(model_attr="author") def get_model(self): return MockModel
Elasticsearch5AutocompleteMockModelSearchIndex
python
pytorch__pytorch
torch/_inductor/codegen/cpu_device_op_overrides.py
{ "start": 135, "end": 694 }
class ____(DeviceOpOverrides): def import_get_raw_stream_as(self, name: str) -> str: return dedent( """ def get_raw_stream(_): return 0 """ ) def cpp_kernel_type(self) -> str: return "void*" def set_device(self, device_idx: int) -> str: return "pass" def synchronize(self) -> str: return "pass" def device_guard(self, device_idx: int) -> str: return "pass" register_device_op_overrides("cpu", CpuDeviceOpOverrides())
CpuDeviceOpOverrides
python
pyca__cryptography
tests/hazmat/primitives/test_pkcs12.py
{ "start": 1399, "end": 10948 }
class ____: def _test_load_pkcs12_ec_keys(self, filename, password, backend): cert, key = _load_ca(backend) assert isinstance(key, ec.EllipticCurvePrivateKey) parsed_key, parsed_cert, parsed_more_certs = load_vectors_from_file( os.path.join("pkcs12", filename), lambda derfile: load_key_and_certificates( derfile.read(), password, backend ), mode="rb", ) assert isinstance(parsed_key, ec.EllipticCurvePrivateKey) assert parsed_cert == cert assert parsed_key.private_numbers() == key.private_numbers() assert parsed_more_certs == [] @pytest.mark.parametrize( ("filename", "password"), [ ("cert-key-aes256cbc.p12", b"cryptography"), ("cert-none-key-none.p12", b"cryptography"), ], ) def test_load_pkcs12_ec_keys(self, filename, password, backend): self._test_load_pkcs12_ec_keys(filename, password, backend) @pytest.mark.parametrize( ("filename", "password"), [ ("cert-rc2-key-3des.p12", b"cryptography"), ("no-password.p12", None), ], ) @pytest.mark.supported( only_if=lambda backend: backend.cipher_supported( RC2(b"0" * 16), CBC(b"0" * 8) ), skip_message="Does not support RC2", ) def test_load_pkcs12_ec_keys_rc2(self, filename, password, backend): if filename == "no-password.p12": ctx: typing.Any = pytest.warns(UserWarning) else: ctx = contextlib.nullcontext() with ctx: self._test_load_pkcs12_ec_keys(filename, password, backend) def test_load_key_and_cert_cert_only(self, backend): cert, _ = _load_ca(backend) parsed_key, parsed_cert, parsed_more_certs = load_vectors_from_file( os.path.join("pkcs12", "cert-aes256cbc-no-key.p12"), lambda data: load_key_and_certificates( data.read(), b"cryptography", backend ), mode="rb", ) assert parsed_cert is None assert parsed_key is None assert parsed_more_certs == [cert] def test_load_key_and_certificates_key_only(self, backend): _, key = _load_ca(backend) assert isinstance(key, ec.EllipticCurvePrivateKey) parsed_key, parsed_cert, parsed_more_certs = load_vectors_from_file( os.path.join("pkcs12", "no-cert-key-aes256cbc.p12"), lambda data: 
load_key_and_certificates( data.read(), b"cryptography", backend ), mode="rb", ) assert isinstance(parsed_key, ec.EllipticCurvePrivateKey) assert parsed_key.private_numbers() == key.private_numbers() assert parsed_cert is None assert parsed_more_certs == [] def test_load_pkcs12_key_only(self, backend): _, key = _load_ca(backend) assert isinstance(key, ec.EllipticCurvePrivateKey) p12 = load_vectors_from_file( os.path.join("pkcs12", "no-cert-key-aes256cbc.p12"), lambda data: load_pkcs12(data.read(), b"cryptography", backend), mode="rb", ) assert isinstance(p12.key, ec.EllipticCurvePrivateKey) assert p12.key.private_numbers() == key.private_numbers() assert p12.cert is None assert p12.additional_certs == [] def test_non_bytes(self, backend): with pytest.raises(TypeError): load_key_and_certificates( b"irrelevant", object(), # type: ignore[arg-type] backend, ) def test_not_a_pkcs12(self, backend): with pytest.raises(ValueError): load_key_and_certificates(b"invalid", b"pass", backend) def test_invalid_password(self, backend): with pytest.raises(ValueError): load_vectors_from_file( os.path.join("pkcs12", "cert-key-aes256cbc.p12"), lambda derfile: load_key_and_certificates( derfile.read(), b"invalid", backend ), mode="rb", ) def test_buffer_protocol(self, backend): p12 = load_vectors_from_file( os.path.join("pkcs12", "cert-key-aes256cbc.p12"), lambda derfile: derfile.read(), mode="rb", ) p12buffer = bytearray(p12) parsed_key, parsed_cert, parsed_more_certs = load_key_and_certificates( p12buffer, bytearray(b"cryptography"), backend ) assert parsed_key is not None assert parsed_cert is not None assert parsed_more_certs == [] @pytest.mark.parametrize( ("name", "name2", "name3", "filename", "password"), [ (None, None, None, "no-name-no-pwd.p12", None), (b"name", b"name2", b"name3", "name-all-no-pwd.p12", None), (b"name", None, None, "name-1-no-pwd.p12", None), (None, b"name2", b"name3", "name-2-3-no-pwd.p12", None), (None, b"name2", None, "name-2-no-pwd.p12", None), (None, 
None, b"name3", "name-3-no-pwd.p12", None), ( "☺".encode(), "ä".encode(), "ç".encode(), "name-unicode-no-pwd.p12", None, ), (None, None, None, "no-name-pwd.p12", b"password"), (b"name", b"name2", b"name3", "name-all-pwd.p12", b"password"), (b"name", None, None, "name-1-pwd.p12", b"password"), (None, b"name2", b"name3", "name-2-3-pwd.p12", b"password"), (None, b"name2", None, "name-2-pwd.p12", b"password"), (None, None, b"name3", "name-3-pwd.p12", b"password"), ( "☺".encode(), "ä".encode(), "ç".encode(), "name-unicode-pwd.p12", b"password", ), ], ) def test_load_object( self, filename, name, name2, name3, password, backend ): cert, key = _load_ca(backend) cert2 = _load_cert( backend, os.path.join("x509", "cryptography.io.pem") ) cert3 = _load_cert(backend, os.path.join("x509", "letsencryptx3.pem")) pkcs12 = load_vectors_from_file( os.path.join("pkcs12", filename), lambda derfile: load_pkcs12(derfile.read(), password, backend), mode="rb", ) assert pkcs12.cert is not None assert pkcs12.cert.certificate == cert assert pkcs12.cert.friendly_name == name assert isinstance(pkcs12.key, ec.EllipticCurvePrivateKey) assert pkcs12.key.private_numbers() == key.private_numbers() assert len(pkcs12.additional_certs) == 2 assert pkcs12.additional_certs[0].certificate == cert2 assert pkcs12.additional_certs[0].friendly_name == name2 assert pkcs12.additional_certs[1].certificate == cert3 assert pkcs12.additional_certs[1].friendly_name == name3 @pytest.mark.parametrize( ("name2", "name3", "filename", "password"), [ (None, None, "no-cert-no-name-no-pwd.p12", None), (b"name2", b"name3", "no-cert-name-all-no-pwd.p12", None), (b"name2", None, "no-cert-name-2-no-pwd.p12", None), (None, b"name3", "no-cert-name-3-no-pwd.p12", None), ( "☹".encode(), "ï".encode(), "no-cert-name-unicode-no-pwd.p12", None, ), (None, None, "no-cert-no-name-pwd.p12", b"password"), (b"name2", b"name3", "no-cert-name-all-pwd.p12", b"password"), (b"name2", None, "no-cert-name-2-pwd.p12", b"password"), (None, b"name3", 
"no-cert-name-3-pwd.p12", b"password"), ( "☹".encode(), "ï".encode(), "no-cert-name-unicode-pwd.p12", b"password", ), ], ) def test_load_object_no_cert_key( self, filename, name2, name3, password, backend ): cert2 = _load_cert( backend, os.path.join("x509", "cryptography.io.pem") ) cert3 = _load_cert(backend, os.path.join("x509", "letsencryptx3.pem")) pkcs12 = load_vectors_from_file( os.path.join("pkcs12", filename), lambda derfile: load_pkcs12(derfile.read(), password, backend), mode="rb", ) assert pkcs12.cert is None assert pkcs12.key is None assert len(pkcs12.additional_certs) == 2 assert pkcs12.additional_certs[0].certificate == cert2 assert pkcs12.additional_certs[0].friendly_name == name2 assert pkcs12.additional_certs[1].certificate == cert3 assert pkcs12.additional_certs[1].friendly_name == name3 def _load_cert(backend, path): return load_vectors_from_file( path, lambda pemfile: x509.load_pem_x509_certificate( pemfile.read(), backend ), mode="rb", ) def _load_ca(backend): cert = _load_cert(backend, os.path.join("pkcs12", "ca", "ca.pem")) key = load_vectors_from_file( os.path.join("pkcs12", "ca", "ca_key.pem"), lambda pemfile: load_pem_private_key(pemfile.read(), None, backend), mode="rb", ) return cert, key @pytest.mark.skip_fips( reason="PKCS12 unsupported in FIPS mode. So much bad crypto in it." )
TestPKCS12Loading
python
apache__airflow
providers/pinecone/tests/unit/pinecone/operators/test_pinecone.py
{ "start": 1420, "end": 3543 }
class ____: def test_vector_ingest_operator_execution(self, dummy_dag): """ Test the execution of the PineconeVectorIngestOperator. Ensures that the upsert method on the hook is correctly called. """ test_vectors = [("id1", [1.0, 2.0, 3.0], {"meta": "data"})] task = PineconeIngestOperator( task_id="ingest_vectors", index_name="test_index", input_vectors=test_vectors, dag=dummy_dag, ) with patch( "airflow.providers.pinecone.operators.pinecone.PineconeIngestOperator.hook", new_callable=MockPineconeHook, ) as mock_hook_instance: mock_hook_instance.upsert = Mock() task.execute(context={}) mock_hook_instance.upsert.assert_called_once_with( index_name="test_index", vectors=test_vectors, namespace="", batch_size=None, ) def test_vector_ingest_operator_with_extra_args(self, dummy_dag): """ Test the execution of the PineconeVectorIngestOperator with additional parameters. """ test_vectors = [("id1", [1.0, 2.0, 3.0], {"meta": "data"})] task = PineconeIngestOperator( task_id="ingest_vectors", index_name="test_index", input_vectors=test_vectors, namespace="test_namespace", batch_size=100, upsert_kwargs={"custom_param": "value"}, dag=dummy_dag, ) with patch( "airflow.providers.pinecone.operators.pinecone.PineconeIngestOperator.hook", new_callable=MockPineconeHook, ) as mock_hook_instance: mock_hook_instance.upsert = Mock() task.execute(context={}) mock_hook_instance.upsert.assert_called_once_with( index_name="test_index", vectors=test_vectors, namespace="test_namespace", batch_size=100, custom_param="value", )
TestPineconeVectorIngestOperator
python
dagster-io__dagster
python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_assets.py
{ "start": 121572, "end": 124970 }
class ____(ReadonlyGraphQLContextTestMatrix): def test_asset_wipe_read_only(self, graphql_context: WorkspaceRequestContext): """Test that asset wipe fails in read-only context.""" # First create a materialization define_asset_job("integers_asset_job", [integers_asset]).resolve( asset_graph=AssetGraph.from_assets([integers_asset]) ).execute_in_process( partition_key="0", instance=graphql_context.instance, ) asset_keys = graphql_context.instance.all_asset_keys() assert AssetKey("integers_asset") in asset_keys # Attempt to wipe should fail result = execute_dagster_graphql( graphql_context, WIPE_ASSETS, variables={"assetPartitionRanges": [{"assetKey": {"path": ["integers_asset"]}}]}, ) assert result.data assert result.data["wipeAssets"] assert result.data["wipeAssets"]["__typename"] == "UnauthorizedError" def test_asset_wipe_per_code_location_permissions( self, graphql_context: WorkspaceRequestContext ): """Test that asset wipe works when there are permissions in one code location but not others.""" location_name = main_repo_location_name() # Create a context with permissions in one location with define_out_of_process_context( os.path.join(os.path.dirname(__file__), "repo.py"), "test_repo", graphql_context.instance, read_only=True, read_only_locations={location_name: False}, # not read-only in this specific location ) as read_only_context: assert read_only_context.read_only # Should succeed for assets in the location with permissions result = execute_dagster_graphql( read_only_context, WIPE_ASSETS, variables={"assetPartitionRanges": [{"assetKey": {"path": ["integers_asset"]}}]}, ) assert result.data assert result.data["wipeAssets"] assert result.data["wipeAssets"]["__typename"] == "AssetWipeSuccess" # Should fail for assets not in any location result = execute_dagster_graphql( read_only_context, WIPE_ASSETS, variables={"assetPartitionRanges": [{"assetKey": {"path": ["doesnotexist"]}}]}, ) assert result.data assert result.data["wipeAssets"] assert 
result.data["wipeAssets"]["__typename"] == "UnauthorizedError" # unauthorized for assets in a different location with define_out_of_process_context( os.path.join(os.path.dirname(__file__), "repo.py"), "test_repo", graphql_context.instance, read_only=True, read_only_locations={"other_location_name": False}, ) as read_only_context: assert read_only_context.read_only result = execute_dagster_graphql( read_only_context, WIPE_ASSETS, variables={"assetPartitionRanges": [{"assetKey": {"path": ["integers_asset"]}}]}, ) assert result.data assert result.data["wipeAssets"] assert result.data["wipeAssets"]["__typename"] == "UnauthorizedError"
TestAssetWipeReadOnly
python
jupyterlab__jupyterlab
jupyterlab/extensions/manager.py
{ "start": 5814, "end": 6245 }
class ____: """Extension manager metadata. Attributes: name: Extension manager name to be displayed can_install: Whether the extension manager can un-/install packages (default False) install_path: Installation path for the extensions (default None); e.g. environment path """ name: str can_install: bool = False install_path: Optional[str] = None @dataclass
ExtensionManagerMetadata
python
simonw__datasette
datasette/utils/__init__.py
{ "start": 34298, "end": 35188 }
class ____(Exception): pass # Can replace with sqlite-utils when I add that dependency def find_spatialite(): for path in SPATIALITE_PATHS: if os.path.exists(path): return path raise SpatialiteNotFound async def initial_path_for_datasette(datasette): """Return suggested path for opening this Datasette, based on number of DBs and tables""" databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"]) if len(databases) == 1: db_name = next(iter(databases.keys())) path = datasette.urls.database(db_name) # Does this DB only have one table? db = next(iter(databases.values())) tables = await db.table_names() if len(tables) == 1: path = datasette.urls.table(db_name, tables[0]) else: path = datasette.urls.instance() return path
SpatialiteNotFound
python
walkccc__LeetCode
solutions/2706. Buy Two Chocolates/2706.py
{ "start": 0, "end": 337 }
class ____: def buyChoco(self, prices: list[int], money: int) -> int: min1 = math.inf min2 = math.inf for price in prices: if price <= min1: min2 = min1 min1 = price elif price < min2: min2 = price minCost = min1 + min2 return money if minCost > money else money - minCost
Solution
python
ZoranPandovski__al-go-rithms
greedy/huffman_coding/python/huff_comp.py
{ "start": 24, "end": 298 }
class ____: def __init__(self, char, freq): self.char = char self.freq = freq self.left = None self.right = None def __cmp__(self, other): if(other == None): return -1 if(not isinstance(other, HeapNode)): return -1 return self.freq > other.freq
HeapNode
python
django__django
tests/admin_views/tests.py
{ "start": 176059, "end": 193601 }
class ____(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser( username="super", password="secret", email="super@example.com" ) cls.s1 = Section.objects.create(name="Test section") cls.a1 = Article.objects.create( content="<p>Middle content</p>", date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, ) cls.a2 = Article.objects.create( content="<p>Oldest content</p>", date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1, ) cls.a3 = Article.objects.create( content="<p>Newest content</p>", date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1, ) cls.p1 = PrePopulatedPost.objects.create( title="A Long Title", published=True, slug="a-long-title" ) cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True) cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False) cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_inheritance(self): Podcast.objects.create( name="This Week in Django", release_date=datetime.date.today() ) response = self.client.get(reverse("admin:admin_views_podcast_changelist")) self.assertEqual(response.status_code, 200) def test_inheritance_2(self): Vodcast.objects.create(name="This Week in Django", released=True) response = self.client.get(reverse("admin:admin_views_vodcast_changelist")) self.assertEqual(response.status_code, 200) def test_custom_pk(self): Language.objects.create(iso="en", name="English", english_name="English") response = self.client.get(reverse("admin:admin_views_language_changelist")) self.assertEqual(response.status_code, 200) def test_changelist_input_html(self): response = self.client.get(reverse("admin:admin_views_person_changelist")) # 2 inputs per object(the field and the hidden id field) = 6 # 4 management hidden fields = 4 # 4 action inputs (3 regular checkboxes, 1 checkbox to select all) # main form submit button = 1 # 
search field and search submit button = 2 # CSRF field = 2 # field to track 'select all' across paginated views = 1 # 6 + 4 + 4 + 1 + 2 + 2 + 1 = 20 inputs self.assertContains(response, "<input", count=21) # 1 select per object = 3 selects self.assertContains(response, "<select", count=4) def test_post_messages(self): # Ticket 12707: Saving inline editable should not show admin # action warnings data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } response = self.client.post( reverse("admin:admin_views_person_changelist"), data, follow=True ) self.assertEqual(len(response.context["messages"]), 1) def test_post_submission(self): data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } self.client.post(reverse("admin:admin_views_person_changelist"), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) # test a filtered page data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per1.pk), "form-0-gender": "1", "form-0-alive": "checked", "form-1-id": str(self.per3.pk), "form-1-gender": "1", "form-1-alive": "checked", "_save": "Save", } self.client.post( reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data ) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) # test a searched page data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per1.pk), "form-0-gender": "1", 
"_save": "Save", } self.client.post( reverse("admin:admin_views_person_changelist") + "?q=john", data ) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) def test_non_field_errors(self): """ Non-field errors are displayed for each of the forms in the changelist's formset. """ fd1 = FoodDelivery.objects.create( reference="123", driver="bill", restaurant="thai" ) fd2 = FoodDelivery.objects.create( reference="456", driver="bill", restaurant="india" ) fd3 = FoodDelivery.objects.create( reference="789", driver="bill", restaurant="pizza" ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "pizza", "_save": "Save", } response = self.client.post( reverse("admin:admin_views_fooddelivery_changelist"), data ) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' "with this Driver and Restaurant already exists.</li></ul></td></tr>", 1, html=True, ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", # Same data also. 
"form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "thai", "_save": "Save", } response = self.client.post( reverse("admin:admin_views_fooddelivery_changelist"), data ) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' "with this Driver and Restaurant already exists.</li></ul></td></tr>", 2, html=True, ) def test_non_form_errors(self): # test if non-form errors are handled; ticket #12716 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post( reverse("admin:admin_views_person_changelist"), data ) self.assertContains(response, "Grace is not a Zombie") def test_non_form_errors_is_errorlist(self): # test if non-form errors are correctly handled; ticket #12878 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", "_save": "Save", } response = self.client.post( reverse("admin:admin_views_person_changelist"), data ) non_form_errors = response.context["cl"].formset.non_form_errors() self.assertIsInstance(non_form_errors, ErrorList) self.assertEqual( str(non_form_errors), str(ErrorList(["Grace is not a Zombie"], error_class="nonform")), ) def test_list_editable_ordering(self): collector = Collector.objects.create(id=1, name="Frederick Clegg") Category.objects.create(id=1, order=1, collector=collector) Category.objects.create(id=2, order=2, collector=collector) Category.objects.create(id=3, order=0, collector=collector) Category.objects.create(id=4, order=0, collector=collector) # NB: The order values must be changed so that the items are reordered. 
data = { "form-TOTAL_FORMS": "4", "form-INITIAL_FORMS": "4", "form-MAX_NUM_FORMS": "0", "form-0-order": "14", "form-0-id": "1", "form-0-collector": "1", "form-1-order": "13", "form-1-id": "2", "form-1-collector": "1", "form-2-order": "1", "form-2-id": "3", "form-2-collector": "1", "form-3-order": "0", "form-3-id": "4", "form-3-collector": "1", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post( reverse("admin:admin_views_category_changelist"), data ) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) def test_list_editable_pagination(self): """ Pagination works for list_editable items. """ UnorderedObject.objects.create(id=1, name="Unordered object #1") UnorderedObject.objects.create(id=2, name="Unordered object #2") UnorderedObject.objects.create(id=3, name="Unordered object #3") response = self.client.get( reverse("admin:admin_views_unorderedobject_changelist") ) self.assertContains(response, "Unordered object #3") self.assertContains(response, "Unordered object #2") self.assertNotContains(response, "Unordered object #1") response = self.client.get( reverse("admin:admin_views_unorderedobject_changelist") + "?p=2" ) self.assertNotContains(response, "Unordered object #3") self.assertNotContains(response, "Unordered object #2") self.assertContains(response, "Unordered object #1") def test_list_editable_action_submit(self): # List editable changes should not be executed if the action "Go" # button is used to submit the form. 
data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": "1", "form-1-gender": "2", "form-1-id": "2", "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": "3", "index": "0", "_selected_action": ["3"], "action": ["", "delete_selected"], } self.client.post(reverse("admin:admin_views_person_changelist"), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1) def test_list_editable_action_choices(self): # List editable changes should be executed if the "Save" button is # used to submit the form - any action choices should be ignored. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", "_selected_action": ["1"], "action": ["", "delete_selected"], } self.client.post(reverse("admin:admin_views_person_changelist"), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) def test_list_editable_popup(self): """ Fields should not be list-editable in popups. """ response = self.client.get(reverse("admin:admin_views_person_changelist")) self.assertNotEqual(response.context["cl"].list_editable, ()) response = self.client.get( reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR ) self.assertEqual(response.context["cl"].list_editable, ()) def test_pk_hidden_fields(self): """ hidden pk fields aren't displayed in the table body and their corresponding human-readable value is displayed instead. The hidden pk fields are displayed but separately (not in the table) and only once. """ story1 = Story.objects.create( title="The adventures of Guido", content="Once upon a time in Djangoland..." 
) story2 = Story.objects.create( title="Crouching Tiger, Hidden Python", content="The Python was sneaking into...", ) response = self.client.get(reverse("admin:admin_views_story_changelist")) # Only one hidden field, in a separate place than the table. self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n' "</div>" % (story2.id, story1.id), html=True, ) self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1) self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1) def test_pk_hidden_fields_with_list_display_links(self): """Similarly as test_pk_hidden_fields, but when the hidden pk fields are referenced in list_display_links. Refs #12475. """ story1 = OtherStory.objects.create( title="The adventures of Guido", content="Once upon a time in Djangoland...", ) story2 = OtherStory.objects.create( title="Crouching Tiger, Hidden Python", content="The Python was sneaking into...", ) link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,)) link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,)) response = self.client.get(reverse("admin:admin_views_otherstory_changelist")) # Only one hidden field, in a separate place than the table. 
self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n' "</div>" % (story2.id, story1.id), html=True, ) self.assertContains( response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1, ) self.assertContains( response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1, ) @override_settings(ROOT_URLCONF="admin_views.urls")
AdminViewListEditable
python
palantir__python-language-server
versioneer.py
{ "start": 19477, "end": 52511 }
class ____(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. 
Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. 
Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. 
By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.18) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")}
NotThisMethod
python
pennersr__django-allauth
allauth/account/views.py
{ "start": 47151, "end": 48330 }
class ____(_BaseVerifyPhoneView): @method_decorator(never_cache) def dispatch(self, request, *args, **kwargs): self.process = flows.phone_verification.ChangePhoneVerificationProcess.resume( request ) if not self.process: return HttpResponseRedirect(reverse("account_change_phone")) return super().dispatch(request, *args, **kwargs) def respond_process_succeeded(self, form): return HttpResponseRedirect(reverse("account_change_phone")) def respond_process_failed(self, form): return HttpResponseRedirect(reverse("account_change_phone")) def get_context_data(self, **kwargs): ret = super().get_context_data(**kwargs) ret.update({"cancel_url": reverse("account_change_phone")}) return ret @method_decorator(login_not_required, name="dispatch") def verify_phone(request): if request.user.is_authenticated: return _VerifyPhoneChangeView.as_view()(request) return _VerifyPhoneSignupView.as_view()(request) @method_decorator(login_required, name="dispatch") @method_decorator(rate_limit(action="change_phone"), name="dispatch")
_VerifyPhoneChangeView
python
readthedocs__readthedocs.org
readthedocs/integrations/models.py
{ "start": 14705, "end": 15408 }
class ____(Integration): integration_type_id = Integration.API_WEBHOOK has_sync = False class Meta: proxy = True def save(self, *args, **kwargs): """Ensure model has token data before saving.""" try: token = self.provider_data.get("token") except (AttributeError, TypeError): token = None finally: if token is None: token = default_token() self.provider_data = {"token": token} super().save(*args, **kwargs) @property def token(self): """Get or generate a secret token for authentication.""" return self.provider_data.get("token")
GenericAPIWebhook
python
milvus-io__pymilvus
pymilvus/client/grpc_handler.py
{ "start": 1884, "end": 3921 }
class ____: def __init__(self, conns: object, connection_name: str, kwargs: object) -> None: self.connection_name = connection_name self.conns = conns self._kwargs = kwargs self.is_idle_state = False self.reconnect_lock = threading.Lock() def reset_db_name(self, db_name: str): self._kwargs["db_name"] = db_name def check_state_and_reconnect_later(self): check_after_seconds = 3 logger.debug(f"state is idle, schedule reconnect in {check_after_seconds} seconds") time.sleep(check_after_seconds) if not self.is_idle_state: logger.debug("idle state changed, skip reconnect") return with self.reconnect_lock: logger.info("reconnect on idle state") self.is_idle_state = False try: logger.debug("try disconnecting old connection...") self.conns.disconnect(self.connection_name) except Exception: logger.warning("disconnect failed: {e}") finally: reconnected = False while not reconnected: try: logger.debug("try reconnecting...") self.conns.connect(self.connection_name, **self._kwargs) reconnected = True except Exception as e: logger.warning( f"reconnect failed: {e}, try again after {check_after_seconds} seconds" ) time.sleep(check_after_seconds) logger.info("reconnected") def reconnect_on_idle(self, state: object): logger.debug(f"state change to: {state}") with self.reconnect_lock: if state.value[1] != "idle": self.is_idle_state = False return self.is_idle_state = True threading.Thread(target=self.check_state_and_reconnect_later).start()
ReconnectHandler
python
pandas-dev__pandas
pandas/errors/__init__.py
{ "start": 29849, "end": 30312 }
class ____(OSError): """ Error is raised when executing SQL with bad syntax or SQL that throws an error. Raised by :func:`.pandas.read_sql` when a bad SQL statement is passed in. See Also -------- read_sql : Read SQL query or database table into a DataFrame. Examples -------- >>> from sqlite3 import connect >>> conn = connect(":memory:") >>> pd.read_sql("select * test", conn) # doctest: +SKIP """
DatabaseError
python
pypa__pip
src/pip/_internal/models/scheme.py
{ "start": 322, "end": 575 }
class ____: """A Scheme holds paths which are used as the base directories for artifacts associated with a Python package. """ __slots__ = SCHEME_KEYS platlib: str purelib: str headers: str scripts: str data: str
Scheme
python
tensorflow__tensorflow
tensorflow/python/autograph/operators/data_structures.py
{ "start": 7735, "end": 10001 }
class ____( collections.namedtuple('ListPopOpts', ('element_dtype', 'element_shape'))): pass def list_pop(list_, i, opts): """The list pop function. Note: it is unspecified where list_ will be mutated or not. If list_ is a TensorFlow entity, it will not be typically mutated. If list_ is a plain list, it will be. In general, if the list is mutated then the return value should point to the original entity. Args: list_: An entity that supports pop semantics. i: Optional index to pop from. May be None. opts: A ListPopOpts. Returns: Tuple (x, out_list_): out_list_: same as list_, after the removal was performed. x: the removed element value. Raises: ValueError: if list_ is not of a known list-like type or the operation is not supported for that type. """ assert isinstance(opts, ListPopOpts) if isinstance(list_, tensor_array_ops.TensorArray): raise ValueError('TensorArray does not support item removal') elif tensor_util.is_tf_type(list_): if list_.dtype == dtypes.variant: return _tf_tensor_list_pop(list_, i, opts) else: raise ValueError( 'tensor lists are expected to be Tensors with dtype=tf.variant,' ' instead found %s' % list_) else: return _py_list_pop(list_, i) def _tf_tensor_list_pop(list_, i, opts): """Overload of list_pop that stages a Tensor list pop.""" if i is not None: raise NotImplementedError('tensor lists only support removing from the end') if opts.element_dtype is None: raise ValueError('cannot pop from a list without knowing its element ' 'type; use set_element_type to annotate it') if opts.element_shape is None: raise ValueError('cannot pop from a list without knowing its element ' 'shape; use set_element_type to annotate it') list_out, x = list_ops.tensor_list_pop_back( list_, element_dtype=opts.element_dtype) x.set_shape(opts.element_shape) return list_out, x def _py_list_pop(list_, i): """Overload of list_pop that executes a Python list append.""" if i is None: x = list_.pop() else: x = list_.pop(i) return list_, x # TODO(mdan): Look into reducing 
duplication between all these containers.
ListPopOpts
python
dagster-io__dagster
python_modules/libraries/dagster-dg-cli/dagster_dg_cli/api_layer/api/run_event.py
{ "start": 387, "end": 2356 }
class ____: """API for run events operations.""" client: IGraphQLClient def get_events( self, run_id: str, event_type: Optional[str] = None, step_key: Optional[str] = None, limit: int = 100, after_cursor: Optional[str] = None, ) -> "RunEventList": """Get run events with filtering options.""" from dagster_dg_cli.api_layer.schemas.run_event import ( DgApiErrorInfo, DgApiRunEvent, RunEventLevel, RunEventList, ) events_data = get_run_events_via_graphql( self.client, run_id=run_id, limit=limit, after_cursor=after_cursor, event_type=event_type, step_key=step_key, ) # Helper function to convert error data to DgApiErrorInfo recursively def _convert_error_info(error_data: Optional[dict]) -> Optional[DgApiErrorInfo]: if not error_data: return None return DgApiErrorInfo( message=error_data.get("message", ""), className=error_data.get("className"), stack=error_data.get("stack"), cause=_convert_error_info(error_data.get("cause")), ) # Convert to Pydantic models events = [ DgApiRunEvent( run_id=e["runId"], message=e["message"], timestamp=e["timestamp"], level=RunEventLevel[e["level"]], step_key=e.get("stepKey"), event_type=e.get("eventType"), error=_convert_error_info(e.get("error")), ) for e in events_data["events"] ] return RunEventList( items=events, total=len(events), cursor=events_data.get("cursor"), has_more=events_data.get("hasMore", False), )
DgApiRunEventApi
python
getsentry__sentry
src/sentry/workflow_engine/endpoints/validators/error_detector.py
{ "start": 560, "end": 3012 }
class ____(BaseDetectorTypeValidator): fingerprinting_rules = serializers.CharField(required=False, allow_blank=True, allow_null=True) resolve_age = EmptyIntegerField( required=False, allow_null=True, help_text="Automatically resolve an issue if it hasn't been seen for this many hours. Set to `0` to disable auto-resolve.", ) def validate_type(self, value: str): type = super().validate_type(value) if type.slug != "error": raise serializers.ValidationError("Detector type must be error") return type def validate_condition_group(self, value): if value is not None: raise serializers.ValidationError( "Condition group is not supported for error detectors" ) return value def validate_fingerprinting_rules(self, value): if not value: return value try: FingerprintingConfig.from_config_string(value) except InvalidFingerprintingConfig as e: raise serializers.ValidationError(str(e)) return value def validate_resolve_age(self, value): if value is not None and value < 0: raise serializers.ValidationError("Resolve age must be a non-negative number") return value def create(self, validated_data): with transaction.atomic(router.db_for_write(Detector)): detector = Detector.objects.create( project_id=self.context["project"].id, name=validated_data["name"], # no workflow_condition_group type=validated_data["type"].slug, config={}, ) project: Project = detector.project # update configs, which are project options. continue using them for config in validated_data: if config in Detector.error_detector_project_options: project.update_option( Detector.error_detector_project_options[config], validated_data[config] ) create_audit_entry( request=self.context["request"], organization=self.context["organization"], target_object=detector.id, event=audit_log.get_event_id("DETECTOR_ADD"), data=detector.get_audit_log_data(), ) return detector
ErrorDetectorValidator
python
pydata__xarray
asv_bench/benchmarks/dataset.py
{ "start": 78, "end": 441 }
class ____: def setup(self): self.ds = Dataset( { "a": (("x", "y"), np.ones((300, 400))), "b": (("x", "y"), np.ones((300, 400))), } ) self.mean = self.ds.mean() self.std = self.ds.std() def time_normalize(self): (self.ds - self.mean) / self.std
DatasetBinaryOp
python
doocs__leetcode
solution/2500-2599/2535.Difference Between Element Sum and Digit Sum of an Array/Solution.py
{ "start": 0, "end": 226 }
class ____: def differenceOfSum(self, nums: List[int]) -> int: x = y = 0 for v in nums: x += v while v: y += v % 10 v //= 10 return x - y
Solution
python
kamyu104__LeetCode-Solutions
Python/transform-to-chessboard.py
{ "start": 145, "end": 933 }
class ____(object): def movesToChessboard(self, board): """ :type board: List[List[int]] :rtype: int """ N = len(board) result = 0 for count in (collections.Counter(map(tuple, board)), \ collections.Counter(itertools.izip(*board))): if len(count) != 2 or \ sorted(count.values()) != [N/2, (N+1)/2]: return -1 seq1, seq2 = count if any(x == y for x, y in itertools.izip(seq1, seq2)): return -1 begins = [int(seq1.count(1) * 2 > N)] if N%2 else [0, 1] result += min(sum(int(i%2 != v) for i, v in enumerate(seq1, begin)) \ for begin in begins) / 2 return result
Solution
python
charliermarsh__ruff
crates/ruff_linter/resources/test/fixtures/ruff/RUF023.py
{ "start": 4652, "end": 5150 }
class ____: __slots__ = ( "look", ( "a_veeeeeeeeeeeeeeeeeeery_long_parenthesized_item" ), ) __slots__ = ( "b", (( "c" )), "a" ) __slots__ = ("don't" "care" "about", "__slots__" "with", "concatenated" "strings") ############################################################ # Trailing-comma edge cases that should be flagged and fixed ############################################################
Klass5
python
spyder-ide__spyder
spyder/plugins/editor/utils/decoration.py
{ "start": 1191, "end": 6551 }
class ____(Manager, QObject): """ Manages the collection of TextDecoration that have been set on the editor widget. """ def __init__(self, editor): super().__init__(editor) self._decorations = {"misc": []} # Timer to not constantly update decorations. self.update_timer = QTimer(self) self.update_timer.setSingleShot(True) self.update_timer.setInterval(UPDATE_TIMEOUT) self.update_timer.timeout.connect( self._update) def add(self, decorations, key="misc"): """ Add text decorations on a CodeEditor instance. Don't add duplicated decorations, and order decorations according draw_order and the size of the selection. Args: decorations (sourcecode.api.TextDecoration) (could be a list) Returns: int: Amount of decorations added. """ if key != "misc" and self._decorations.get(key) is None: self._decorations[key] = [] current_decorations = self._decorations[key] added = 0 if isinstance(decorations, list): not_repeated = set(decorations) - set(current_decorations) current_decorations.extend(list(not_repeated)) self._decorations[key] = current_decorations added = len(not_repeated) elif decorations not in current_decorations: self._decorations[key].append(decorations) added = 1 if added > 0: self.update() return added def add_key(self, key, decorations): """Add decorations to key.""" self._decorations[key] = decorations self.update() def remove(self, decoration, key="misc"): """ Removes a text decoration from the editor. :param decoration: Text decoration to remove :type decoration: spyder.api.TextDecoration update: Bool: should the decorations be updated immediately? 
Set to False to avoid updating several times while removing several decorations """ try: self._decorations[key].remove(decoration) self.update() return True except (ValueError, KeyError): return False def remove_key(self, key): """Remove key""" try: del self._decorations[key] self.update() except KeyError: pass def get(self, key, default=None): """Get a key from decorations.""" return self._decorations.get(key, default) def clear(self): """Removes all text decoration from the editor.""" self._decorations = {"misc": []} self.update() def update(self): """ Update decorations. This starts a timer to update decorations only after UPDATE_TIMEOUT has passed. That avoids multiple calls to _update in a very short amount of time. """ self.update_timer.start() @Slot() def _update(self): """Update editor extra selections with added decorations. NOTE: Update TextDecorations to use editor font, using a different font family and point size could cause unwanted behaviors. """ editor = self.editor if editor is None: return try: font = editor.font() # Get the current visible block numbers first, last = editor.get_buffer_block_numbers() # Update visible decorations visible_decorations = [] for decoration in self._sorted_decorations(): need_update_sel = False cursor = decoration.cursor sel_start = cursor.selectionStart() # This is required to update extra selections from the point # an initial selection was made. 
# Fixes spyder-ide/spyder#14282 if sel_start is not None: doc = cursor.document() block_nb_start = doc.findBlock(sel_start).blockNumber() need_update_sel = first <= block_nb_start <= last block_nb = decoration.cursor.block().blockNumber() if (first <= block_nb <= last or need_update_sel or decoration.kind == 'current_cell'): visible_decorations.append(decoration) try: decoration.format.setFont( font, QTextCharFormat.FontPropertiesSpecifiedOnly) except (TypeError, AttributeError): # Qt < 5.3 decoration.format.setFontFamily(font.family()) decoration.format.setFontPointSize(font.pointSize()) editor.setExtraSelections(visible_decorations) except RuntimeError: # This is needed to fix spyder-ide/spyder#9173. return def __iter__(self): return iter(self._decorations) def __len__(self): return len(self._decorations) def _sorted_decorations(self): """Get all sorted decorations.""" return sorted( [v for key in self._decorations for v in self._decorations[key]], key=order_function )
TextDecorationsManager
python
jmcnamara__XlsxWriter
xlsxwriter/worksheet.py
{ "start": 7769, "end": 8038 }
class ____: """Type to hold user modified properties for a column.""" width: Optional[int] = None column_format: Optional["Format"] = None hidden: bool = False level: int = 0 collapsed: bool = False autofit: bool = False @dataclass
ColumnInfo
python
getsentry__sentry
tests/sentry/utils/test_function_cache.py
{ "start": 736, "end": 4352 }
class ____(TestCase): def assert_called_with_count(self, mock_test_func, text_search: str, count: int): assert ( len([ca for ca in mock_test_func.call_args_list if ca.args[0] == text_search]) == count ) def test(self) -> None: mock_test_func = create_autospec(count_func) mock_test_func.side_effect = count_func decorated_test_func = cache_func_for_models([(CacheModel, arg_extractor)])(mock_test_func) self.assert_called_with_count(mock_test_func, "test", 0) assert decorated_test_func("test") == 0 self.assert_called_with_count(mock_test_func, "test", 1) assert decorated_test_func("test") == 0 self.assert_called_with_count(mock_test_func, "test", 1) CacheModel.objects.create(some_field="test") # Since we're actively refetching the count should go to 2 here self.assert_called_with_count(mock_test_func, "test", 2) assert decorated_test_func("test") == 1 self.assert_called_with_count(mock_test_func, "test", 2) CacheModel.objects.create(some_field="test") self.assert_called_with_count(mock_test_func, "test", 3) assert decorated_test_func("test") == 2 self.assert_called_with_count(mock_test_func, "test", 3) CacheModel.objects.create(some_field="another_val") self.assert_called_with_count(mock_test_func, "test", 3) assert decorated_test_func("test") == 2 def test_no_recalculate(self) -> None: mock_test_func = create_autospec(count_func) mock_test_func.side_effect = count_func decorated_test_func = cache_func_for_models( [(CacheModel, arg_extractor)], recalculate=False )(mock_test_func) self.assert_called_with_count(mock_test_func, "test", 0) assert decorated_test_func("test") == 0 self.assert_called_with_count(mock_test_func, "test", 1) CacheModel.objects.create(some_field="test") # Since we're not actively refetching the count should remain the same here self.assert_called_with_count(mock_test_func, "test", 1) assert decorated_test_func("test") == 1 self.assert_called_with_count(mock_test_func, "test", 2) CacheModel.objects.create(some_field="test") 
self.assert_called_with_count(mock_test_func, "test", 2) assert decorated_test_func("test") == 2 self.assert_called_with_count(mock_test_func, "test", 3) CacheModel.objects.create(some_field="another_val") self.assert_called_with_count(mock_test_func, "test", 3) assert decorated_test_func("test") == 2 def test_batch(self) -> None: mock_test_func = create_autospec(count_func) mock_test_func.side_effect = count_func decorated_test_func = cache_func_for_models([(CacheModel, arg_extractor)])(mock_test_func) results = decorated_test_func.batch([("test1",), ("test2",), ("test3",)]) assert results == [0, 0, 0] assert mock_test_func.call_count == 3 results = decorated_test_func.batch([("test1",), ("test2",), ("test3",)]) assert results == [0, 0, 0] assert mock_test_func.call_count == 3 results = decorated_test_func.batch([("test1",), ("test4",), ("test3",)]) assert results == [0, 0, 0] assert mock_test_func.call_count == 4 CacheModel.objects.create(some_field="test1") results = decorated_test_func.batch([("test1",), ("test2",), ("test3",)]) assert results == [1, 0, 0] assert mock_test_func.call_count == 5
CacheFuncForModelsTest
python
pypa__warehouse
tests/functional/manage/test_views.py
{ "start": 4419, "end": 6942 }
class ____: @pytest.mark.usefixtures("_enable_organizations") def test_create_organization_application( self, pyramid_services, user_service, organization_service, db_request, monkeypatch, ): pyramid_services.register_service(user_service, IUserService, None) pyramid_services.register_service( organization_service, IOrganizationService, None ) user = UserFactory.create(name="old name") EmailFactory.create(primary=True, verified=True, public=True, user=user) db_request.user = user db_request.organization_access = True db_request.method = "POST" db_request.path = "/manage/organizations/" db_request.POST = MultiDict( { "name": "psf", "display_name": "Python Software Foundation", "orgtype": "Community", "link_url": "https://www.python.org/psf/", "description": ( "To promote, protect, and advance the Python programming " "language, and to support and facilitate the growth of a " "diverse and international community of Python programmers" ), "usage": ("We plan to host projects owned by the PSF"), "membership_size": "2-5", } ) db_request.registry.settings[ "warehouse.organizations.max_undecided_organization_applications" ] = 3 send_email = pretend.call_recorder(lambda *a, **kw: None) monkeypatch.setattr( org_views, "send_new_organization_requested_email", send_email ) org_views.ManageOrganizationsViews(db_request).create_organization_application() organization_application = ( organization_service.get_organization_applications_by_name( db_request.POST["name"] ) )[0] assert organization_application.name == db_request.POST["name"] assert organization_application.display_name == db_request.POST["display_name"] assert ( organization_application.orgtype == OrganizationType[db_request.POST["orgtype"]] ) assert organization_application.link_url == db_request.POST["link_url"] assert organization_application.description == db_request.POST["description"] assert organization_application.submitted_by == user
TestManageOrganizations
python
jmcnamara__XlsxWriter
xlsxwriter/test/comparison/test_properties01.py
{ "start": 315, "end": 1485 }
class ____(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.set_filename("properties01.xlsx") def test_create_file(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() workbook.set_properties( { "title": "This is an example spreadsheet", "subject": "With document properties", "author": "Someone", "manager": "Dr. Heinz Doofenshmirtz", "company": "of Wolves", "category": "Example spreadsheets", "keywords": "Sample, Example, Properties", "comments": "Created with Perl and Excel::Writer::XLSX", "status": "Quo", } ) worksheet.set_column("A:A", 70) worksheet.write( "A1", "Select 'Office Button -> Prepare -> Properties' to see the file properties.", ) workbook.close() self.assertExcelEqual()
TestCompareXLSXFiles
python
ray-project__ray
doc/source/ray-core/doc_code/limit_pending_tasks.py
{ "start": 70, "end": 879 }
class ____: async def heavy_compute(self): # taking a long time... # await asyncio.sleep(5) return actor = Actor.remote() NUM_TASKS = 1000 result_refs = [] # When NUM_TASKS is large enough, this will eventually OOM. for _ in range(NUM_TASKS): result_refs.append(actor.heavy_compute.remote()) ray.get(result_refs) # __without_backpressure_end__ # __with_backpressure_start__ MAX_NUM_PENDING_TASKS = 100 result_refs = [] for _ in range(NUM_TASKS): if len(result_refs) > MAX_NUM_PENDING_TASKS: # update result_refs to only # track the remaining tasks. ready_refs, result_refs = ray.wait(result_refs, num_returns=1) ray.get(ready_refs) result_refs.append(actor.heavy_compute.remote()) ray.get(result_refs) # __with_backpressure_end__
Actor
python
aio-libs__aiohttp
aiohttp/test_utils.py
{ "start": 2449, "end": 6111 }
class ____(ABC, Generic[_Request]): __test__ = False def __init__( self, *, scheme: str = "", host: str = "127.0.0.1", port: int | None = None, skip_url_asserts: bool = False, socket_factory: Callable[ [str, int, socket.AddressFamily], socket.socket ] = get_port_socket, **kwargs: Any, ) -> None: self.runner: BaseRunner[_Request] | None = None self._root: URL | None = None self.host = host self.port = port or 0 self._closed = False self.scheme = scheme self.skip_url_asserts = skip_url_asserts self.socket_factory = socket_factory async def start_server(self, **kwargs: Any) -> None: if self.runner: return self._ssl = kwargs.pop("ssl", None) self.runner = await self._make_runner(handler_cancellation=True, **kwargs) await self.runner.setup() absolute_host = self.host try: version = ipaddress.ip_address(self.host).version except ValueError: version = 4 if version == 6: absolute_host = f"[{self.host}]" family = socket.AF_INET6 if version == 6 else socket.AF_INET _sock = self.socket_factory(self.host, self.port, family) self.host, self.port = _sock.getsockname()[:2] site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) await site.start() server = site._server assert server is not None sockets = server.sockets assert sockets is not None self.port = sockets[0].getsockname()[1] if not self.scheme: self.scheme = "https" if self._ssl else "http" self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") @abstractmethod async def _make_runner(self, **kwargs: Any) -> BaseRunner[_Request]: """Return a new runner for the server.""" # TODO(PY311): Use Unpack to specify Server kwargs. 
def make_url(self, path: StrOrURL) -> URL: assert self._root is not None url = URL(path) if not self.skip_url_asserts: assert not url.absolute return self._root.join(url) else: return URL(str(self._root) + str(path)) @property def started(self) -> bool: return self.runner is not None @property def closed(self) -> bool: return self._closed @property def handler(self) -> Server[_Request]: # for backward compatibility # web.Server instance runner = self.runner assert runner is not None assert runner.server is not None return runner.server async def close(self) -> None: """Close all fixtures created by the test client. After that point, the TestClient is no longer usable. This is an idempotent function: running close multiple times will not have any additional effects. close is also run when the object is garbage collected, and on exit when used as a context manager. """ if self.started and not self.closed: assert self.runner is not None await self.runner.cleanup() self._root = None self.port = 0 self._closed = True async def __aenter__(self) -> Self: await self.start_server() return self async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, ) -> None: await self.close()
BaseTestServer
python
PrefectHQ__prefect
src/prefect/settings/models/tasks.py
{ "start": 268, "end": 993 }
class ____(PrefectBaseSettings): model_config: ClassVar[SettingsConfigDict] = build_settings_config( ("tasks", "runner") ) thread_pool_max_workers: Optional[int] = Field( default=None, gt=0, description="The maximum number of workers for ThreadPoolTaskRunner.", validation_alias=AliasChoices( AliasPath("thread_pool_max_workers"), "prefect_tasks_runner_thread_pool_max_workers", "prefect_task_runner_thread_pool_max_workers", ), ) process_pool_max_workers: Optional[int] = Field( default=None, gt=0, description="The maximum number of workers for ProcessPoolTaskRunner.", )
TasksRunnerSettings
python
pypa__pip
tests/unit/test_models_wheel.py
{ "start": 215, "end": 9906 }
class ____: def test_std_wheel_pattern(self) -> None: w = Wheel("simple-1.1.1-py2-none-any.whl") assert w.name == "simple" assert w.version == "1.1.1" assert w.build_tag == () assert w.file_tags == frozenset( [Tag(interpreter="py2", abi="none", platform="any")] ) def test_wheel_pattern_multi_values(self) -> None: w = Wheel("simple-1.1-py2.py3-abi1.abi2-any.whl") assert w.name == "simple" assert w.version == "1.1" assert w.build_tag == () assert w.file_tags == frozenset( [ Tag(interpreter="py2", abi="abi1", platform="any"), Tag(interpreter="py2", abi="abi2", platform="any"), Tag(interpreter="py3", abi="abi1", platform="any"), Tag(interpreter="py3", abi="abi2", platform="any"), ] ) def test_wheel_with_build_tag(self) -> None: # pip doesn't do anything with build tags, but theoretically, we might # see one, in this case the build tag = '4' w = Wheel("simple-1.1-4-py2-none-any.whl") assert w.name == "simple" assert w.version == "1.1" assert w.build_tag == (4, "") assert w.file_tags == frozenset( [Tag(interpreter="py2", abi="none", platform="any")] ) def test_single_digit_version(self) -> None: w = Wheel("simple-1-py2-none-any.whl") assert w.version == "1" def test_non_pep440_version(self) -> None: with pytest.raises(InvalidWheelFilename): Wheel("simple-_invalid_-py2-none-any.whl") def test_missing_version_raises(self) -> None: with pytest.raises(InvalidWheelFilename): Wheel("Cython-cp27-none-linux_x86_64.whl") def test_invalid_filename_raises(self) -> None: with pytest.raises(InvalidWheelFilename): Wheel("invalid.whl") def test_supported_single_version(self) -> None: """ Test single-version wheel is known to be supported """ w = Wheel("simple-0.1-py2-none-any.whl") assert w.supported(tags=[Tag("py2", "none", "any")]) def test_supported_multi_version(self) -> None: """ Test multi-version wheel is known to be supported """ w = Wheel("simple-0.1-py2.py3-none-any.whl") assert w.supported(tags=[Tag("py3", "none", "any")]) def test_not_supported_version(self) -> None: """ 
Test unsupported wheel is known to be unsupported """ w = Wheel("simple-0.1-py2-none-any.whl") assert not w.supported(tags=[Tag("py1", "none", "any")]) def test_supported_osx_version(self) -> None: """ Wheels built for macOS 10.6 are supported on 10.9 """ tags = compatibility_tags.get_supported( "27", platforms=["macosx_10_9_intel"], impl="cp" ) w = Wheel("simple-0.1-cp27-none-macosx_10_6_intel.whl") assert w.supported(tags=tags) w = Wheel("simple-0.1-cp27-none-macosx_10_9_intel.whl") assert w.supported(tags=tags) def test_not_supported_osx_version(self) -> None: """ Wheels built for macOS 10.9 are not supported on 10.6 """ tags = compatibility_tags.get_supported( "27", platforms=["macosx_10_6_intel"], impl="cp" ) w = Wheel("simple-0.1-cp27-none-macosx_10_9_intel.whl") assert not w.supported(tags=tags) def test_supported_multiarch_darwin(self) -> None: """ Multi-arch wheels (intel) are supported on components (i386, x86_64) """ universal = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_universal"], impl="cp" ) intel = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_intel"], impl="cp" ) x64 = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_x86_64"], impl="cp" ) i386 = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_i386"], impl="cp" ) ppc = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_ppc"], impl="cp" ) ppc64 = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_ppc64"], impl="cp" ) w = Wheel("simple-0.1-cp27-none-macosx_10_5_intel.whl") assert w.supported(tags=intel) assert w.supported(tags=x64) assert w.supported(tags=i386) assert not w.supported(tags=universal) assert not w.supported(tags=ppc) assert not w.supported(tags=ppc64) w = Wheel("simple-0.1-cp27-none-macosx_10_5_universal.whl") assert w.supported(tags=universal) assert w.supported(tags=intel) assert w.supported(tags=x64) assert w.supported(tags=i386) assert w.supported(tags=ppc) assert w.supported(tags=ppc64) 
def test_not_supported_multiarch_darwin(self) -> None: """ Single-arch wheels (x86_64) are not supported on multi-arch (intel) """ universal = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_universal"], impl="cp" ) intel = compatibility_tags.get_supported( "27", platforms=["macosx_10_5_intel"], impl="cp" ) w = Wheel("simple-0.1-cp27-none-macosx_10_5_i386.whl") assert not w.supported(tags=intel) assert not w.supported(tags=universal) w = Wheel("simple-0.1-cp27-none-macosx_10_5_x86_64.whl") assert not w.supported(tags=intel) assert not w.supported(tags=universal) def test_supported_ios_version(self) -> None: """ Wheels build for iOS 12.3 are supported on iOS 15.1 """ tags = compatibility_tags.get_supported( "313", platforms=["ios_15_1_arm64_iphoneos"], impl="cp" ) w = Wheel("simple-0.1-cp313-none-ios_12_3_arm64_iphoneos.whl") assert w.supported(tags=tags) w = Wheel("simple-0.1-cp313-none-ios_15_1_arm64_iphoneos.whl") assert w.supported(tags=tags) def test_not_supported_ios_version(self) -> None: """ Wheels built for macOS 15.1 are not supported on 12.3 """ tags = compatibility_tags.get_supported( "313", platforms=["ios_12_3_arm64_iphoneos"], impl="cp" ) w = Wheel("simple-0.1-cp313-none-ios_15_1_arm64_iphoneos.whl") assert not w.supported(tags=tags) def test_android(self) -> None: arm_old = compatibility_tags.get_supported( "313", platforms=["android_21_arm64_v8a"], impl="cp" ) arm_new = compatibility_tags.get_supported( "313", platforms=["android_30_arm64_v8a"], impl="cp" ) x86_old = compatibility_tags.get_supported( "313", platforms=["android_21_x86_64"], impl="cp" ) x86_new = compatibility_tags.get_supported( "313", platforms=["android_30_x86_64"], impl="cp" ) w = Wheel("simple-0.1-cp313-none-android_21_arm64_v8a.whl") assert w.supported(arm_old) assert w.supported(arm_new) assert not w.supported(x86_old) assert not w.supported(x86_new) w = Wheel("simple-0.1-cp313-none-android_22_arm64_v8a.whl") assert not w.supported(arm_old) assert 
w.supported(arm_new) assert not w.supported(x86_old) assert not w.supported(x86_new) w = Wheel("simple-0.1-cp313-none-android_31_arm64_v8a.whl") assert not w.supported(arm_old) assert not w.supported(arm_new) assert not w.supported(x86_old) assert not w.supported(x86_new) w = Wheel("simple-0.1-cp313-none-android_20_x86_64.whl") assert not w.supported(arm_old) assert not w.supported(arm_new) assert w.supported(x86_old) assert w.supported(x86_new) w = Wheel("simple-0.1-cp313-none-android_30_x86_64.whl") assert not w.supported(arm_old) assert not w.supported(arm_new) assert not w.supported(x86_old) assert w.supported(x86_new) w = Wheel("simple-0.1-cp313-none-android_31_x86_64.whl") assert not w.supported(arm_old) assert not w.supported(arm_new) assert not w.supported(x86_old) assert not w.supported(x86_new) def test_support_index_min(self) -> None: """ Test results from `support_index_min` """ tags = [ Tag("py2", "none", "TEST"), Tag("py2", "TEST", "any"), Tag("py2", "none", "any"), ] w = Wheel("simple-0.1-py2-none-any.whl") assert w.support_index_min(tags=tags) == 2 w = Wheel("simple-0.1-py2-none-TEST.whl") assert w.support_index_min(tags=tags) == 0 def test_support_index_min__none_supported(self) -> None: """ Test a wheel not supported by the given tags. """ w = Wheel("simple-0.1-py2-none-any.whl") with pytest.raises(ValueError): w.support_index_min(tags=[]) def test_version_underscore_conversion(self) -> None: """ Test that underscore versions are now invalid (no longer converted) """ with pytest.raises(InvalidWheelFilename): Wheel("simple-0.1_1-py2-none-any.whl") def test_invalid_wheel_raises(self) -> None: """ Test that wheel with invalid name now raises exception """ with pytest.raises(InvalidWheelFilename): Wheel("six-1.16.0_build1-py3-none-any.whl")
TestWheelFile
python
pypa__warehouse
tests/unit/admin/views/test_ipaddresses.py
{ "start": 1270, "end": 2446 }
class ____: def test_no_ip_address(self, db_request): db_request.matchdict["ip_address"] = None with pytest.raises(HTTPBadRequest): ip_views.ip_address_detail(db_request) def test_ip_address_not_found(self, db_request): db_request.matchdict["ip_address"] = "69.69.69.69" with pytest.raises(HTTPBadRequest): ip_views.ip_address_detail(db_request) def test_ip_address_found_no_unique_logins(self, db_request): ip_address = IpAddressFactory() db_request.matchdict["ip_address"] = str(ip_address.ip_address) result = ip_views.ip_address_detail(db_request) assert result == {"ip_address": ip_address, "unique_logins": []} def test_ip_address_found_with_unique_logins(self, db_request): ip_address = IpAddressFactory() unique_login = UserUniqueLoginFactory.create( ip_address=str(ip_address.ip_address) ) db_request.matchdict["ip_address"] = str(ip_address.ip_address) result = ip_views.ip_address_detail(db_request) assert result == {"ip_address": ip_address, "unique_logins": [unique_login]}
TestIpAddressDetail
python
airbytehq__airbyte
airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_manifest_only/declarative_component_schema.py
{ "start": 43571, "end": 50686 }
class ____(BaseModel): type: Literal["DatetimeBasedCursor"] cursor_field: str = Field( ..., description="The location of the value on a record that will be used as a bookmark during sync. To ensure no data loss, the API must return records in ascending order based on the cursor field. Nested fields are not supported, so the field must be at the top level of the record. You can use a combination of Add Field and Remove Field transformations to move the nested field to the top.", examples=["created_at", "{{ config['record_cursor'] }}"], title="Cursor Field", ) datetime_format: str = Field( ..., description="The datetime format used to format the datetime values that are sent in outgoing requests to the API. Use placeholders starting with \"%\" to describe the format the API is using. The following placeholders are available:\n * **%s**: Epoch unix timestamp - `1686218963`\n * **%s_as_float**: Epoch unix timestamp in seconds as float with microsecond precision - `1686218963.123456`\n * **%ms**: Epoch unix timestamp (milliseconds) - `1686218963123`\n * **%a**: Weekday (abbreviated) - `Sun`\n * **%A**: Weekday (full) - `Sunday`\n * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)\n * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`\n * **%b**: Month (abbreviated) - `Jan`\n * **%B**: Month (full) - `January`\n * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`\n * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`\n * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`\n * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`\n * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`\n * **%p**: AM/PM indicator\n * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`\n * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`\n * **%f**: Microsecond (zero-padded to 6 digits) - `000000`\n * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`\n * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`\n * **%j**: Day of the 
year (zero-padded) - `001`, `002`, ..., `366`\n * **%U**: Week number of the year (starting Sunday) - `00`, ..., `53`\n * **%W**: Week number of the year (starting Monday) - `00`, ..., `53`\n * **%c**: Date and time - `Tue Aug 16 21:30:00 1988`\n * **%x**: Date standard format - `08/16/1988`\n * **%X**: Time standard format - `21:30:00`\n * **%%**: Literal '%' character\n\n Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).\n", examples=["%Y-%m-%dT%H:%M:%S.%f%z", "%Y-%m-%d", "%s", "%ms", "%s_as_float"], title="Outgoing Datetime Format", ) start_datetime: Union[str, MinMaxDatetime] = Field( ..., description="The datetime that determines the earliest record that should be synced.", examples=["2020-01-1T00:00:00Z", "{{ config['start_time'] }}"], title="Start Datetime", ) cursor_datetime_formats: Optional[List[str]] = Field( None, description="The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the `datetime_format` will be used.", title="Cursor Datetime Formats", ) cursor_granularity: Optional[str] = Field( None, description="Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should be P1D, for %Y-%m-%dT%H:%M:%SZ the granularity should be PT1S. Given this field is provided, `step` needs to be provided as well.", examples=["PT1S"], title="Cursor Granularity", ) end_datetime: Optional[Union[str, MinMaxDatetime]] = Field( None, description="The datetime that determines the last record that should be synced. 
If not provided, `{{ now_utc() }}` will be used.", examples=["2021-01-1T00:00:00Z", "{{ now_utc() }}", "{{ day_delta(-1) }}"], title="End Datetime", ) end_time_option: Optional[RequestOption] = Field( None, description="Optionally configures how the end datetime will be sent in requests to the source API.", title="Inject End Time Into Outgoing HTTP Request", ) is_data_feed: Optional[bool] = Field( None, description="A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.", title="Whether the target API is formatted as a data feed", ) is_client_side_incremental: Optional[bool] = Field( None, description="If the target API endpoint does not take cursor values to filter records and returns all records anyway, the connector with this cursor will filter out records locally, and only emit new records from the last sync, hence incremental. This means that all records would be read from the API, but only new records will be emitted to the destination.", title="Whether the target API does not support filtering and returns all data (the cursor filters records in the client instead of the API side)", ) is_compare_strictly: Optional[bool] = Field( False, description="Set to True if the target API does not accept queries where the start time equal the end time.", title="Whether to skip requests if the start time equals the end time", ) lookback_window: Optional[str] = Field( None, description="Time interval before the start_datetime to read data for, e.g. 
P1M for looking back one month.", examples=["P1D", "P{{ config['lookback_days'] }}D"], title="Lookback Window", ) partition_field_end: Optional[str] = Field( None, description="Name of the partition start time field.", examples=["ending_time"], title="Partition Field End", ) partition_field_start: Optional[str] = Field( None, description="Name of the partition end time field.", examples=["starting_time"], title="Partition Field Start", ) start_time_option: Optional[RequestOption] = Field( None, description="Optionally configures how the start datetime will be sent in requests to the source API.", title="Inject Start Time Into Outgoing HTTP Request", ) step: Optional[str] = Field( None, description="The size of the time window (ISO8601 duration). Given this field is provided, `cursor_granularity` needs to be provided as well.", examples=["P1W", "{{ config['step_increment'] }}"], title="Step", ) parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
DatetimeBasedCursor
python
falconry__falcon
falcon/errors.py
{ "start": 3947, "end": 4054 }
class ____(TypeError): """The WebSocket message payload was not of the expected type."""
PayloadTypeError
python
scikit-learn__scikit-learn
sklearn/neural_network/_stochastic_optimizers.py
{ "start": 148, "end": 1971 }
class ____: """Base (Stochastic) gradient descent optimizer Parameters ---------- learning_rate_init : float, default=0.1 The initial learning rate used. It controls the step-size in updating the weights Attributes ---------- learning_rate : float the current learning rate """ def __init__(self, learning_rate_init=0.1): self.learning_rate_init = learning_rate_init self.learning_rate = float(learning_rate_init) def update_params(self, params, grads): """Update parameters with given gradients Parameters ---------- params : list of length = len(coefs_) + len(intercepts_) The concatenated list containing coefs_ and intercepts_ in MLP model. Used for initializing velocities and updating params grads : list of length = len(params) Containing gradients with respect to coefs_ and intercepts_ in MLP model. So length should be aligned with params """ updates = self._get_updates(grads) for param, update in zip((p for p in params), updates): param += update def iteration_ends(self, time_step): """Perform update to learning rate and potentially other states at the end of an iteration """ pass def trigger_stopping(self, msg, verbose): """Decides whether it is time to stop training Parameters ---------- msg : str Message passed in for verbose output verbose : bool Print message to stdin if True Returns ------- is_stopping : bool True if training needs to stop """ if verbose: print(msg + " Stopping.") return True
BaseOptimizer
python
Netflix__metaflow
metaflow/client/core.py
{ "start": 70581, "end": 80536 }
class ____(MetaflowObject): """ A `Run` represents an execution of a `Flow`. It is a container of `Step`s. Attributes ---------- data : MetaflowData a shortcut to run['end'].task.data, i.e. data produced by this run. successful : bool True if the run completed successfully. finished : bool True if the run completed. finished_at : datetime Time this run finished. code : MetaflowCode Code package for this run (if present). See `MetaflowCode`. trigger : MetaflowTrigger Information about event(s) that triggered this run (if present). See `MetaflowTrigger`. end_task : Task `Task` for the end step (if it is present already). """ _NAME = "run" _PARENT_CLASS = "flow" _CHILD_CLASS = "step" def _iter_filter(self, x): # exclude _parameters step return x.id[0] != "_" def steps(self, *tags: str) -> Iterator[Step]: """ [Legacy function - do not use] Returns an iterator over all `Step` objects in the step. This is an alias to iterating the object itself, i.e. ``` list(Run(...)) == list(Run(...).steps()) ``` Parameters ---------- tags : str No op (legacy functionality) Yields ------ Step `Step` objects in this run. """ return self._filtered_children(*tags) @property def code(self) -> Optional[MetaflowCode]: """ Returns the MetaflowCode object for this run, if present. Code is packed if atleast one `Step` runs remotely, else None is returned. Returns ------- MetaflowCode, optional Code package for this run """ # Note that this can be quite slow in the edge-case where the codepackage is only available # for the last step on the list. Steps are reverse-ordered, so the worst-case scenario is # if the start step executes remotely and every step after that is remote. # # TODO: A more optimized way of figuring out if a run has remote steps (and thus a codepackage) available. # This might require changes to the metadata-service as well. 
for step in self: if step.task: code = step.task.code if code: return code @property def data(self) -> Optional[MetaflowData]: """ Returns a container of data artifacts produced by this run. You can access data produced by this run as follows: ``` print(run.data.my_var) ``` This is a shorthand for `run['end'].task.data`. If the 'end' step has not yet executed, returns None. Returns ------- MetaflowData, optional Container of all artifacts produced by this task """ end = self.end_task if end: return end.data @property def successful(self) -> bool: """ Indicates whether or not the run completed successfully. A run is successful if its 'end' step is successful. Returns ------- bool True if the run completed successfully and False otherwise """ end = self.end_task if end: return end.successful else: return False @property def finished(self) -> bool: """ Indicates whether or not the run completed. A run completed if its 'end' step completed. Returns ------- bool True if the run completed and False otherwise """ end = self.end_task if end: return end.finished else: return False @property def finished_at(self) -> Optional[datetime]: """ Returns the datetime object of when the run finished (successfully or not). The completion time of a run is the same as the completion time of its 'end' step. If the 'end' step has not completed, returns None. Returns ------- datetime, optional Datetime of when the run finished """ end = self.end_task if end: return end.finished_at @property def end_task(self) -> Optional[Task]: """ Returns the Task corresponding to the 'end' step. This returns None if the end step does not yet exist. Returns ------- Task, optional The 'end' task """ try: end_step = self["end"] except KeyError: return None return end_step.task def add_tag(self, tag: str): """ Add a tag to this `Run`. Note that if the tag is already a system tag, it is not added as a user tag, and no error is thrown. Parameters ---------- tag : str Tag to add. 
""" # For backwards compatibility with Netflix's early version of this functionality, # this function shall accept both an individual tag AND iterables of tags. # # Iterable of tags support shall be removed in future once existing # usage has been migrated off. if is_stringish(tag): tag = [tag] return self.replace_tag([], tag) def add_tags(self, tags: Iterable[str]): """ Add one or more tags to this `Run`. Note that if any tag is already a system tag, it is not added as a user tag and no error is thrown. Parameters ---------- tags : Iterable[str] Tags to add. """ return self.replace_tag([], tags) def remove_tag(self, tag: str): """ Remove one tag from this `Run`. Removing a system tag is an error. Removing a non-existent user tag is a no-op. Parameters ---------- tag : str Tag to remove. """ # For backwards compatibility with Netflix's early version of this functionality, # this function shall accept both an individual tag AND iterables of tags. # # Iterable of tags support shall be removed in future once existing # usage has been migrated off. if is_stringish(tag): tag = [tag] return self.replace_tag(tag, []) def remove_tags(self, tags: Iterable[str]): """ Remove one or more tags to this `Run`. Removing a system tag will result in an error. Removing a non-existent user tag is a no-op. Parameters ---------- tags : Iterable[str] Tags to remove. """ return self.replace_tags(tags, []) def replace_tag(self, tag_to_remove: str, tag_to_add: str): """ Remove a tag and add a tag atomically. Removal is done first. The rules for `Run.add_tag` and `Run.remove_tag` also apply here. Parameters ---------- tag_to_remove : str Tag to remove. tag_to_add : str Tag to add. """ # For backwards compatibility with Netflix's early version of this functionality, # this function shall accept both individual tags AND iterables of tags. # # Iterable of tags support shall be removed in future once existing # usage has been migrated off. 
if is_stringish(tag_to_remove): tag_to_remove = [tag_to_remove] if is_stringish(tag_to_add): tag_to_add = [tag_to_add] return self.replace_tags(tag_to_remove, tag_to_add) def replace_tags(self, tags_to_remove: Iterable[str], tags_to_add: Iterable[str]): """ Remove and add tags atomically; the removal is done first. The rules for `Run.add_tag` and `Run.remove_tag` also apply here. Parameters ---------- tags_to_remove : Iterable[str] Tags to remove. tags_to_add : Iterable[str] Tags to add. """ flow_id = self.path_components[0] final_user_tags = self._metaflow.metadata.mutate_user_tags_for_run( flow_id, self.id, tags_to_remove=tags_to_remove, tags_to_add=tags_to_add ) # refresh Run object with the latest tags self._user_tags = frozenset(final_user_tags) self._tags = frozenset([*self._user_tags, *self._system_tags]) def __iter__(self) -> Iterator[Step]: """ Iterate over all children Step of this Run Yields ------ Step A Step in this Run """ for s in super(Run, self).__iter__(): yield s def __getitem__(self, name: str) -> Step: """ Returns the Step object with the step name 'name' Parameters ---------- name : str Step name Returns ------- Step Step for this step name in this Run Raises ------ KeyError If the name does not identify a valid Step object """ return super(Run, self).__getitem__(name) def __getstate__(self): return super(Run, self).__getstate__() def __setstate__(self, state): super(Run, self).__setstate__(state) @property def trigger(self) -> Optional[Trigger]: """ Returns a container of events that triggered this run. This returns None if the run was not triggered by any events. Returns ------- Trigger, optional Container of triggering events """ if "start" in self and self["start"].task: meta = self["start"].task.metadata_dict.get("execution-triggers") if meta: return Trigger(json.loads(meta)) return None
Run
python
apache__airflow
providers/amazon/src/airflow/providers/amazon/aws/operators/ec2.py
{ "start": 16132, "end": 19700 }
class ____(AwsBaseOperator[EC2Hook]): """ Hibernate Amazon EC2 instances. .. seealso:: For more information on how to use this operator, take a look at the guide: :ref:`howto/operator:EC2HibernateInstanceOperator` :param instance_ids: ID of the instance(s) to be hibernated. :param aws_conn_id: The Airflow connection used for AWS credentials. If this is ``None`` or empty then the default boto3 behaviour is used. If running Airflow in a distributed manner and aws_conn_id is None or empty, then default boto3 configuration would be used (and must be maintained on each worker node). :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used. :param verify: Whether or not to verify SSL certificates. See: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html :param poll_interval: Number of seconds to wait before attempting to check state of instance. Only used if wait_for_completion is True. Default is 20. :param max_attempts: Maximum number of attempts when checking state of instance. Only used if wait_for_completion is True. Default is 20. :param wait_for_completion: If True, the operator will wait for the instance to be in the `stopped` state before returning. 
""" aws_hook_class = EC2Hook operator_extra_links = (EC2InstanceDashboardLink(),) template_fields: Sequence[str] = aws_template_fields("instance_ids", "region_name") ui_color = "#eeaa11" ui_fgcolor = "#ffffff" def __init__( self, *, instance_ids: str | list[str], poll_interval: int = 20, max_attempts: int = 20, wait_for_completion: bool = False, **kwargs, ): super().__init__(**kwargs) self.instance_ids = instance_ids self.poll_interval = poll_interval self.max_attempts = max_attempts self.wait_for_completion = wait_for_completion @property def _hook_parameters(self) -> dict[str, Any]: return {**super()._hook_parameters, "api_type": "client_type"} def execute(self, context: Context): if isinstance(self.instance_ids, str): self.instance_ids = [self.instance_ids] self.log.info("Hibernating EC2 instances %s", ", ".join(self.instance_ids)) instances = self.hook.get_instances(instance_ids=self.instance_ids) # Console link is for EC2 dashboard list, not individual instances EC2InstanceDashboardLink.persist( context=context, operator=self, region_name=self.hook.conn_region_name, aws_partition=self.hook.conn_partition, instance_ids=EC2InstanceDashboardLink.format_instance_id_filter(self.instance_ids), ) for instance in instances: hibernation_options = instance.get("HibernationOptions") if not hibernation_options or not hibernation_options["Configured"]: raise AirflowException(f"Instance {instance['InstanceId']} is not configured for hibernation") self.hook.conn.stop_instances(InstanceIds=self.instance_ids, Hibernate=True) if self.wait_for_completion: self.hook.get_waiter("instance_stopped").wait( InstanceIds=self.instance_ids, WaiterConfig={ "Delay": self.poll_interval, "MaxAttempts": self.max_attempts, }, )
EC2HibernateInstanceOperator
python
django__django
tests/test_utils/tests.py
{ "start": 17107, "end": 23955 }
class ____(SimpleTestCase): def test_usage(self): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/base.html") with self.assertTemplateUsed(template_name="template_used/base.html"): render_to_string("template_used/base.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/include.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/base.html") render_to_string("template_used/base.html") def test_nested_usage(self): with self.assertTemplateUsed("template_used/base.html"): with self.assertTemplateUsed("template_used/include.html"): render_to_string("template_used/include.html") with self.assertTemplateUsed("template_used/extends.html"): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateUsed("template_used/base.html"): with self.assertTemplateUsed("template_used/alternative.html"): render_to_string("template_used/alternative.html") render_to_string("template_used/base.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateNotUsed("template_used/base.html"): render_to_string("template_used/alternative.html") render_to_string("template_used/base.html") def test_not_used(self): with self.assertTemplateNotUsed("template_used/base.html"): pass with self.assertTemplateNotUsed("template_used/alternative.html"): pass def test_error_message_no_template_used(self): msg = "No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name="template_used/base.html"): pass with self.assertRaisesMessage(AssertionError, 
msg): response = self.client.get("/test_utils/no_template_used/") self.assertTemplateUsed(response, "template_used/base.html") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): self.client.get("/test_utils/no_template_used/") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): template = Template("template_used/alternative.html", name=None) template.render(Context()) def test_error_message_unexpected_template_used(self): msg = ( "Template 'template_used/base.html' was not a template used to render " "the response. Actual template(s) used: template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/alternative.html") def test_msg_prefix(self): msg_prefix = "Prefix" msg = f"{msg_prefix}: No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( "template_used/base.html", msg_prefix=msg_prefix ): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( template_name="template_used/base.html", msg_prefix=msg_prefix, ): pass msg = ( f"{msg_prefix}: Template 'template_used/base.html' was not a " f"template used to render the response. Actual template(s) used: " f"template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( "template_used/base.html", msg_prefix=msg_prefix ): render_to_string("template_used/alternative.html") def test_count(self): with self.assertTemplateUsed("template_used/base.html", count=2): render_to_string("template_used/base.html") render_to_string("template_used/base.html") msg = ( "Template 'template_used/base.html' was expected to be rendered " "3 time(s) but was actually rendered 2 time(s)." 
) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html", count=3): render_to_string("template_used/base.html") render_to_string("template_used/base.html") def test_failure(self): msg = "response and/or template_name argument must be provided" with self.assertRaisesMessage(TypeError, msg): with self.assertTemplateUsed(): pass msg = "No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(""): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(""): render_to_string("template_used/base.html") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name=""): pass msg = ( "Template 'template_used/base.html' was not a template used to " "render the response. Actual template(s) used: " "template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/alternative.html") def test_assert_used_on_http_response(self): response = HttpResponse() msg = "%s() is only usable on responses fetched using the Django test Client." with self.assertRaisesMessage(ValueError, msg % "assertTemplateUsed"): self.assertTemplateUsed(response, "template.html") with self.assertRaisesMessage(ValueError, msg % "assertTemplateNotUsed"): self.assertTemplateNotUsed(response, "template.html") @override_settings(ROOT_URLCONF="test_utils.urls")
AssertTemplateUsedContextManagerTests
python
pyinstaller__pyinstaller
bootloader/waflib/Tools/tex.py
{ "start": 10398, "end": 10544 }
class ____(Task.Task): run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' color = 'BLUE' after = ['latex', 'pdflatex', 'xelatex']
dvips
python
astropy__astropy
astropy/table/tests/test_init_table.py
{ "start": 12500, "end": 12768 }
class ____(BaseInitFromDictLike): def _setup(self, table_type): self.data = { "a": Column([1, 3], name="x"), "b": [2, 4], "c": np.array([3, 5], dtype="i8"), } @pytest.mark.usefixtures("table_type")
TestInitFromDict
python
getsentry__sentry
src/sentry/api/endpoints/organization_events_trends.py
{ "start": 2840, "end": 16329 }
class ____(OrganizationEventsV2EndpointBase): publish_status = { "GET": ApiPublishStatus.PRIVATE, } trend_columns = { "p50": "percentile_range({column}, 0.5, {condition}, {boundary}) as {query_alias}", "p75": "percentile_range({column}, 0.75, {condition}, {boundary}) as {query_alias}", "p95": "percentile_range({column}, 0.95, {condition}, {boundary}) as {query_alias}", "p99": "percentile_range({column}, 0.99, {condition}, {boundary}) as {query_alias}", "avg": "avg_range({column}, {condition}, {boundary}) as {query_alias}", "variance": "variance_range(transaction.duration, {condition}, {boundary}) as {query_alias}", "count_range": "count_range({condition}, {boundary}) as {query_alias}", "percentage": "percentage({alias}_2, {alias}_1) as {query_alias}", "difference": "minus({alias}_2,{alias}_1) as {query_alias}", "t_test": "t_test({avg}_1, {avg}_2, variance_range_1, variance_range_2, count_range_1, count_range_2)", } snql_trend_columns = { "p50": "percentile_range({column}, 0.5, {condition}, {boundary})", "p75": "percentile_range({column}, 0.75, {condition}, {boundary})", "p95": "percentile_range({column}, 0.95, {condition}, {boundary})", "p99": "percentile_range({column}, 0.99, {condition}, {boundary})", "avg": "avg_range({column}, {condition}, {boundary})", "variance": "variance_range(transaction.duration, {condition}, {boundary})", "count_range": "count_range({condition}, {boundary})", "percentage": "percentage({alias}_2, {alias}_1)", "difference": "minus({alias}_2,{alias}_1)", "t_test": "t_test({avg}_1, {avg}_2, variance_range_1, variance_range_2, count_range_1, count_range_2)", } def resolve_trend_columns( self, query: TrendQueryBuilder, baseline_function: str, column: str, middle: str, ) -> TrendColumns: """Construct the columns needed to calculate high confidence trends This is the snql version of get_trend_columns, which should be replaced once we're migrated """ if baseline_function not in self.snql_trend_columns: raise 
ParseError(detail=f"{baseline_function} is not a supported trend function") aggregate_column = self.snql_trend_columns[baseline_function] aggregate_range_1 = query.resolve_function( aggregate_column.format(column=column, condition="greater", boundary=middle), overwrite_alias="aggregate_range_1", ) aggregate_range_2 = query.resolve_function( aggregate_column.format( column=column, condition="lessOrEquals", boundary=middle, ), overwrite_alias="aggregate_range_2", ) count_column = self.snql_trend_columns["count_range"] count_range_1 = query.resolve_function( count_column.format(condition="greater", boundary=middle), overwrite_alias="count_range_1", ) count_range_2 = query.resolve_function( count_column.format(condition="lessOrEquals", boundary=middle), overwrite_alias="count_range_2", ) variance_column = self.snql_trend_columns["variance"] variance_range_1 = query.resolve_function( variance_column.format(condition="greater", boundary=middle), overwrite_alias="variance_range_1", ) variance_range_2 = query.resolve_function( variance_column.format(condition="lessOrEquals", boundary=middle), overwrite_alias="variance_range_2", ) # Only add average when its not the baseline if baseline_function != "avg": avg_column = self.snql_trend_columns["avg"] avg_range_1 = query.resolve_function( avg_column.format( column=column, condition="greater", boundary=middle, ) ) avg_range_2 = query.resolve_function( avg_column.format( column=column, condition="lessOrEquals", boundary=middle, ) ) # avg will be added as the baseline else: avg_range_1 = aggregate_range_1 avg_range_2 = aggregate_range_2 t_test = function_aliases.resolve_division( Function("minus", [avg_range_1, avg_range_2]), Function( "sqrt", [ Function( "plus", [ Function( "divide", [ variance_range_1, count_range_1, ], ), Function( "divide", [ variance_range_2, count_range_2, ], ), ], ), ], ), "t_test", ) trend_percentage = function_aliases.resolve_division( aggregate_range_2, aggregate_range_1, "trend_percentage" ) 
trend_difference = Function( "minus", [ aggregate_range_2, aggregate_range_1, ], "trend_difference", ) count_percentage = function_aliases.resolve_division( count_range_2, count_range_1, "count_percentage" ) return { "aggregate_range_1": aggregate_range_1, "aggregate_range_2": aggregate_range_2, "count_range_1": count_range_1, "count_range_2": count_range_2, "t_test": t_test, "trend_percentage": trend_percentage, "trend_difference": trend_difference, "count_percentage": count_percentage, } @staticmethod def get_snql_function_aliases(trend_columns: TrendColumns, trend_type: str) -> dict[str, Alias]: """Construct a dict of aliases this is because certain conditions behave differently depending on the trend type like trend_percentage and trend_difference """ return { "trend_percentage()": Alias( lambda aggregate_filter: Condition( trend_columns["trend_percentage"], Op( CORRESPONDENCE_MAP[aggregate_filter.operator] if trend_type == IMPROVED else aggregate_filter.operator ), 1 + (aggregate_filter.value.value * (-1 if trend_type == IMPROVED else 1)), ), ["percentage", "transaction.duration"], trend_columns["trend_percentage"], ), "trend_difference()": Alias( lambda aggregate_filter: Condition( trend_columns["trend_difference"], Op( CORRESPONDENCE_MAP[aggregate_filter.operator] if trend_type == IMPROVED else aggregate_filter.operator ), ( -1 * aggregate_filter.value.value if trend_type == IMPROVED else aggregate_filter.value.value ), ), ["minus", "transaction.duration"], trend_columns["trend_difference"], ), "confidence()": Alias( lambda aggregate_filter: Condition( trend_columns["t_test"], Op( CORRESPONDENCE_MAP[aggregate_filter.operator] if trend_type == REGRESSION else aggregate_filter.operator ), ( -1 * aggregate_filter.value.value if trend_type == REGRESSION else aggregate_filter.value.value ), ), None, trend_columns["t_test"], ), "count_percentage()": Alias( lambda aggregate_filter: Condition( trend_columns["count_percentage"], Op(aggregate_filter.operator), 
aggregate_filter.value.value, ), ["percentage", "count"], trend_columns["count_percentage"], ), } def has_feature(self, organization, request): return features.has("organizations:performance-view", organization, actor=request.user) def get(self, request: Request, organization: Organization) -> Response: if not self.has_feature(organization, request): return Response(status=404) try: snuba_params = self.get_snuba_params(request, organization) except NoProjects: return Response([]) with sentry_sdk.start_span(op="discover.endpoint", name="trend_dates"): middle_date = request.GET.get("middle") if middle_date: try: middle_dt = parse_datetime_string(middle_date) except InvalidQuery: raise ParseError(detail=f"{middle_date} is not a valid date format") if middle_dt <= snuba_params.start_date or middle_dt >= snuba_params.end_date: raise ParseError( detail="The middle date should be within the duration of the query" ) else: middle_dt = snuba_params.start_date + timedelta( seconds=(snuba_params.date_range).total_seconds() * 0.5 ) middle = middle_dt.strftime(DateArg.date_format) trend_type = request.GET.get("trendType", REGRESSION) if trend_type not in TREND_TYPES: raise ParseError(detail=f"{trend_type} is not a supported trend type") trend_function = request.GET.get("trendFunction", "p50()") try: function, columns, _ = parse_function(trend_function) except InvalidSearchQuery as error: raise ParseError(detail=str(error)) if len(columns) == 0: # Default to duration column = "transaction.duration" else: column = columns[0] selected_columns = self.get_field_list(organization, request) orderby = self.get_orderby(request) query = request.GET.get("query") with handle_query_errors(): trend_query = TrendQueryBuilder( dataset=Dataset.Discover, params={}, snuba_params=snuba_params, selected_columns=selected_columns, config=QueryBuilderConfig( auto_fields=False, auto_aggregations=True, use_aggregate_conditions=True, ), ) snql_trend_columns = self.resolve_trend_columns(trend_query, 
function, column, middle) trend_query.columns.extend(snql_trend_columns.values()) trend_query.aggregates.extend(snql_trend_columns.values()) trend_query.params.aliases = self.get_snql_function_aliases( snql_trend_columns, trend_type ) # Both orderby and conditions need to be resolved after the columns because of aliasing trend_query.orderby = trend_query.resolve_orderby(orderby) trend_query.groupby = trend_query.resolve_groupby() where, having = trend_query.resolve_conditions(query) trend_query.where += where trend_query.having += having def data_fn(offset, limit): trend_query.offset = Offset(offset) trend_query.limit = Limit(limit) result = raw_snql_query( trend_query.get_snql_query(), referrer="api.trends.get-percentage-change", ) result = trend_query.process_results(result) return result with handle_query_errors(): return self.paginate( request=request, paginator=GenericOffsetPaginator(data_fn=data_fn), on_results=self.build_result_handler( request, organization, snuba_params, trend_function, selected_columns, orderby, query, ), default_per_page=5, max_per_page=5, ) def build_result_handler( self, request, organization, snuba_params, trend_function, selected_columns, orderby, query, ): raise NotImplementedError @region_silo_endpoint
OrganizationEventsTrendsEndpointBase
python
jmcnamara__XlsxWriter
xlsxwriter/test/comparison/test_table20.py
{ "start": 315, "end": 1256 }
class ____(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.set_filename("table01.xlsx") # Ignore increased shared string count. self.ignore_files = ["xl/sharedStrings.xml"] def test_create_file(self): """Test the creation of a simple XlsxWriter file with tables.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.set_column("C:F", 10.288) worksheet.add_table("C3:F13") # The following should be ignored since it contains duplicate headers. # Ignore the warning. import warnings warnings.filterwarnings("ignore") worksheet.add_table( "G3:H3", {"columns": [{"header": "Column1"}, {"header": "Column1"}]} ) workbook.close() self.assertExcelEqual()
TestCompareXLSXFiles