Columns (types and value ranges as reported by the dataset viewer):
body: string, 26 to 98.2k characters
body_hash: int64, -9,222,864,604,528,158,000 to 9,221,803,474B (viewer-abbreviated; approx. 9.2e18)
docstring: string, 1 to 16.8k characters
path: string, 5 to 230 characters
name: string, 1 to 96 characters
repository_name: string, 7 to 89 characters
lang: string, 1 class (python)
body_without_docstring: string, 20 to 98.2k characters
def _specialized_reduce_window(reducer, identity, operand, *, window_dimensions, window_strides, padding, base_dilation, window_dilation, _in_avals, _out_aval, name=None): 'Wraps the TensorFlow reduce window operation based on a reducer and an\n\n identity function defining the initial value of the reduction depending on\n the dtype of the operand.\n\n Args:\n reducer: reduction function of type TfVal -> TfVal -> TfVal\n identity: function that takes a TensorFlow dtype as a parameter and returns\n the starting value of the reduction.\n operand: N dimensional array containing elements of type T\n window_dimensions: array of integers for window dimension values\n window_strides: array of integers for window stride values\n padding: array of pairs of integers for padding values\n base_dilation: array of integers for base dilation values\n window_dilation: array of integers for window dilation values\n name: the name of the specialized reduce window primitive for which this\n conversion function is called. This information may help to choose a\n different conversion path (optional)\n\n Returns:\n The reduced operand.\n ' if ((not _enable_xla) and (name in ['reduce_window_max', 'reduce_window_sum'])): return _try_tf_pool(name, operand, window_dimensions, window_strides, padding, base_dilation, window_dilation) return _common_reduce_window(operand, identity(operand.dtype), reducer, window_dimensions, window_strides, padding, base_dilation, window_dilation, _in_avals, _out_aval)
-2,846,017,248,677,970,000
Wraps the TensorFlow reduce window operation based on a reducer and an identity function defining the initial value of the reduction depending on the dtype of the operand. Args: reducer: reduction function of type TfVal -> TfVal -> TfVal identity: function that takes a TensorFlow dtype as a parameter and returns the starting value of the reduction. operand: N dimensional array containing elements of type T window_dimensions: array of integers for window dimension values window_strides: array of integers for window stride values padding: array of pairs of integers for padding values base_dilation: array of integers for base dilation values window_dilation: array of integers for window dilation values name: the name of the specialized reduce window primitive for which this conversion function is called. This information may help to choose a different conversion path (optional) Returns: The reduced operand.
jax/experimental/jax2tf/jax2tf.py
_specialized_reduce_window
ho-oto/jax
python
def _specialized_reduce_window(reducer, identity, operand, *, window_dimensions, window_strides, padding, base_dilation, window_dilation, _in_avals, _out_aval, name=None): if ((not _enable_xla) and (name in ['reduce_window_max', 'reduce_window_sum'])): return _try_tf_pool(name, operand, window_dimensions, window_strides, padding, base_dilation, window_dilation) return _common_reduce_window(operand, identity(operand.dtype), reducer, window_dimensions, window_strides, padding, base_dilation, window_dilation, _in_avals, _out_aval)
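For orientation, a minimal sketch of the primitive this converter handles, written against the public JAX API rather than jax2tf internals (the pooling fallback is assumed from the _try_tf_pool branch above):

import jax.numpy as jnp
from jax import lax

x = jnp.arange(16.0).reshape(4, 4)
# 2x2, stride-2 max reduce_window: reducer=max, identity=-inf. With XLA
# disabled, jax2tf routes this 'reduce_window_max' case through _try_tf_pool.
y = lax.reduce_window(x, -jnp.inf, lax.max, (2, 2), (2, 2), 'VALID')
print(y)  # [[ 5.  7.]
          #  [13. 15.]]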
@functools.partial(bool_to_int8, argnums=0) def _gather(operand, start_indices, *, dimension_numbers, slice_sizes, _in_avals, _out_aval): 'Tensorflow implementation of gather.' del _in_avals if (not _enable_xla): raise _xla_disabled_error('gather') proto = _gather_dimensions_proto(start_indices.shape, dimension_numbers) slice_sizes_tf = _eval_shape(slice_sizes) out = tfxla.gather(operand, start_indices, proto, slice_sizes_tf, False) out.set_shape(_aval_to_tf_shape(_out_aval)) return out
-6,924,293,510,355,507,000
Tensorflow implementation of gather.
jax/experimental/jax2tf/jax2tf.py
_gather
ho-oto/jax
python
@functools.partial(bool_to_int8, argnums=0) def _gather(operand, start_indices, *, dimension_numbers, slice_sizes, _in_avals, _out_aval): del _in_avals if (not _enable_xla): raise _xla_disabled_error('gather') proto = _gather_dimensions_proto(start_indices.shape, dimension_numbers) slice_sizes_tf = _eval_shape(slice_sizes) out = tfxla.gather(operand, start_indices, proto, slice_sizes_tf, False) out.set_shape(_aval_to_tf_shape(_out_aval)) return out
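A small usage sketch of the gather semantics converted above, via ordinary JAX indexing, which traces to the gather primitive (jax2tf internals are not required to demonstrate it):

import jax
import jax.numpy as jnp

operand = jnp.array([[1, 2], [3, 4], [5, 6]])
rows = jnp.array([2, 0])
take = jax.jit(lambda op, idx: op[idx])  # lowers to the gather primitive
print(take(operand, rows))  # [[5 6]
                            #  [1 2]]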
def _batched_cond_while(*args: TfVal, cond_nconsts: int, cond_jaxpr: core.ClosedJaxpr, body_nconsts: int, body_jaxpr: core.ClosedJaxpr) -> Sequence[TfVal]: 'Interprets a while_loop with a batched condition.\n\n A batched while has a conditional that returns a tensor of booleans, and\n a body that returns a list of tensors whose leading dimensions match those\n of the conditional tensor.\n\n We need to turn it into a while with scalar boolean conditional. We will\n expand the loop carry to include a prefix with the current tensor boolean\n condition. We prepend to the loop the first calculation of the tensor boolean\n condition. The loop condition will use a "reduce_any" to calculate a scalar\n boolean from the tensor boolean condition. The end of the loop body will\n compute the new carry using a "tf.where", and we compute the new tensor\n boolean condition.\n ' (cond_consts, body_consts, init_carry) = util.split_list(args, [cond_nconsts, body_nconsts]) (init_pred_b,) = _interpret_jaxpr(cond_jaxpr, *cond_consts, *init_carry) assert (init_pred_b is not core.unit) def new_cond_tf_func(pred_b: TfVal, *carry: TfVal) -> TfVal: pred = tf.reduce_any(pred_b, axis=list(range(len(pred_b.shape)))) return pred def new_body_tf_func(pred_b: TfVal, *carry: TfVal) -> Sequence[TfVal]: new_carry: Sequence[TfVal] = _interpret_jaxpr(body_jaxpr, *body_consts, *carry) def select_one_carry(new_c: TfVal, c: TfVal) -> TfVal: pred_b_bcast = _broadcast_in_dim(pred_b, shape=new_c.shape, broadcast_dimensions=list(range(len(pred_b.shape)))) return tf.where(pred_b_bcast, new_c, c) selected_carry: Sequence[TfVal] = list(util.safe_map(select_one_carry, new_carry, carry)) (next_pred_b,) = _interpret_jaxpr(cond_jaxpr, *cond_consts, *selected_carry) return (next_pred_b, *selected_carry) (_, *res_carry) = tf.while_loop(new_cond_tf_func, new_body_tf_func, (init_pred_b, *init_carry)) return res_carry
7,896,480,710,783,111,000
Interprets a while_loop with a batched condition. A batched while has a conditional that returns a tensor of booleans, and a body that returns a list of tensors whose leading dimensions match those of the conditional tensor. We need to turn it into a while with scalar boolean conditional. We will expand the loop carry to include a prefix with the current tensor boolean condition. We prepend to the loop the first calculation of the tensor boolean condition. The loop condition will use a "reduce_any" to calculate a scalar boolean from the tensor boolean condition. The end of the loop body will compute the new carry using a "tf.where", and we compute the new tensor boolean condition.
jax/experimental/jax2tf/jax2tf.py
_batched_cond_while
ho-oto/jax
python
def _batched_cond_while(*args: TfVal, cond_nconsts: int, cond_jaxpr: core.ClosedJaxpr, body_nconsts: int, body_jaxpr: core.ClosedJaxpr) -> Sequence[TfVal]: (cond_consts, body_consts, init_carry) = util.split_list(args, [cond_nconsts, body_nconsts]) (init_pred_b,) = _interpret_jaxpr(cond_jaxpr, *cond_consts, *init_carry) assert (init_pred_b is not core.unit) def new_cond_tf_func(pred_b: TfVal, *carry: TfVal) -> TfVal: pred = tf.reduce_any(pred_b, axis=list(range(len(pred_b.shape)))) return pred def new_body_tf_func(pred_b: TfVal, *carry: TfVal) -> Sequence[TfVal]: new_carry: Sequence[TfVal] = _interpret_jaxpr(body_jaxpr, *body_consts, *carry) def select_one_carry(new_c: TfVal, c: TfVal) -> TfVal: pred_b_bcast = _broadcast_in_dim(pred_b, shape=new_c.shape, broadcast_dimensions=list(range(len(pred_b.shape)))) return tf.where(pred_b_bcast, new_c, c) selected_carry: Sequence[TfVal] = list(util.safe_map(select_one_carry, new_carry, carry)) (next_pred_b,) = _interpret_jaxpr(cond_jaxpr, *cond_consts, *selected_carry) return (next_pred_b, *selected_carry) (_, *res_carry) = tf.while_loop(new_cond_tf_func, new_body_tf_func, (init_pred_b, *init_carry)) return res_carry
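The batched condition described in the docstring arises, for example, from vmapping a scalar while_loop; a minimal sketch in plain JAX:

import jax
import jax.numpy as jnp

def countdown(x):
    # Scalar predicate here; under vmap it becomes a tensor of booleans,
    # which jax2tf rewrites with reduce_any/tf.where as described above.
    return jax.lax.while_loop(lambda c: c > 0, lambda c: c - 1, x)

print(jax.vmap(countdown)(jnp.array([3, 0, 5])))  # [0 0 0]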
def split_to_logical_devices(tensor: TfVal, partition_dimensions: pxla.PartitionsOrReplicated): 'Like TPUMPStrategy.experimental_split_to_logical_devices.\n\n For jax2tf purposes we want to avoid needing to thread the `strategy` object\n through the generated computation. It seems that the original function needs\n the strategy object only for error checking, which we assume is done upstream\n by JAX.\n\n Args:\n tensor: Input tensor to annotate.\n partition_dimensions: A list of integers, with one integer per tensor\n dimension, specifying in how many parts the dimension should be split. The\n product of integers must equal the number of devices per replica.\n\n Returns:\n an annotated tensor.\n ' if (partition_dimensions is None): return xla_sharding.replicate(tensor, use_sharding_op=True) num_partition_splits = np.prod(partition_dimensions) tile_assignment = np.arange(num_partition_splits).reshape(partition_dimensions) return xla_sharding.tile(tensor, tile_assignment, use_sharding_op=True)
743,899,538,745,615,900
Like TPUMPStrategy.experimental_split_to_logical_devices. For jax2tf purposes we want to avoid needing to thread the `strategy` object through the generated computation. It seems that the original function needs the strategy object only for error checking, which we assume is done upstream by JAX. Args: tensor: Input tensor to annotate. partition_dimensions: A list of integers, with one integer per tensor dimension, specifying in how many parts the dimension should be split. The product of integers must equal the number of devices per replica. Returns: an annotated tensor.
jax/experimental/jax2tf/jax2tf.py
split_to_logical_devices
ho-oto/jax
python
def split_to_logical_devices(tensor: TfVal, partition_dimensions: pxla.PartitionsOrReplicated): if (partition_dimensions is None): return xla_sharding.replicate(tensor, use_sharding_op=True) num_partition_splits = np.prod(partition_dimensions) tile_assignment = np.arange(num_partition_splits).reshape(partition_dimensions) return xla_sharding.tile(tensor, tile_assignment, use_sharding_op=True)
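The tile-assignment arithmetic from the body, shown in isolation (NumPy only; the device ids are hypothetical):

import numpy as np

partition_dimensions = [2, 2]  # split each of two tensor dims into 2 parts
tile_assignment = np.arange(np.prod(partition_dimensions)).reshape(partition_dimensions)
print(tile_assignment)  # [[0 1]
                        #  [2 3]]  -- one logical device id per tile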
def _register_checkpoint_pytrees(): 'Registers TF custom container types as pytrees.' m = tf.Module() m.a = (tf.Module(), tf.Module()) m.b = [tf.Module(), tf.Module()] m.c = {'a': tf.Module()} tuple_wrapper = type(m.a) list_wrapper = type(m.b) dict_wrapper = type(m.c) assert (tuple_wrapper is not tuple) assert (list_wrapper is not list) assert (dict_wrapper is not dict) jax.tree_util.register_pytree_node(tuple_wrapper, (lambda xs: (tuple(xs), None)), (lambda _, xs: tuple(xs))) jax.tree_util.register_pytree_node(list_wrapper, (lambda xs: (tuple(xs), None)), (lambda _, xs: list(xs))) jax.tree_util.register_pytree_node(dict_wrapper, (lambda s: (tuple(s.values()), tuple(s.keys()))), (lambda k, xs: dict(zip(k, xs))))
1,120,869,914,977,872,300
Registers TF custom container types as pytrees.
jax/experimental/jax2tf/jax2tf.py
_register_checkpoint_pytrees
ho-oto/jax
python
def _register_checkpoint_pytrees(): m = tf.Module() m.a = (tf.Module(), tf.Module()) m.b = [tf.Module(), tf.Module()] m.c = {'a': tf.Module()} tuple_wrapper = type(m.a) list_wrapper = type(m.b) dict_wrapper = type(m.c) assert (tuple_wrapper is not tuple) assert (list_wrapper is not list) assert (dict_wrapper is not dict) jax.tree_util.register_pytree_node(tuple_wrapper, (lambda xs: (tuple(xs), None)), (lambda _, xs: tuple(xs))) jax.tree_util.register_pytree_node(list_wrapper, (lambda xs: (tuple(xs), None)), (lambda _, xs: list(xs))) jax.tree_util.register_pytree_node(dict_wrapper, (lambda s: (tuple(s.values()), tuple(s.keys()))), (lambda k, xs: dict(zip(k, xs))))
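The same register_pytree_node pattern, demonstrated on a hypothetical container class to show what the three registrations above accomplish for TF's wrapper types:

import jax

class Pair:
    def __init__(self, a, b):
        self.a, self.b = a, b

jax.tree_util.register_pytree_node(
    Pair,
    lambda p: ((p.a, p.b), None),  # flatten: (children, aux data)
    lambda _, xs: Pair(*xs))       # unflatten: rebuild from children
doubled = jax.tree_util.tree_map(lambda v: v * 2, Pair(1, 2))
print(doubled.a, doubled.b)  # 2 4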
def input_aval(arg: TfVal, polymorphic_shape: Optional[str]) -> core.AbstractValue: 'The abstract value for an input.' (raw_shape, dtype) = _tfval_shape_dtype(arg) aval_shape = shape_poly.parse_spec(polymorphic_shape, raw_shape) for (i, d) in enumerate(aval_shape): if (type(d) is int): assert (d == np.shape(arg)[i]) elif ((type(d) is shape_poly.DimVar) and (d not in shapeenv)): shapeenv[d] = tf.shape(arg)[i] else: pass return core.ShapedArray(aval_shape, dtype)
7,555,518,512,864,666,000
The abstract value for an input.
jax/experimental/jax2tf/jax2tf.py
input_aval
ho-oto/jax
python
def input_aval(arg: TfVal, polymorphic_shape: Optional[str]) -> core.AbstractValue: (raw_shape, dtype) = _tfval_shape_dtype(arg) aval_shape = shape_poly.parse_spec(polymorphic_shape, raw_shape) for (i, d) in enumerate(aval_shape): if (type(d) is int): assert (d == np.shape(arg)[i]) elif ((type(d) is shape_poly.DimVar) and (d not in shapeenv)): shapeenv[d] = tf.shape(arg)[i] else: pass return core.ShapedArray(aval_shape, dtype)
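A sketch of how a polymorphic shape spec reaches input_aval, using the experimental jax2tf.convert API of this era (details may differ across versions):

import jax.numpy as jnp
from jax.experimental import jax2tf

f_tf = jax2tf.convert(lambda x: jnp.sum(x, axis=1),
                      polymorphic_shapes=["(b, 4)"])
# 'b' is parsed by shape_poly.parse_spec into a DimVar whose runtime value
# input_aval reads from tf.shape(arg)[0], as in the loop above.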
def pure(self, val: Union[(TfVal, core.Unit)]) -> TensorFlowTracer: 'Lifts a non-Tracer into the TensorFlowTracer.\n\n This function may be called by way of trace.full_raise.\n\n The value may be a core.unit. During JAX transformations we sometimes\n produce a Jaxpr that has arguments of abstract value core.abstract_unit\n and results equal to core.unit. These are arguments and results that are\n not used in the computation.\n\n In TF world, we represent core.unit as NaN. This is safe, as these values\n should never be used.\n ' if (val is core.unit): return TensorFlowTracer(self, tf.constant(np.nan, tf.float32), core.abstract_unit) else: (shape, dtype) = _tfval_shape_dtype(val) return TensorFlowTracer(self, val, core.ShapedArray(shape, dtype))
7,683,155,972,124,569,000
Lifts a non-Tracer into the TensorFlowTracer. This function may be called by way of trace.full_raise. The value may be a core.unit. During JAX transformations we sometimes produce a Jaxpr that has arguments of abstract value core.abstract_unit and results equal to core.unit. These are arguments and results that are not used in the computation. In TF world, we represent core.unit as NaN. This is safe, as these values should never be used.
jax/experimental/jax2tf/jax2tf.py
pure
ho-oto/jax
python
def pure(self, val: Union[(TfVal, core.Unit)]) -> TensorFlowTracer: if (val is core.unit): return TensorFlowTracer(self, tf.constant(np.nan, tf.float32), core.abstract_unit) else: (shape, dtype) = _tfval_shape_dtype(val) return TensorFlowTracer(self, val, core.ShapedArray(shape, dtype))
def _sanitize_vmr(df): "Filters entries with '511' error, impossibly high speed, abnormally\n high vessel width, as well as singletons (only one entry) from vessel\n movement DataFrame.\n\n Args:\n df: Vessel movement DataFrame.\n\n Returns:\n Sanitized vessel movement report DataFrame.\n " df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Beam ft'] >= 500)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Course'] == 511)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Heading'] == 511)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'VSPD kn'] >= 40)].index)), :] singleton = (df.loc[:, 'MMSI'].value_counts() == 1) single_mmsi = df.loc[:, 'MMSI'].value_counts()[singleton].index.values df = df.loc[(~ df.loc[:, 'MMSI'].isin(single_mmsi)), :] return df
-3,351,735,100,606,929,000
Filters entries with '511' error, impossibly high speed, abnormally high vessel width, as well as singletons (only one entry) from vessel movement DataFrame. Args: df: Vessel movement DataFrame. Returns: Sanitized vessel movement report DataFrame.
src/process_maritime_data.py
_sanitize_vmr
maritime-whale/maritime-whale
python
def _sanitize_vmr(df): df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Beam ft'] >= 500)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Course'] == 511)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'Heading'] == 511)].index)), :] df = df.loc[(~ df.index.isin(df[(df.loc[:, 'VSPD kn'] >= 40)].index)), :] singleton = (df.loc[:, 'MMSI'].value_counts() == 1) single_mmsi = df.loc[:, 'MMSI'].value_counts()[singleton].index.values df = df.loc[(~ df.loc[:, 'MMSI'].isin(single_mmsi)), :] return df
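An equivalent, more direct formulation of these filters, as an illustrative sketch (column names as in the maritime-whale data; this is not the project's own code):

import pandas as pd

def sanitize(df):
    mask = ((df['Beam ft'] < 500) & (df['Course'] != 511)
            & (df['Heading'] != 511) & (df['VSPD kn'] < 40))
    df = df[mask]
    # drop singleton MMSIs (vessels with only one entry)
    return df[df.groupby('MMSI')['MMSI'].transform('count') > 1]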
def _wrangle_vmr(df, rename): 'Rounds, renames, and sanitizes vessel movement DataFrame. Creates new\n columns.\n\n Args:\n df: Vessel movement DataFrame.\n rename: Dict mapping raw column names to report column names.\n\n Returns:\n Cleaned vessel movement report DataFrame.\n ' df.rename(rename, axis=1, inplace=True) df.loc[:, 'LOA ft'] = ((df.loc[:, 'A'] + df.loc[:, 'B']) * M_TO_FT) df.loc[:, 'LOA ft'] = df.loc[:, 'LOA ft'].round(0) df.loc[:, 'Beam ft'] = ((df.loc[:, 'C'] + df.loc[:, 'D']) * M_TO_FT) df.loc[:, 'Beam ft'] = df.loc[:, 'Beam ft'].round(0) df.loc[:, 'Latitude'] = df.loc[:, 'Latitude'].round(5) df.loc[:, 'Longitude'] = df.loc[:, 'Longitude'].round(5) df = _sanitize_vmr(df) df = df.loc[(df.loc[:, 'LOA ft'] >= SUB_PANAMAX), :] df.loc[:, 'Date/Time UTC'] = df.loc[:, 'Date/Time UTC'].str.strip('UTC') df.loc[:, 'Date/Time UTC'] = pd.to_datetime(df.loc[:, 'Date/Time UTC']) df = df.loc[:, ['Date/Time UTC', 'Name', 'MMSI', 'LOA ft', 'Latitude', 'Longitude', 'Course', 'AIS Type', 'Heading', 'VSPD kn', 'Beam ft']] return df
-989,017,253,561,720,400
Rounds, renames, and sanitizes vessel movement DataFrame. Creates new columns. Args: df: Vessel movement DataFrame. rename: Dict mapping raw column names to report column names. Returns: Cleaned vessel movement report DataFrame.
src/process_maritime_data.py
_wrangle_vmr
maritime-whale/maritime-whale
python
def _wrangle_vmr(df, rename): df.rename(rename, axis=1, inplace=True) df.loc[:, 'LOA ft'] = ((df.loc[:, 'A'] + df.loc[:, 'B']) * M_TO_FT) df.loc[:, 'LOA ft'] = df.loc[:, 'LOA ft'].round(0) df.loc[:, 'Beam ft'] = ((df.loc[:, 'C'] + df.loc[:, 'D']) * M_TO_FT) df.loc[:, 'Beam ft'] = df.loc[:, 'Beam ft'].round(0) df.loc[:, 'Latitude'] = df.loc[:, 'Latitude'].round(5) df.loc[:, 'Longitude'] = df.loc[:, 'Longitude'].round(5) df = _sanitize_vmr(df) df = df.loc[(df.loc[:, 'LOA ft'] >= SUB_PANAMAX), :] df.loc[:, 'Date/Time UTC'] = df.loc[:, 'Date/Time UTC'].str.strip('UTC') df.loc[:, 'Date/Time UTC'] = pd.to_datetime(df.loc[:, 'Date/Time UTC']) df = df.loc[:, ['Date/Time UTC', 'Name', 'MMSI', 'LOA ft', 'Latitude', 'Longitude', 'Course', 'AIS Type', 'Heading', 'VSPD kn', 'Beam ft']] return df
def _filter_blacklisters(df, blacklist): "Checks vessel AIS types and omits blacklisted vessel types from the\n filtered data. Appends omitted vessels' MMSIs to blacklist.txt.\n\n Args:\n df: Vessel movement DataFrame.\n\n Returns:\n Filtered vessel movement DataFrame.\n " df = df.loc[(~ df.loc[:, 'MMSI'].isin(blacklist)), :] new_blacklisters = [] for j in range(df.shape[0]): if (df.iloc[j]['AIS Type'] in AUTO_BLACKLIST): new_blacklisters.append(df.iloc[j]['MMSI']) with open('../cache/blacklist.txt', 'a') as f: contents = [str(mmsi) for mmsi in new_blacklisters] if contents: f.write(('\n'.join(contents) + '\n')) df = df.loc[(~ df.loc[:, 'MMSI'].isin(new_blacklisters)), :] return df
-8,168,049,235,057,531,000
Checks vessel AIS types and omits blacklisted vessel types from the filtered data. Appends omitted vessels' MMSIs to blacklist.txt. Args: df: Vessel movement DataFrame. Returns: Filtered vessel movement DataFrame.
src/process_maritime_data.py
_filter_blacklisters
maritime-whale/maritime-whale
python
def _filter_blacklisters(df, blacklist): df = df.loc[(~ df.loc[:, 'MMSI'].isin(blacklist)), :] new_blacklisters = [] for j in range(df.shape[0]): if (df.iloc[j]['AIS Type'] in AUTO_BLACKLIST): new_blacklisters.append(df.iloc[j]['MMSI']) with open('../cache/blacklist.txt', 'a') as f: contents = [str(mmsi) for mmsi in new_blacklisters] if contents: f.write(('\n'.join(contents) + '\n')) df = df.loc[(~ df.loc[:, 'MMSI'].isin(new_blacklisters)), :] return df
def _fold_vmr(ports, i): "Reduces movement report to a DataFrame with a single entry for each\n vessel at the point of its maximum speed in the channel. Includes a column\n with the vessel's mean speed.\n " mean = pd.DataFrame(ports[i].groupby(['Name', 'MMSI'])['VSPD kn'].mean()).rename({'VSPD kn': 'Mean Speed kn'}, axis=1).round(1) maxes = pd.DataFrame(ports[i].groupby(['Name', 'MMSI'])['VSPD kn'].max()).rename({'VSPD kn': 'Max Speed kn'}, axis=1) merged_speeds = maxes.merge(mean, on=['Name', 'MMSI']) max_dict = merged_speeds.loc[:, 'Max Speed kn'].to_dict() columns = {'Longitude': [], 'Latitude': [], 'Date/Time UTC': [], 'LOA ft': [], 'Course': [], 'AIS Type': [], 'WSPD mph': [], 'GST mph': [], 'WDIR degT': [], 'Buoy Source': [], 'Beam ft': [], 'Heading': [], 'Course Behavior': [], 'Effective Beam ft': [], 'Class': [], 'Location': [], 'Yaw deg': [], 'Transit': [], '% Channel Occupied': []} for (key, value) in max_dict.items(): for k in columns.keys(): columns[k].append(ports[i][((ports[i].loc[:, 'Name'] == key[0]) & (ports[i].loc[:, 'VSPD kn'] == value))][k].iloc[0]) for key in columns.keys(): merged_speeds[key] = columns[key] merged_speeds = merged_speeds.reset_index() fold_res = merged_speeds fold_res.sort_values('Max Speed kn', ascending=False, inplace=True) return fold_res
-9,088,070,299,274,554,000
Reduces movement report to a DataFrame with a single entry for each vessel at the point of its maximum speed in the channel. Includes a column with the vessel's mean speed.
src/process_maritime_data.py
_fold_vmr
maritime-whale/maritime-whale
python
def _fold_vmr(ports, i): mean = pd.DataFrame(ports[i].groupby(['Name', 'MMSI'])['VSPD kn'].mean()).rename({'VSPD kn': 'Mean Speed kn'}, axis=1).round(1) maxes = pd.DataFrame(ports[i].groupby(['Name', 'MMSI'])['VSPD kn'].max()).rename({'VSPD kn': 'Max Speed kn'}, axis=1) merged_speeds = maxes.merge(mean, on=['Name', 'MMSI']) max_dict = merged_speeds.loc[:, 'Max Speed kn'].to_dict() columns = {'Longitude': [], 'Latitude': [], 'Date/Time UTC': [], 'LOA ft': [], 'Course': [], 'AIS Type': [], 'WSPD mph': [], 'GST mph': [], 'WDIR degT': [], 'Buoy Source': [], 'Beam ft': [], 'Heading': [], 'Course Behavior': [], 'Effective Beam ft': [], 'Class': [], 'Location': [], 'Yaw deg': [], 'Transit': [], '% Channel Occupied': []} for (key, value) in max_dict.items(): for k in columns.keys(): columns[k].append(ports[i][((ports[i].loc[:, 'Name'] == key[0]) & (ports[i].loc[:, 'VSPD kn'] == value))][k].iloc[0]) for key in columns.keys(): merged_speeds[key] = columns[key] merged_speeds = merged_speeds.reset_index() fold_res = merged_speeds fold_res.sort_values('Max Speed kn', ascending=False, inplace=True) return fold_res
def _add_channel_occ(ports, i): 'Creates the channel occupancy column.' channel_width = [[800, 400, 1000, 500], [600, 300, 600, 300]] for row in range(len(ports[i])): vessel_class = ports[i].loc[(row, 'Class')] transit_type = ports[i].loc[(row, 'Transit')] eff_beam = ports[i].loc[(row, 'Effective Beam ft')] if ((vessel_class == 'Post-Panamax') & (transit_type == 'One-way Transit')): occ = ((eff_beam / channel_width[i][0]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Post-Panamax') & (transit_type == 'Two-way Transit')): occ = ((eff_beam / channel_width[i][1]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Panamax') & (transit_type == 'One-way Transit')): occ = ((eff_beam / channel_width[i][2]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Panamax') & (transit_type == 'Two-way Transit')): occ = ((eff_beam / channel_width[i][3]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) else: sys.stderr.write(('Error: Undefined Class and ' + 'transit combination...\n')) ports[i].loc[(row, '% Channel Occupied')] = float('NaN') return ports[i]
-5,192,526,610,262,582,000
Creates the channel occupancy column.
src/process_maritime_data.py
_add_channel_occ
maritime-whale/maritime-whale
python
def _add_channel_occ(ports, i): channel_width = [[800, 400, 1000, 500], [600, 300, 600, 300]] for row in range(len(ports[i])): vessel_class = ports[i].loc[(row, 'Class')] transit_type = ports[i].loc[(row, 'Transit')] eff_beam = ports[i].loc[(row, 'Effective Beam ft')] if ((vessel_class == 'Post-Panamax') & (transit_type == 'One-way Transit')): occ = ((eff_beam / channel_width[i][0]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Post-Panamax') & (transit_type == 'Two-way Transit')): occ = ((eff_beam / channel_width[i][1]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Panamax') & (transit_type == 'One-way Transit')): occ = ((eff_beam / channel_width[i][2]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) elif ((vessel_class == 'Panamax') & (transit_type == 'Two-way Transit')): occ = ((eff_beam / channel_width[i][3]) * 100) ports[i].loc[(row, '% Channel Occupied')] = round(occ, 2) else: sys.stderr.write(('Error: Undefined Class and ' + 'transit combination...\n')) ports[i].loc[(row, '% Channel Occupied')] = float('NaN') return ports[i]
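A worked instance of the occupancy formula above, with hypothetical numbers:

eff_beam_ft = 180.0
channel_width_ft = 400.0  # Post-Panamax, two-way transit, first port (i == 0)
print(round(eff_beam_ft / channel_width_ft * 100, 2))  # 45.0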
def _add_vessel_class(df): "Creates 'Class' column based on vessel LOA ft." df.loc[:, 'Class'] = 'Panamax' post_row = (df.loc[:, 'LOA ft'] > 965) post_loc = df.loc[post_row, :].index post_pan = df.index.isin(post_loc) df.loc[(post_pan, 'Class')] = 'Post-Panamax' return df
-4,006,407,265,137,668,000
Creates 'Class' column based on vessel LOA ft.
src/process_maritime_data.py
_add_vessel_class
maritime-whale/maritime-whale
python
def _add_vessel_class(df): df.loc[:, 'Class'] = 'Panamax' post_row = (df.loc[:, 'LOA ft'] > 965) post_loc = df.loc[post_row, :].index post_pan = df.index.isin(post_loc) df.loc[(post_pan, 'Class')] = 'Post-Panamax' return df
def _course_behavior(df, ranges): "Creates 'Course Behavior' column based on channel-specific course ranges.\n " course_behavior = ('Outbound', 'Inbound') df = df[(((df.loc[:, 'Course'] >= ranges[0][0]) & (df.loc[:, 'Course'] <= ranges[0][1])) | ((df.loc[:, 'Course'] >= ranges[1][0]) & (df.loc[:, 'Course'] <= ranges[1][1])))] df.loc[:, 'Course'] = round(df.loc[:, 'Course']).astype('int') df.loc[:, 'Course Behavior'] = df.loc[:, 'Course'].copy() courses = {} for (behavior, bounds) in zip(course_behavior, ranges): lower_bound = bounds[0] upper_bound = bounds[1] for j in range(lower_bound, (upper_bound + 1)): courses[j] = behavior df.loc[:, 'Course Behavior'] = df.loc[:, 'Course Behavior'].replace(courses).astype('str') return df
2,992,146,113,907,691,000
Creates 'Course Behavior' column based on channel-specific course ranges.
src/process_maritime_data.py
_course_behavior
maritime-whale/maritime-whale
python
def _course_behavior(df, ranges): course_behavior = ('Outbound', 'Inbound') df = df[(((df.loc[:, 'Course'] >= ranges[0][0]) & (df.loc[:, 'Course'] <= ranges[0][1])) | ((df.loc[:, 'Course'] >= ranges[1][0]) & (df.loc[:, 'Course'] <= ranges[1][1])))] df.loc[:, 'Course'] = round(df.loc[:, 'Course']).astype('int') df.loc[:, 'Course Behavior'] = df.loc[:, 'Course'].copy() courses = {} for (behavior, bounds) in zip(course_behavior, ranges): lower_bound = bounds[0] upper_bound = bounds[1] for j in range(lower_bound, (upper_bound + 1)): courses[j] = behavior df.loc[:, 'Course Behavior'] = df.loc[:, 'Course Behavior'].replace(courses).astype('str') return df
def process_report(path): 'Processes data from vessel movement report. Adds data from wind buoys,\n performs meeting and passing analysis. Creates other relevant columns.\n\n Args:\n path: Relative path to raw vessel movement report (CSV).\n\n Returns:\n Two pairs of two DataFrames corresponding to the movement report.\n The first pair of DataFrames contains all vessel movements belonging to\n Charleston and Savannah, respectively. The second pair of DataFrames\n stores the vessel movement entries at which each vessel achieved\n its maximum speed. Again, the first DataFrame in the pair belongs to\n Charleston and the second DataFrame belongs to Savannah.\n ' blacklist = [int(mmsi) for mmsi in open('../cache/blacklist.txt', 'r').readlines()] df = pd.read_csv(path) df = _wrangle_vmr(df, {'DATETIME (UTC)': 'Date/Time UTC', 'NAME': 'Name', 'LATITUDE': 'Latitude', 'LONGITUDE': 'Longitude', 'SPEED': 'VSPD kn', 'COURSE': 'Course', 'HEADING': 'Heading', 'AIS TYPE': 'AIS Type'}) ch_course_ranges = ((100, 140), (280, 320)) sv_course_ranges = ((100, 160), (280, 340)) channel_midpoint = ((- 79.74169), (- 80.78522)) course_ranges = (ch_course_ranges, sv_course_ranges) ports = [None, None] buoys = [{'41004': None}, {'41008': None}] alt_buoys = [{'41008': None}, {'41004': None}] for i in range(len(ports)): ch_df = (df.loc[:, 'Latitude'] >= 32.033) sv_df = (df.loc[:, 'Latitude'] < 32.033) ports[i] = (df[ch_df] if (i == 0) else df[sv_df]) if (not len(ports[i])): empty = pd.DataFrame({'Date/Time UTC': [], 'Name': [], 'MMSI': [], 'Max Speed kn': [], 'Mean Speed kn': [], 'LOA ft': [], 'Beam ft': [], 'Class': [], 'AIS Type': [], 'Course': [], 'Heading': [], 'Course Behavior': [], 'Yaw deg': [], 'Effective Beam ft': [], 'WDIR degT': [], 'WSPD mph': [], 'GST mph': [], 'Buoy Source': [], 'Location': [], 'Latitude': [], 'Longitude': [], 'Transit': [], '% Channel Occupied': []}) ports[i] = [empty, empty] continue ports[i].loc[:, 'Location'] = 'Nearshore' off_row = (ports[i].loc[:, 'Longitude'] > channel_midpoint[i]) off_loc = ports[i].loc[off_row, :].index offshore_indices = ports[i].index.isin(off_loc) ports[i].loc[(offshore_indices, 'Location')] = 'Offshore' ports[i] = add_wind(ports, i, buoys, alt_buoys) ports[i] = _course_behavior(ports[i], course_ranges[i]) ports[i] = _add_vessel_class(ports[i]) ports[i].loc[:, 'Yaw deg'] = abs((ports[i].loc[:, 'Course'] - ports[i].loc[:, 'Heading'])) eff_beam = [] loa = ports[i].loc[:, 'LOA ft'].values beam = ports[i].loc[:, 'Beam ft'].values yaw = ports[i].loc[:, 'Yaw deg'].values for l in range(ports[i].shape[0]): eff_beam.append(round(((math.cos(math.radians((90 - yaw[l]))) * loa[l]) + (math.cos(math.radians(yaw[l])) * beam[l])))) ports[i].loc[:, 'Effective Beam ft'] = eff_beam ports[i].loc[:, 'Effective Beam ft'] = ports[i].loc[:, 'Effective Beam ft'].round(0) ports[i] = _filter_blacklisters(ports[i], blacklist) stamps = len(ports[i].loc[:, 'Date/Time UTC']) round_times = [ports[i].loc[:, 'Date/Time UTC'].iloc[ii].floor('Min') for ii in range(stamps)] ports[i].loc[:, 'rounded date'] = round_times mp = meetpass(ports[i]) two_way = twoway(ports[i], mp) ports[i].loc[:, 'Transit'] = 'One-way Transit' if (not isinstance(two_way, type(None))): two_way_indices = ports[i].index.isin(two_way.index) ports[i].loc[(two_way_indices, 'Transit')] = 'Two-way Transit' ports[i] = ports[i].reset_index() ports[i] = _add_channel_occ(ports, i) all_res = ports[i] if (i % 2): all_res = all_res[(((all_res.loc[:, 'Latitude'] <= 32.02838) & (all_res.loc[:, 'Latitude'] >= 31.9985)) | (all_res.loc[:, 'Latitude'] <= 31.99183))] else: all_res = all_res[(all_res.loc[:, 'Latitude'] >= 32.667473)] fold_res = _fold_vmr(ports, i) fold_res = fold_res.loc[:, ('Date/Time UTC', 'Name', 'MMSI', 'Max Speed kn', 'Mean Speed kn', 'LOA ft', 'Beam ft', 'Class', 'AIS Type', 'Course', 'Heading', 'Course Behavior', 'Yaw deg', 'Effective Beam ft', 'WDIR degT', 'WSPD mph', 'GST mph', 'Buoy Source', 'Location', 'Latitude', 'Longitude', 'Transit', '% Channel Occupied')] all_res = all_res.loc[:, ('Name', 'MMSI', 'VSPD kn', 'WSPD mph', 'Transit', '% Channel Occupied', 'Yaw deg', 'Effective Beam ft', 'LOA ft', 'Beam ft', 'Class', 'AIS Type', 'Course', 'Heading', 'Course Behavior', 'WDIR degT', 'GST mph', 'Buoy Source', 'Location', 'Latitude', 'Longitude', 'Date/Time UTC')] ports[i] = [fold_res, all_res] return (ports[0], ports[1])
8,551,263,354,269,532,000
Processes data from vessel movement report. Adds data from wind buoys, performs meeting and passing analysis. Creates other relevant columns. Args: path: Relative path to raw vessel movement report (CSV). Returns: Two pairs of two DataFrames corresponding to the movement report. The first pair of DataFrames contains all vessel movements belonging to Charleston and Savannah, respectively. The second pair of DataFrames stores the vessel movement entries at which each vessel achieved its maximum speed. Again, the first DataFrame in the pair belongs to Charleston and the second DataFrame belongs to Savannah.
src/process_maritime_data.py
process_report
maritime-whale/maritime-whale
python
def process_report(path): blacklist = [int(mmsi) for mmsi in open('../cache/blacklist.txt', 'r').readlines()] df = pd.read_csv(path) df = _wrangle_vmr(df, {'DATETIME (UTC)': 'Date/Time UTC', 'NAME': 'Name', 'LATITUDE': 'Latitude', 'LONGITUDE': 'Longitude', 'SPEED': 'VSPD kn', 'COURSE': 'Course', 'HEADING': 'Heading', 'AIS TYPE': 'AIS Type'}) ch_course_ranges = ((100, 140), (280, 320)) sv_course_ranges = ((100, 160), (280, 340)) channel_midpoint = ((- 79.74169), (- 80.78522)) course_ranges = (ch_course_ranges, sv_course_ranges) ports = [None, None] buoys = [{'41004': None}, {'41008': None}] alt_buoys = [{'41008': None}, {'41004': None}] for i in range(len(ports)): ch_df = (df.loc[:, 'Latitude'] >= 32.033) sv_df = (df.loc[:, 'Latitude'] < 32.033) ports[i] = (df[ch_df] if (i == 0) else df[sv_df]) if (not len(ports[i])): empty = pd.DataFrame({'Date/Time UTC': [], 'Name': [], 'MMSI': [], 'Max Speed kn': [], 'Mean Speed kn': [], 'LOA ft': [], 'Beam ft': [], 'Class': [], 'AIS Type': [], 'Course': [], 'Heading': [], 'Course Behavior': [], 'Yaw deg': [], 'Effective Beam ft': [], 'WDIR degT': [], 'WSPD mph': [], 'GST mph': [], 'Buoy Source': [], 'Location': [], 'Latitude': [], 'Longitude': [], 'Transit': [], '% Channel Occupied': []}) ports[i] = [empty, empty] continue ports[i].loc[:, 'Location'] = 'Nearshore' off_row = (ports[i].loc[:, 'Longitude'] > channel_midpoint[i]) off_loc = ports[i].loc[off_row, :].index offshore_indices = ports[i].index.isin(off_loc) ports[i].loc[(offshore_indices, 'Location')] = 'Offshore' ports[i] = add_wind(ports, i, buoys, alt_buoys) ports[i] = _course_behavior(ports[i], course_ranges[i]) ports[i] = _add_vessel_class(ports[i]) ports[i].loc[:, 'Yaw deg'] = abs((ports[i].loc[:, 'Course'] - ports[i].loc[:, 'Heading'])) eff_beam = [] loa = ports[i].loc[:, 'LOA ft'].values beam = ports[i].loc[:, 'Beam ft'].values yaw = ports[i].loc[:, 'Yaw deg'].values for l in range(ports[i].shape[0]): eff_beam.append(round(((math.cos(math.radians((90 - yaw[l]))) * loa[l]) + (math.cos(math.radians(yaw[l])) * beam[l])))) ports[i].loc[:, 'Effective Beam ft'] = eff_beam ports[i].loc[:, 'Effective Beam ft'] = ports[i].loc[:, 'Effective Beam ft'].round(0) ports[i] = _filter_blacklisters(ports[i], blacklist) stamps = len(ports[i].loc[:, 'Date/Time UTC']) round_times = [ports[i].loc[:, 'Date/Time UTC'].iloc[ii].floor('Min') for ii in range(stamps)] ports[i].loc[:, 'rounded date'] = round_times mp = meetpass(ports[i]) two_way = twoway(ports[i], mp) ports[i].loc[:, 'Transit'] = 'One-way Transit' if (not isinstance(two_way, type(None))): two_way_indices = ports[i].index.isin(two_way.index) ports[i].loc[(two_way_indices, 'Transit')] = 'Two-way Transit' ports[i] = ports[i].reset_index() ports[i] = _add_channel_occ(ports, i) all_res = ports[i] if (i % 2): all_res = all_res[(((all_res.loc[:, 'Latitude'] <= 32.02838) & (all_res.loc[:, 'Latitude'] >= 31.9985)) | (all_res.loc[:, 'Latitude'] <= 31.99183))] else: all_res = all_res[(all_res.loc[:, 'Latitude'] >= 32.667473)] fold_res = _fold_vmr(ports, i) fold_res = fold_res.loc[:, ('Date/Time UTC', 'Name', 'MMSI', 'Max Speed kn', 'Mean Speed kn', 'LOA ft', 'Beam ft', 'Class', 'AIS Type', 'Course', 'Heading', 'Course Behavior', 'Yaw deg', 'Effective Beam ft', 'WDIR degT', 'WSPD mph', 'GST mph', 'Buoy Source', 'Location', 'Latitude', 'Longitude', 'Transit', '% Channel Occupied')] all_res = all_res.loc[:, ('Name', 'MMSI', 'VSPD kn', 'WSPD mph', 'Transit', '% Channel Occupied', 'Yaw deg', 'Effective Beam ft', 'LOA ft', 'Beam ft', 'Class', 'AIS Type', 'Course', 'Heading', 'Course Behavior', 'WDIR degT', 'GST mph', 'Buoy Source', 'Location', 'Latitude', 'Longitude', 'Date/Time UTC')] ports[i] = [fold_res, all_res] return (ports[0], ports[1])
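The effective-beam projection used inside process_report, in isolation (hypothetical vessel dimensions):

import math

loa, beam, yaw = 965.0, 106.0, 10.0  # LOA ft, Beam ft, Yaw deg
eff_beam = (math.cos(math.radians(90 - yaw)) * loa
            + math.cos(math.radians(yaw)) * beam)
print(round(eff_beam))  # 272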
def toggleCons(state): "\n\tToggle the display state of all joint buffers ('Con') in the scene\n\t@param state: The display state to set the joint buffers to\n\t@type state: bool\n\t" conList = mc.ls('*Con*_jnt', type='joint') for conJnt in conList: if state: glTools.utils.base.displayOverride(conJnt, overrideEnable=1, overrideLOD=0) mc.setAttr((conJnt + '.drawStyle'), 0) else: glTools.utils.base.displayOverride(conJnt, overrideEnable=1, overrideLOD=1) mc.setAttr((conJnt + '.drawStyle'), 2) if mc.getAttr((conJnt + '.radius'), se=True): mc.setAttr((conJnt + '.radius'), 0.0) mc.setAttr((conJnt + '.radius'), cb=False) if mc.getAttr((conJnt + '.ro'), se=True): mc.setAttr((conJnt + '.ro'), cb=False) return conList
-961,874,778,851,118,700
Toggle the display state of all joint buffers ('Con') in the scene @param state: The display state to set the joint buffers to @type state: bool
utils/cleanup.py
toggleCons
Lynn5160/glTools
python
def toggleCons(state): conList = mc.ls('*Con*_jnt', type='joint') for conJnt in conList: if state: glTools.utils.base.displayOverride(conJnt, overrideEnable=1, overrideLOD=0) mc.setAttr((conJnt + '.drawStyle'), 0) else: glTools.utils.base.displayOverride(conJnt, overrideEnable=1, overrideLOD=1) mc.setAttr((conJnt + '.drawStyle'), 2) if mc.getAttr((conJnt + '.radius'), se=True): mc.setAttr((conJnt + '.radius'), 0.0) mc.setAttr((conJnt + '.radius'), cb=False) if mc.getAttr((conJnt + '.ro'), se=True): mc.setAttr((conJnt + '.ro'), cb=False) return conList
def toggleEnds(state): "\n\tToggle the display state of all end joints ('End_jnt') in the scene\n\t@param state: The display state to set the end joints to\n\t@type state: bool\n\t" endList = mc.ls('*End_jnt', type='joint') for endJnt in endList: if state: glTools.utils.base.displayOverride(endJnt, overrideEnable=1, overrideLOD=0) mc.setAttr((endJnt + '.drawStyle'), 0) else: glTools.utils.base.displayOverride(endJnt, overrideEnable=1, overrideLOD=1) mc.setAttr((endJnt + '.drawStyle'), 2) if mc.getAttr((endJnt + '.radius'), se=True): mc.setAttr((endJnt + '.radius'), 0.0) mc.setAttr((endJnt + '.radius'), cb=False) if mc.getAttr((endJnt + '.ro'), se=True): mc.setAttr((endJnt + '.ro'), cb=False) return endList
-2,402,898,115,426,025,000
Toggle the display state of all end joints ('End_jnt') in the scene @param state: The display state to set the end joints to @type state: bool
utils/cleanup.py
toggleEnds
Lynn5160/glTools
python
def toggleEnds(state): endList = mc.ls('*End_jnt', type='joint') for endJnt in endList: if state: glTools.utils.base.displayOverride(endJnt, overrideEnable=1, overrideLOD=0) mc.setAttr((endJnt + '.drawStyle'), 0) else: glTools.utils.base.displayOverride(endJnt, overrideEnable=1, overrideLOD=1) mc.setAttr((endJnt + '.drawStyle'), 2) if mc.getAttr((endJnt + '.radius'), se=True): mc.setAttr((endJnt + '.radius'), 0.0) mc.setAttr((endJnt + '.radius'), cb=False) if mc.getAttr((endJnt + '.ro'), se=True): mc.setAttr((endJnt + '.ro'), cb=False) return endList
def disableDrawingOverrides(grp): '\n\tDisable drawing overrides for all DAG descendants of the specified transform node.\n\t@param grp: The transform under which all descendant node drawing overrides will be disabled.\n\t@type grp: str\n\t' if (not mc.objExists(grp)): raise Exception((('Transform "' + grp) + '" does not exist!')) if (not glTools.utils.transform.isTransform(grp)): raise Exception((('Object "' + grp) + '" is not a valid transform!')) nodeList = (mc.ls((mc.listRelatives(grp, ad=True, pa=True) or []), dag=True) or []) if (not nodeList): return [] overrideName = 'overrideEnabled' for node in nodeList: overrideAttr = ((node + '.') + overrideName) if (not mc.attributeQuery(overrideName, n=node, ex=True)): print((('Override attribute "' + overrideAttr) + '" does not exist! Skipping...')) continue overrideConn = (mc.listConnections(overrideAttr, s=True, d=False) or []) if overrideConn: print((((('Found incoming connection for override attribute "' + overrideAttr) + '"! (') + overrideConn[0]) + ')')) print('Disconnecting attribute and disabling drawing overrides...') mc.disconnectAttr(overrideConn[0], overrideAttr) try: mc.setAttr(overrideAttr, 0) except: pass return nodeList
3,805,511,487,215,523,300
Disable drawing overrides for all DAG descendants of the specified transform node. @param grp: The transform under which all descendant node drawing overrides will be disabled. @type grp: str
utils/cleanup.py
disableDrawingOverrides
Lynn5160/glTools
python
def disableDrawingOverrides(grp): if (not mc.objExists(grp)): raise Exception((('Transform "' + grp) + '" does not exist!')) if (not glTools.utils.transform.isTransform(grp)): raise Exception((('Object "' + grp) + '" is not a valid transform!')) nodeList = (mc.ls((mc.listRelatives(grp, ad=True, pa=True) or []), dag=True) or []) if (not nodeList): return [] overrideName = 'overrideEnabled' for node in nodeList: overrideAttr = ((node + '.') + overrideName) if (not mc.attributeQuery(overrideName, n=node, ex=True)): print((('Override attribute "' + overrideAttr) + '" does not exist! Skipping...')) continue overrideConn = (mc.listConnections(overrideAttr, s=True, d=False) or []) if overrideConn: print((((('Found incoming connection for override attribute "' + overrideAttr) + '"! (') + overrideConn[0]) + ')')) print('Disconnecting attribute and disabling drawing overrides...') mc.disconnectAttr(overrideConn[0], overrideAttr) try: mc.setAttr(overrideAttr, 0) except: pass return nodeList
def uniqueNameCheck(objList=[], transformsOnly=False): '\n\tReturn a list of nodes with non unique names\n\t@param objList: List of scene objects to check. If empty, use all existing scene nodes.\n\t@type objList: list\n\t@param transformsOnly: Check transform names only\n\t@type transformsOnly: bool\n\t' if (not objList): objList = mc.ls() if transformsOnly: nodeList = mc.ls(objList, transforms=True) else: nodeList = mc.ls(objList, dag=True) nonUniqueList = [i for i in nodeList if i.count('|')] return nonUniqueList
4,268,287,513,546,254,300
Return a list of nodes with non unique names @param objList: List of scene objects to check. If empty, use all existing scene nodes. @type objList: list @param transformsOnly: Check transform names only @type transformsOnly: bool
utils/cleanup.py
uniqueNameCheck
Lynn5160/glTools
python
def uniqueNameCheck(objList=[], transformsOnly=False): if (not objList): objList = mc.ls() if transformsOnly: nodeList = mc.ls(objList, transforms=True) else: nodeList = mc.ls(objList, dag=True) nonUniqueList = [i for i in nodeList if i.count('|')] return nonUniqueList
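Why counting '|' works: Maya's ls returns a full DAG path only when a node's short name is not unique in the scene. A plain-Python illustration with hypothetical names:

names = ['top', 'grpA|ball', 'grpB|ball']  # e.g. two joints both named 'ball'
print([n for n in names if n.count('|')])  # ['grpA|ball', 'grpB|ball']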
def validNameCheck(objList=[]): '\n\tCheck for valid names in the specified list of nodes\n\t@param objList: List of objects to check valid names for. If empty use all scene transforms\n\t@type objList: list\n\t' if (not objList): objList = mc.ls() if (not objList): return [] defNodes = ['dof1', 'time1', 'lambert1', 'postProcessList1', 'sequenceManager1', 'lightLinker1', 'renderGlobalsList1', 'dynController1', 'lightList1', 'particleCloud1', 'shaderGlow1'] objList = [obj for obj in objList if (not defNodes.count(obj))] objList = [obj for obj in objList if (not obj.startswith('default'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectTypeFilter'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectNameFilter'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectScriptFilter'))] result = [] for obj in objList: if obj.count('pasted'): result.append(obj) if obj.count('poly'): result.append(obj) if obj.count('__'): result.append(obj) digitSearch = re.search('(\\d+)$', obj) if (digitSearch and glTools.utils.transform.isTransform(obj)): if digitSearch.group(0): result.append(obj) result = list(set(result)) return result
452,050,471,396,400,640
Check for valid names in the specified list of nodes @param objList: List of objects to check valid names for. If empty use all scene transforms @type objList: list
utils/cleanup.py
validNameCheck
Lynn5160/glTools
python
def validNameCheck(objList=[]): if (not objList): objList = mc.ls() if (not objList): return [] defNodes = ['dof1', 'time1', 'lambert1', 'postProcessList1', 'sequenceManager1', 'lightLinker1', 'renderGlobalsList1', 'dynController1', 'lightList1', 'particleCloud1', 'shaderGlow1'] objList = [obj for obj in objList if (not defNodes.count(obj))] objList = [obj for obj in objList if (not obj.startswith('default'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectTypeFilter'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectNameFilter'))] objList = [obj for obj in objList if (not (mc.nodeType(obj) == 'objectScriptFilter'))] result = [] for obj in objList: if obj.count('pasted'): result.append(obj) if obj.count('poly'): result.append(obj) if obj.count('__'): result.append(obj) digitSearch = re.search('(\\d+)$', obj) if (digitSearch and glTools.utils.transform.isTransform(obj)): if digitSearch.group(0): result.append(obj) result = list(set(result)) return result
def shapeNameCheck(objList=[], typeList=['mesh', 'nurbsCurve', 'nurbsSurface'], skipIntermediates=True, skipMultipleShapes=False, strict=True): '\n\tReturn a list of incorrectly named geometry shape nodes.\n\t@param objList: List of objects to check for valid shape names. If empty, get all nodes of the specified type.\n\t@type objList: list\n\t@param typeList: List of shape types to check for valid names.\n\t@type typeList: list\n\t@param skipIntermediates: Skip intermediate shapes.\n\t@type skipIntermediates: bool\n\t@param skipMultipleShapes: Skip objects with multiple shape nodes.\n\t@type skipMultipleShapes: bool\n\t@param strict: Shape name must match parent+"Shape" to pass.\n\t@type strict: bool\n\t' if (not objList): objList = mc.ls(type=typeList) shapeList = [] for obj in objList: if glTools.utils.transform.isTransform(obj): objShapes = mc.listRelatives(obj, s=True, pa=True) if (not objShapes): continue if ((len(objShapes) > 1) and skipMultipleShapes): continue tShapeList = mc.listRelatives(obj, s=True, ni=skipIntermediates, pa=True) for shape in tShapeList: shapeList.append(obj) elif glTools.utils.shape.isShape(obj): shapeList.append(obj) else: print((('Unable to determine shape from object "' + obj) + '"! Skipping...')) invalidShapeNameList = [] for shape in shapeList: if (not typeList.count(mc.objectType(shape))): continue if (skipIntermediates and mc.getAttr((shape + '.intermediateObject'))): continue parent = mc.listRelatives(shape, p=True, pa=True)[0] shapeSN = mc.ls(shape, sn=True)[0] parentSN = mc.ls(parent, sn=True)[0] if (strict and (shape != (parent + 'Shape'))): invalidShapeNameList.append(shape) if (not shapeSN.startswith(parentSN)): invalidShapeNameList.append(shape) elif (not shapeSN.count('Shape')): invalidShapeNameList.append(shape) return invalidShapeNameList
1,147,560,374,976,238,800
Return a list of incorrectly named geometry shape nodes. @param objList: List of objects to check for valid shape names. If empty, get all nodes of the specified type. @type objList: list @param typeList: List of shape types to check for valid names. @type typeList: list @param skipIntermediates: Skip intermediate shapes. @type skipIntermediates: bool @param skipMultipleShapes: Skip objects with multiple shape nodes. @type skipMultipleShapes: bool @param strict: Shape name must match parent+"Shape" to pass. @type strict: bool
utils/cleanup.py
shapeNameCheck
Lynn5160/glTools
python
def shapeNameCheck(objList=[], typeList=['mesh', 'nurbsCurve', 'nurbsSurface'], skipIntermediates=True, skipMultipleShapes=False, strict=True): if (not objList): objList = mc.ls(type=typeList) shapeList = [] for obj in objList: if glTools.utils.transform.isTransform(obj): objShapes = mc.listRelatives(obj, s=True, pa=True) if (not objShapes): continue if ((len(objShapes) > 1) and skipMultipleShapes): continue tShapeList = mc.listRelatives(obj, s=True, ni=skipIntermediates, pa=True) for shape in tShapeList: shapeList.append(obj) elif glTools.utils.shape.isShape(obj): shapeList.append(obj) else: print((('Unable to determine shape from object "' + obj) + '"! Skipping...')) invalidShapeNameList = [] for shape in shapeList: if (not typeList.count(mc.objectType(shape))): continue if (skipIntermediates and mc.getAttr((shape + '.intermediateObject'))): continue parent = mc.listRelatives(shape, p=True, pa=True)[0] shapeSN = mc.ls(shape, sn=True)[0] parentSN = mc.ls(parent, sn=True)[0] if (strict and (shape != (parent + 'Shape'))): invalidShapeNameList.append(shape) if (not shapeSN.startswith(parentSN)): invalidShapeNameList.append(shape) elif (not shapeSN.count('Shape')): invalidShapeNameList.append(shape) return invalidShapeNameList
def intermediateShapesCheck(objList=[]): '\n\tReturn a list of intermediate shapes.\n\t@param objList: List of objects to check for intermediate shapes.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, transforms=True) result = [] for obj in objList: shapes = mc.listRelatives(obj, s=True, pa=True) if (not shapes): shapes = [] for shape in shapes: if mc.objExists((shape + '.intermediateObject')): if mc.getAttr((shape + '.intermediateObject')): result.append(shape) return result
2,082,085,046,468,773,400
Return a list of intermediate shapes. @param objList: List of objects to check for intermediate shapes. @type objList: list
utils/cleanup.py
intermediateShapesCheck
Lynn5160/glTools
python
def intermediateShapesCheck(objList=[]): if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, transforms=True) result = [] for obj in objList: shapes = mc.listRelatives(obj, s=True, pa=True) if (not shapes): shapes = [] for shape in shapes: if mc.objExists((shape + '.intermediateObject')): if mc.getAttr((shape + '.intermediateObject')): result.append(shape) return result
def multipleShapeCheck(objList=[]): '\n\tReturn a list of transforms with multiple shape nodes\n\t@param objList: List of objects to check for multiple shapes.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, dag=True) result = [] for transform in objList: if (not glTools.utils.transform.isTransform(transform)): transform = mc.listRelatives(transform, p=True)[0] shapeList = mc.listRelatives(transform, s=True) if (not shapeList): continue shapeList = mc.ls(shapeList, type=['mesh', 'nurbsSurface', 'nurbsCurve']) if (len(shapeList) > 1): result.append(transform) return result
-4,158,443,164,518,131,000
Return a list of transforms with multiple shape nodes @param objList: List of objects to check for multiple shapes. @type objList: list
utils/cleanup.py
multipleShapeCheck
Lynn5160/glTools
python
def multipleShapeCheck(objList=[]): if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, dag=True) result = [] for transform in objList: if (not glTools.utils.transform.isTransform(transform)): transform = mc.listRelatives(transform, p=True)[0] shapeList = mc.listRelatives(transform, s=True) if (not shapeList): continue shapeList = mc.ls(shapeList, type=['mesh', 'nurbsSurface', 'nurbsCurve']) if (len(shapeList) > 1): result.append(transform) return result
def constructionHistoryCheck(geoList=[]): '\n\tReturn a list of nodes that contain construction history\n\t@param geoList: List of objects to check for construction history.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) else: geoList = mc.listRelatives(geoList, s=True, pa=True) result = [] for geo in geoList: hist = mc.listHistory(geo) if hist.count(geo): hist.remove(geo) ignore = mc.ls(hist, type=['groupId', 'shadingEngine', 'transform']) hist = list((set(hist) - set(ignore))) if hist: obj = mc.listRelatives(geo, p=True, pa=True) result.extend(obj) if result: result = list(set(result)) return result
-5,603,059,709,136,841,000
Return a list of nodes that contain construction history @param geoList: List of objects to check for construction history. @type geoList: list
utils/cleanup.py
constructionHistoryCheck
Lynn5160/glTools
python
def constructionHistoryCheck(geoList=[]): '\n\tReturn a list of nodes that contain construction history\n\t@param geoList: List of objects to check for construction history.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) else: geoList = mc.listRelatives(geoList, s=True, pa=True) result = [] for geo in geoList: hist = mc.listHistory(geo) if hist.count(geo): hist.remove(geo) ignore = mc.ls(hist, type=['groupId', 'shadingEngine', 'transform']) hist = list((set(hist) - set(ignore))) if hist: obj = mc.listRelatives(geo, p=True, pa=True) result.extend(obj) if result: result = list(set(result)) return result
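A short sketch of reviewing history-bearing geometry flagged by the check above (same Maya/glTools assumptions as earlier):

import maya.cmds as mc
import glTools.utils.cleanup as cleanup

for geo in cleanup.constructionHistoryCheck():
    # listHistory includes the node itself; the check already filters trivial entries.
    print('%s has construction history: %s' % (geo, mc.listHistory(geo)))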
def userAttrCheck(objList=[], includeShapes=False): '\n\tReturn a list of user defined attributes for a specified list of nodes (and shapes).\n\t@param objList: List of objects to check for user defined attributes.\n\t@type objList: list\n\t@param includeShapes: Also check shapes for user defined attributes.\n\t@type includeShapes: bool\n\t' result = [] if (not objList): objList = mc.ls() for obj in objList: userAttrs = mc.listAttr(obj, ud=True) if (not userAttrs): userAttrs = [] for attr in userAttrs: result.append(((obj + '.') + attr)) if includeShapes: shapes = mc.listRelatives(obj, s=True) if (not shapes): shapes = [] for shape in shapes: userAttrs = mc.listAttr(shape, ud=True) if (not userAttrs): userAttrs = [] for attr in userAttrs: result.append(((shape + '.') + attr)) return result
5,971,153,598,812,576,000
Return a list of user defined attributes for a specified list of nodes (and shapes). @param objList: List of objects to check for user defined attributes. @type objList: list @param includeShapes: Also check shapes for user defined attributes. @type includeShapes: bool
utils/cleanup.py
userAttrCheck
Lynn5160/glTools
python
def userAttrCheck(objList=[], includeShapes=False): '\n\tReturn a list of user defined attributes for a specified list of nodes (and shapes).\n\t@param objList: List of objects to check for user defined attributes.\n\t@type objList: list\n\t@param includeShapes: Also check shapes for user defined attributes.\n\t@type includeShapes: bool\n\t' result = [] if (not objList): objList = mc.ls() for obj in objList: userAttrs = mc.listAttr(obj, ud=True) if (not userAttrs): userAttrs = [] for attr in userAttrs: result.append(((obj + '.') + attr)) if includeShapes: shapes = mc.listRelatives(obj, s=True) if (not shapes): shapes = [] for shape in shapes: userAttrs = mc.listAttr(shape, ud=True) if (not userAttrs): userAttrs = [] for attr in userAttrs: result.append(((shape + '.') + attr)) return result
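A sketch that prints the flagged user attributes with their current values; getAttr can fail on some compound or message attributes, hence the guard:

import maya.cmds as mc
import glTools.utils.cleanup as cleanup

for attrPath in cleanup.userAttrCheck(includeShapes=True):
    try:
        print('%s = %s' % (attrPath, mc.getAttr(attrPath)))
    except Exception:
        print('%s (value not readable)' % attrPath)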
def emptyGroupCheck(objList=[]): '\n\tList empty groups.\n\t@param objList: List of transforms to check.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, transforms=True) result = [] for grp in objList: if (not mc.listRelatives(grp, ad=True)): result.append(grp) return result
-2,007,782,837,976,040,000
List empty groups. @param objList: List of transforms to check. @type objList: list
utils/cleanup.py
emptyGroupCheck
Lynn5160/glTools
python
def emptyGroupCheck(objList=[]): '\n\tList empty groups.\n\t@param objList: List of transforms to check.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) else: objList = mc.ls(objList, transforms=True) result = [] for grp in objList: if (not mc.listRelatives(grp, ad=True)): result.append(grp) return result
def emptySetCheck(setList=[]): '\n\tReturn a list of empty sets\n\t@param setList: List of sets to check.\n\t@type setList: list\n\t' if (not setList): setList = mc.ls(sets=True) result = [] for setName in setList: if (not mc.ls(setName, sets=True)): continue if setName.startswith('default'): continue if setName.startswith('initial'): continue if (not mc.sets(setName, q=True)): result.append(setName) return result
-3,456,770,229,150,419,500
Return a list of empty sets @param setList: List of sets to check. @type setList: list
utils/cleanup.py
emptySetCheck
Lynn5160/glTools
python
def emptySetCheck(setList=[]): '\n\tReturn a list of empty sets\n\t@param setList: List of sets to check.\n\t@type setList: list\n\t' if (not setList): setList = mc.ls(sets=True) result = [] for setName in setList: if (not mc.ls(setName, sets=True)): continue if setName.startswith('default'): continue if setName.startswith('initial'): continue if (not mc.sets(setName, q=True)): result.append(setName) return result
def emptyLayerCheck(layerList=[]): '\n\tReturn a list of empty layers\n\t@param layerList: List of layers to check. If empty, use all existing layers in current scene.\n\t@type layerList: list\n\t' if (not layerList): layerList = mc.ls(type=['displayLayer', 'renderLayer', 'animLayer']) else: layerList = mc.ls(layerList, type=['displayLayer', 'renderLayer', 'animLayer']) result = [] for layer in layerList: if (not mc.ls(layer, type=['displayLayer', 'renderLayer', 'animLayer'])): continue if layer.startswith('default'): continue if (not glTools.utils.layer.memberList(layer)): result.append(layer) return result
3,768,880,266,767,983,600
Return a list of empty layers @param layerList: List of layers to check. If empty, use all existing layers in current scene. @type layerList: list
utils/cleanup.py
emptyLayerCheck
Lynn5160/glTools
python
def emptyLayerCheck(layerList=[]): '\n\tReturn a list of empty layers\n\t@param layerList: List of layers to check. If empty, use all existing layers in current scene.\n\t@type layerList: list\n\t' if (not layerList): layerList = mc.ls(type=['displayLayer', 'renderLayer', 'animLayer']) else: layerList = mc.ls(layerList, type=['displayLayer', 'renderLayer', 'animLayer']) result = [] for layer in layerList: if (not mc.ls(layer, type=['displayLayer', 'renderLayer', 'animLayer'])): continue if layer.startswith('default'): continue if (not glTools.utils.layer.memberList(layer)): result.append(layer) return result
def animCurveCheck(curveTypeList=['animCurveTL', 'animCurveTA', 'animCurveTT', 'animCurveTU', 'animCurveUL', 'animCurveUA', 'animCurveUT', 'animCurveUU']): '\n\tReturn a list of all existing animCurves of the specified types.\n\t@param curveTypeList: List of animCurve types to consider.\n\t@type curveTypeList: list\n\t' animCurves = [] for curveType in curveTypeList: curveList = mc.ls(type=curveType) if curveList: animCurves.extend(curveList) return animCurves
9,166,902,759,146,648,000
Return a list of all existing animCurves of the specified types. @param curveTypeList: List of animCurve types to consider. @type curveTypeList: list
utils/cleanup.py
animCurveCheck
Lynn5160/glTools
python
def animCurveCheck(curveTypeList=['animCurveTL', 'animCurveTA', 'animCurveTT', 'animCurveTU', 'animCurveUL', 'animCurveUA', 'animCurveUT', 'animCurveUU']): '\n\tReturn a list of all existing animCurves of the specified types.\n\t@param curveTypeList: List of animCurve types to consider.\n\t@type curveTypeList: list\n\t' animCurves = [] for curveType in curveTypeList: curveList = mc.ls(type=curveType) if curveList: animCurves.extend(curveList) return animCurves
def unusedShadingNodeCheck(): '\n\tReturn a list of unused shading nodes.\n\t' return glTools.utils.shader.listUnusedShadingNodes()
4,373,095,908,657,191,000
Return a list of unused shading nodes.
utils/cleanup.py
unusedShadingNodeCheck
Lynn5160/glTools
python
def unusedShadingNodeCheck(): '\n\t\n\t' return glTools.utils.shader.listUnusedShadingNodes()
def noGeometryShaderCheck(geoList=[]): '\n\tReturn a list of non intermediate geometry shapes with no shader assignment.\n\t@param geoList: List of geometry to check for shader assignments.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(type=['mesh', 'nurbsSurface'], ni=True) else: geoList += (mc.ls((mc.listRelatives(geoList, ad=True, pa=True) or []), type=['mesh', 'nurbsSurface'], ni=True) or []) geoList = mc.ls(geoList, type=['mesh', 'nurbsSurface'], ni=True) noShaderList = [] for geo in geoList: SG = glTools.utils.shader.getSG(geo) if (not SG): noShaderList.append(geo) return noShaderList
122,524,147,550,964,450
Return a list of non intermediate geometry shapes with no shader assignment. @param geoList: List of geometry to check for shader assignments. @type geoList: list
utils/cleanup.py
noGeometryShaderCheck
Lynn5160/glTools
python
def noGeometryShaderCheck(geoList=[]): '\n\tReturn a list of non intermediate geometry shapes with no shader assignment.\n\t@param geoList: List of geometry to check for shader assignments.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(type=['mesh', 'nurbsSurface'], ni=True) else: geoList += (mc.ls((mc.listRelatives(geoList, ad=True, pa=True) or []), type=['mesh', 'nurbsSurface'], ni=True) or []) geoList = mc.ls(geoList, type=['mesh', 'nurbsSurface'], ni=True) noShaderList = [] for geo in geoList: SG = glTools.utils.shader.getSG(geo) if (not SG): noShaderList.append(geo) return noShaderList
def unusedReferenceCheck(): '\n\tReturn a list of unused reference nodes.\n\t' result = [] refList = glTools.utils.reference.listReferences() for ref in refList: try: refFile = glTools.utils.reference.getReferenceFile(ref) except: result.append(ref) return result
4,255,924,351,814,690,000
Return a list of unused reference nodes.
utils/cleanup.py
unusedReferenceCheck
Lynn5160/glTools
python
def unusedReferenceCheck(): '\n\t\n\t' result = [] refList = glTools.utils.reference.listReferences() for ref in refList: try: refFile = glTools.utils.reference.getReferenceFile(ref) except: result.append(ref) return result
def unknownNodeCheck(): '\n\tReturn a list of unknown nodes.\n\t' result = mc.ls(type='unknown') if (not result): result = [] return result
5,835,212,241,212,312,000
Return a list of unknown nodes.
utils/cleanup.py
unknownNodeCheck
Lynn5160/glTools
python
def unknownNodeCheck(): '\n\t\n\t' result = mc.ls(type='unknown') if (not result): result = [] return result
def checkTransforms(objList=[], tol=1e-10): '\n\tCheck for non-zero transforms\n\t@param objList: List of transforms to check.\n\t@type objList: list\n\t@param tol: Value tolerance.\n\t@type tol: float\n\t' if (not objList): objList = mc.ls(transforms=True) if (not objList): return [] transformList = [] for obj in objList: if (obj == 'persp'): continue if (obj == 'front'): continue if (obj == 'side'): continue if (obj == 'top'): continue if (abs(mc.getAttr((obj + '.tx'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.ty'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.tz'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.rx'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.ry'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.rz'))) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sx')) - 1.0)) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sy')) - 1.0)) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sz')) - 1.0)) > tol): transformList.append(obj) continue return transformList
-8,042,067,614,690,949,000
Check for non-zero transforms @param objList: List of transforms to check. @type objList: list @param tol: Value tolerance. @type tol: float
utils/cleanup.py
checkTransforms
Lynn5160/glTools
python
def checkTransforms(objList=[], tol=1e-10): '\n\tCheck for non-zero transforms\n\t@param objList: List of transforms to check.\n\t@type objList: list\n\t@param tol: Value tolerance.\n\t@type tol: float\n\t' if (not objList): objList = mc.ls(transforms=True) if (not objList): return [] transformList = [] for obj in objList: if (obj == 'persp'): continue if (obj == 'front'): continue if (obj == 'side'): continue if (obj == 'top'): continue if (abs(mc.getAttr((obj + '.tx'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.ty'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.tz'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.rx'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.ry'))) > tol): transformList.append(obj) continue if (abs(mc.getAttr((obj + '.rz'))) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sx')) - 1.0)) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sy')) - 1.0)) > tol): transformList.append(obj) continue if (abs((mc.getAttr((obj + '.sz')) - 1.0)) > tol): transformList.append(obj) continue return transformList
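A sketch of the transform check with a looser tolerance than the default 1e-10 (the 'ctrl_grp' node name is hypothetical):

import glTools.utils.cleanup as cleanup

dirty = cleanup.checkTransforms(objList=['ctrl_grp'], tol=1e-06)
if dirty:
    print('Non-identity transforms: %s' % dirty)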
def displayOverridesCheck(objList=[]): '\n\tCheck all/specified objects for display overrides\n\t@param objList: List of DAG nodes to check. If empty, use all DAG nodes in scene\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(dag=True) else: objList = mc.ls(objList, dag=True) displayOverrideList = [] for obj in objList: if mc.getAttr((obj + '.overrideEnabled')): displayOverrideList.append(obj) return displayOverrideList
6,985,345,146,070,258,000
Check all/specified objects for display overrides @param objList: List of DAG nodes to check. If empty, use all DAG nodes in scene @type objList: list
utils/cleanup.py
displayOverridesCheck
Lynn5160/glTools
python
def displayOverridesCheck(objList=[]): '\n\tCheck all/specified objects for display overrides\n\t@param objList: List of DAG nodes to check. If empty, use all DAG nodes in scene\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(dag=True) else: objList = mc.ls(objList, dag=True) displayOverrideList = [] for obj in objList: if mc.getAttr((obj + '.overrideEnabled')): displayOverrideList.append(obj) return displayOverrideList
def shapeNameFix(shape): '\n\tFix an incorrectly named geometry shape node by renaming it to parent+"Shape".\n\t@param shape: Shape node to rename.\n\t@type shape: str\n\t' parent = mc.listRelatives(shape, p=True)[0] shapeName = (parent + 'Shape') if mc.objExists(shapeName): raise Exception((((('Shape "' + shapeName) + '" already exists! Unable to rename shape "') + shape) + '"!')) newShape = mc.rename(shape, shapeName) return newShape
8,285,677,100,888,544,000
Fix an incorrectly named geometry shape node by renaming it to parent+"Shape". @param shape: Shape node to rename. @type shape: str
utils/cleanup.py
shapeNameFix
Lynn5160/glTools
python
def shapeNameFix(shape): '\n\tFix an incorrectly named geometry shape node by renaming it to parent+"Shape".\n\t@param shape: Shape node to rename.\n\t@type shape: str\n\t' parent = mc.listRelatives(shape, p=True)[0] shapeName = (parent + 'Shape') if mc.objExists(shapeName): raise Exception((((('Shape "' + shapeName) + '" already exists! Unable to rename shape "') + shape) + '"!')) newShape = mc.rename(shape, shapeName) return newShape
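The check and the fix pair naturally; a hedged sketch that renames every flagged shape and reports clashes:

import glTools.utils.cleanup as cleanup

for shape in cleanup.shapeNameCheck(strict=True):
    try:
        cleanup.shapeNameFix(shape)  # renames to parent+'Shape'
    except Exception as e:
        print(e)  # a shape with the target name already exists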
def deleteIntermediateShapes(objList=[]): '\n\tDelete all intermediate shapes under the specified objects, or in the whole scene if no objects are given.\n\t@param objList: List of objects to delete intermediate shapes from.\n\t@type objList: list\n\t' intermediateShapeList = intermediateShapesCheck(objList) if intermediateShapeList: mc.delete(intermediateShapeList) return intermediateShapeList
2,769,470,549,769,540,000
Delete all intermediate shapes under the specified objects, or in the whole scene if no objects are given. @param objList: List of objects to delete intermediate shapes from. @type objList: list
utils/cleanup.py
deleteIntermediateShapes
Lynn5160/glTools
python
def deleteIntermediateShapes(objList=[]): '\n\t\n\t' intermediateShapeList = intermediateShapesCheck(objList) if intermediateShapeList: mc.delete(intermediateShapeList) return intermediateShapeList
def deleteConstructionHistory(geoList=[]): '\n\tDelete construction history for specified geometry\n\t@param geoList: List of objects to delete construction history from.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) for geo in geoList: mc.delete(geo, ch=True) return geoList
1,295,568,664,482,571,300
Delete construction history for specified geometry @param geoList: List of objects to delete construction history from. @type geoList: list
utils/cleanup.py
deleteConstructionHistory
Lynn5160/glTools
python
def deleteConstructionHistory(geoList=[]): '\n\tDelete construction history for specified geometry\n\t@param geoList: List of objects to delete construction history from.\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) for geo in geoList: mc.delete(geo, ch=True) return geoList
def deleteUserAttrs(nodeList=[], includeShapes=False): '\n\tDelete user defined attributes from the specified list of nodes\n\t@param nodeList: List of nodes to delete user defined attrs from. If empty, assume all nodes.\n\t@type nodeList: list\n\t@param includeShapes: Also delete user defined attributes from shape nodes.\n\t@type includeShapes: bool\n\t' if (not nodeList): nodeList = mc.ls() for node in nodeList: glTools.utils.attribute.deleteUserAttrs(node) if includeShapes: shapes = (mc.listRelatives(node, s=True) or []) for shape in shapes: glTools.utils.attribute.deleteUserAttrs(shape)
2,575,054,338,181,821,400
Delete user defined attributes from the specified list of nodes @param nodeList: List of nodes to delete user defined attrs from. If empty, assume all nodes. @type nodeList: list @param includeShapes: Also delete user defined attributes from shape nodes. @type includeShapes: bool
utils/cleanup.py
deleteUserAttrs
Lynn5160/glTools
python
def deleteUserAttrs(nodeList=[], includeShapes=False): '\n\tDelete user defined attributes from the specified list of nodes\n\t@param nodeList: List of nodes to delete user defined attrs from. If empty, assume all nodes.\n\t@type nodeList: list\n\t@param includeShapes: Also delete user defined attributes from shape nodes.\n\t@type includeShapes: bool\n\t' if (not nodeList): nodeList = mc.ls() for node in nodeList: glTools.utils.attribute.deleteUserAttrs(node) if includeShapes: shapes = (mc.listRelatives(node, s=True) or []) for shape in shapes: glTools.utils.attribute.deleteUserAttrs(shape)
def deleteEmptyGroups(objList=[]): '\n\tDelete empty groups\n\t' emptyGrpList = emptyGroupCheck(objList=objList) if emptyGrpList: mc.delete(emptyGrpList) return emptyGrpList
1,703,937,593,853,136,400
Delete empty groups
utils/cleanup.py
deleteEmptyGroups
Lynn5160/glTools
python
def deleteEmptyGroups(objList=[]): '\n\t\n\t' emptyGrpList = emptyGroupCheck(objList=objList) if emptyGrpList: mc.delete(emptyGrpList) return emptyGrpList
def deleteEmptySets(setList=[]): '\n\tDelete empty sets\n\t' emptySetList = emptySetCheck(setList=setList) if emptySetList: mc.delete(emptySetList) return emptySetList
-2,143,277,007,654,361,600
Delete empty sets
utils/cleanup.py
deleteEmptySets
Lynn5160/glTools
python
def deleteEmptySets(setList=[]): '\n\t\n\t' emptySetList = emptySetCheck(setList=setList) if emptySetList: mc.delete(emptySetList) return emptySetList
def deleteEmptyLayers(layerList=[]): '\n\tDelete empty layers\n\t' emptyLayerList = emptyLayerCheck(layerList=layerList) if emptyLayerList: mc.delete(emptyLayerList) return emptyLayerList
-8,304,039,507,788,755,000
Delete empty layers
utils/cleanup.py
deleteEmptyLayers
Lynn5160/glTools
python
def deleteEmptyLayers(layerList=[]): '\n\t\n\t' emptyLayerList = emptyLayerCheck(layerList=layerList) if emptyLayerList: mc.delete(emptyLayerList) return emptyLayerList
def deleteUnknownNodes(): '\n\tDelete all node of type "unknown" in the scene\n\t' unknownNodes = (unknownNodeCheck() or []) for node in unknownNodes: try: mc.lockNode(node, l=False) mc.delete(node) except: print((('Problem deleting unknown node "' + node) + '"!')) return unknownNodes
5,508,941,397,906,166,000
Delete all node of type "unknown" in the scene
utils/cleanup.py
deleteUnknownNodes
Lynn5160/glTools
python
def deleteUnknownNodes(): '\n\t\n\t' unknownNodes = (unknownNodeCheck() or []) for node in unknownNodes: try: mc.lockNode(node, l=False) mc.delete(node) except: print((('Problem deleting unknown node "' + node) + '"!')) return unknownNodes
def deleteNodesByType(nodeTypeList=[]): '\n\tDelete nodes of the specified type(s).\n\t@param nodeTypeList: List of node types to delete.\n\t@type nodeTypeList: list\n\t' if (not nodeTypeList): return [] nodeList = mc.ls(type=nodeTypeList) if nodeList: mc.delete(nodeList) else: nodeList = [] return nodeList
275,726,427,221,433,860
Delete nodes of the specified type(s). @param nodeTypeList: List of node types to delete. @type nodeTypeList: list
utils/cleanup.py
deleteNodesByType
Lynn5160/glTools
python
def deleteNodesByType(nodeTypeList=[]): '\n\tDelete nodes of the specified type(s).\n\t@param nodeTypeList: List of node types to delete.\n\t@type nodeTypeList: list\n\t' if (not nodeTypeList): return [] nodeList = mc.ls(type=nodeTypeList) if nodeList: mc.delete(nodeList) else: nodeList = [] return nodeList
def deleteUnusedReferenceNodes(): '\n\tDelete all unused reference nodes in the scene\n\t' mm.eval('RNdeleteUnused')
1,429,711,747,913,798,000
Delete all unused reference nodes in the scene
utils/cleanup.py
deleteUnusedReferenceNodes
Lynn5160/glTools
python
def deleteUnusedReferenceNodes(): '\n\t\n\t' mm.eval('RNdeleteUnused')
def deleteEmptySets(setList=[]): '\n\tDelete empty object sets\n\t@param setList: A list of sets to check. If empty, check all sets in current scene.\n\t@type setList: list\n\t' if (not setList): setList = mc.ls(sets=True) emptySetList = [] for setName in setList: if (not mc.sets(setName, q=True)): emptySetList.append(setName) for emptySet in emptySetList: try: mc.delete(emptySet) except: pass return emptySetList
1,971,482,680,095,184,000
Delete empty object sets @param setList: A list of sets to check. If empty, check all sets in current scene. @type setList: list
utils/cleanup.py
deleteEmptySets
Lynn5160/glTools
python
def deleteEmptySets(setList=[]): '\n\tDelete empty object sets\n\t@param setList: A list of sets to check. If empty, check all sets in current scene.\n\t@type setList: list\n\t' if (not setList): setList = mc.ls(sets=True) emptySetList = [] for setName in setList: if (not mc.sets(setName, q=True)): emptySetList.append(setName) for emptySet in emptySetList: try: mc.delete(emptySet) except: pass return emptySetList
def deleteAllSets(excludeList=[]): '\n\tDelete all object sets, optionally excluding a specified list.\n\t@param excludeList: A list of sets to exclude from deletion.\n\t@type excludeList: list\n\t' setList = mc.ls(sets=True) if excludeList: excludeSetList = mc.ls(excludeList, sets=True) setList = list((set(setList) - set(excludeSetList))) for deleteSet in setList: try: mc.delete(deleteSet) except: pass return setList
-5,507,416,627,643,570,000
Delete all object sets, optionally excluding a specified list. @param excludeList: A list of sets to exclude from deletion. @type excludeList: list
utils/cleanup.py
deleteAllSets
Lynn5160/glTools
python
def deleteAllSets(excludeList=[]): '\n\tDelete all object sets, optionally excluding a specified list.\n\t@param excludeList: A list of sets to exclude from deletion.\n\t@type excludeList: list\n\t' setList = mc.ls(sets=True) if excludeList: excludeSetList = mc.ls(excludeList, sets=True) setList = list((set(setList) - set(excludeSetList))) for deleteSet in setList: try: mc.delete(deleteSet) except: pass return setList
def deleteUnusedShadingNodes(): '\n\tDelete all unused shading nodes in the scene\n\t' mm.eval('MLdeleteUnused')
2,175,521,215,862,827,300
Delete all unused shading nodes in the scene
utils/cleanup.py
deleteUnusedShadingNodes
Lynn5160/glTools
python
def deleteUnusedShadingNodes(): '\n\t\n\t' mm.eval('MLdeleteUnused')
def deleteDisplayLayers(): '\n\tDelete all display layers\n\t' displayLayers = mc.ls(type='displayLayer') displayLayers.remove('defaultLayer') if displayLayers: mc.delete(displayLayers) return displayLayers
4,235,346,424,302,717,000
Delete all display layers
utils/cleanup.py
deleteDisplayLayers
Lynn5160/glTools
python
def deleteDisplayLayers(): '\n\t\n\t' displayLayers = mc.ls(type='displayLayer') displayLayers.remove('defaultLayer') if displayLayers: mc.delete(displayLayers) return displayLayers
def deleteRenderLayers(): '\n\tDelete all render layers\n\t' renderLayers = mc.ls(type='renderLayer') renderLayers.remove('defaultRenderLayer') if renderLayers: mc.delete(renderLayers) return renderLayers
907,741,467,256,312,300
Delete all render layers
utils/cleanup.py
deleteRenderLayers
Lynn5160/glTools
python
def deleteRenderLayers(): '\n\t\n\t' renderLayers = mc.ls(type='renderLayer') renderLayers.remove('defaultRenderLayer') if renderLayers: mc.delete(renderLayers) return renderLayers
def assignInitialShadingGroup(geoList=[]): '\n\tAssign initialShadingGroup (lambert1) to specified geometry.\n\t@param geoList: List of geometry to apply default shader to. If empty, use all scene geometry\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) if (not geoList): return [] mc.sets(geoList, fe='initialShadingGroup') return geoList
-4,432,954,743,299,561,500
Assign initialShadingGroup (lambert1) to specified geometry. @param geoList: List of geometry to apply default shader to. If empty, use all scene geometry @type geoList: list
utils/cleanup.py
assignInitialShadingGroup
Lynn5160/glTools
python
def assignInitialShadingGroup(geoList=[]): '\n\tAssign initialShadingGroup (lambert1) to specified geometry.\n\t@param geoList: List of geometry to apply default shader to. If empty, use all scene geometry\n\t@type geoList: list\n\t' if (not geoList): geoList = mc.ls(geometry=True) if (not geoList): return [] mc.sets(geoList, fe='initialShadingGroup') return geoList
def zeroTransforms(objList=[]): '\n\tReset transform values to identity (translate/rotate to 0, scale to 1).\n\t@param objList: List of transforms to zero out.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) if (not objList): return [] for obj in objList: if mc.getAttr((obj + '.tx'), se=True): mc.setAttr((obj + '.tx'), 0) if mc.getAttr((obj + '.ty'), se=True): mc.setAttr((obj + '.ty'), 0) if mc.getAttr((obj + '.tz'), se=True): mc.setAttr((obj + '.tz'), 0) if mc.getAttr((obj + '.rx'), se=True): mc.setAttr((obj + '.rx'), 0) if mc.getAttr((obj + '.ry'), se=True): mc.setAttr((obj + '.ry'), 0) if mc.getAttr((obj + '.rz'), se=True): mc.setAttr((obj + '.rz'), 0) if mc.getAttr((obj + '.sx'), se=True): mc.setAttr((obj + '.sx'), 1) if mc.getAttr((obj + '.sy'), se=True): mc.setAttr((obj + '.sy'), 1) if mc.getAttr((obj + '.sz'), se=True): mc.setAttr((obj + '.sz'), 1) return objList
4,021,415,312,165,912,600
Reset transform values to identity (translate/rotate to 0, scale to 1). @param objList: List of transforms to zero out. @type objList: list
utils/cleanup.py
zeroTransforms
Lynn5160/glTools
python
def zeroTransforms(objList=[]): '\n\tReset transform values to identity (translate/rotate to 0, scale to 1).\n\t@param objList: List of transforms to zero out.\n\t@type objList: list\n\t' if (not objList): objList = mc.ls(transforms=True) if (not objList): return [] for obj in objList: if mc.getAttr((obj + '.tx'), se=True): mc.setAttr((obj + '.tx'), 0) if mc.getAttr((obj + '.ty'), se=True): mc.setAttr((obj + '.ty'), 0) if mc.getAttr((obj + '.tz'), se=True): mc.setAttr((obj + '.tz'), 0) if mc.getAttr((obj + '.rx'), se=True): mc.setAttr((obj + '.rx'), 0) if mc.getAttr((obj + '.ry'), se=True): mc.setAttr((obj + '.ry'), 0) if mc.getAttr((obj + '.rz'), se=True): mc.setAttr((obj + '.rz'), 0) if mc.getAttr((obj + '.sx'), se=True): mc.setAttr((obj + '.sx'), 1) if mc.getAttr((obj + '.sy'), se=True): mc.setAttr((obj + '.sy'), 1) if mc.getAttr((obj + '.sz'), se=True): mc.setAttr((obj + '.sz'), 1) return objList
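zeroTransforms only touches settable channels, so locked or connected plugs are left alone. A one-line sketch restoring a hypothetical 'offset_grp' to identity:

import glTools.utils.cleanup as cleanup

cleanup.zeroTransforms(objList=['offset_grp'])  # translate/rotate -> 0, scale -> 1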
def copyInputShapeAttrs(geoList=[]): '\n\tCopy user defined attributes from an input shape to the output deforming shape.\n\t@param geoList: List of geometry to copy attributes for.\n\t@type geoList: list\n\t' if (not geoList): geoList = (mc.listRelatives((mc.ls(geometry=True) or []), p=True, pa=True) or []) if (not geoList): return [] for geo in geoList: geoShape = (mc.listRelatives(geo, s=True, ni=True) or []) if (not geoShape): print((('No shape found for geometry transform "' + geo) + '"!')) continue geoInputShape = geoShape[0] try: geoInputShape = glTools.utils.shape.findInputShape(geoShape[0]) except: pass if (geoInputShape != geoShape[0]): userAttr = (mc.listAttr(geoInputShape, ud=True, s=True) or []) for at in userAttr: glTools.utils.attribute.copyAttr(geoInputShape, geoShape[0], at)
4,709,085,801,992,864,000
Copy user defined attributes from an input shape to the output deforming shape. @param geoList: List of geometry to copy attributes for. @type geoList: list
utils/cleanup.py
copyInputShapeAttrs
Lynn5160/glTools
python
def copyInputShapeAttrs(geoList=[]): '\n\tCopy user defined attributes from an input shape to the output deforming shape.\n\t@param geoList: List of geometry to copy attributes for.\n\t@type geoList: list\n\t' if (not geoList): geoList = (mc.listRelatives((mc.ls(geometry=True) or []), p=True, pa=True) or []) if (not geoList): return [] for geo in geoList: geoShape = (mc.listRelatives(geo, s=True, ni=True) or []) if (not geoShape): print((('No shape found for geometry transform "' + geo) + '"!')) continue geoInputShape = geoShape[0] try: geoInputShape = glTools.utils.shape.findInputShape(geoShape[0]) except: pass if (geoInputShape != geoShape[0]): userAttr = (mc.listAttr(geoInputShape, ud=True, s=True) or []) for at in userAttr: glTools.utils.attribute.copyAttr(geoInputShape, geoShape[0], at)
def removeTurtle(): '\n\tDelete nodes and unload the plugin related to the Turtle renderer.\n\t' turtleNode = 'TurtleDefaultBakeLayer' if mc.objExists(turtleNode): print('Removing Turtle nodes...') mc.lockNode(turtleNode, l=False) mc.delete(turtleNode) if mc.pluginInfo('Turtle', q=True, loaded=True): print('Unloading Turtle plugin...') try: mc.unloadPlugin('Turtle', f=True) except: print('Error unloading Turtle plugin!')
-309,964,089,159,626,600
Delete nodes and unload the plugin related to the Turtle renderer.
utils/cleanup.py
removeTurtle
Lynn5160/glTools
python
def removeTurtle(): '\n\t\n\t' turtleNode = 'TurtleDefaultBakeLayer' if mc.objExists(turtleNode): print('Removing Turtle nodes...') mc.lockNode(turtleNode, l=False) mc.delete(turtleNode) if mc.pluginInfo('Turtle', q=True, loaded=True): print('Unloading Turtle plugin...') try: mc.unloadPlugin('Turtle', f=True) except: print('Error unloading Turtle plugin!')
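Taken together, these helpers support a simple scene-cleanup pass. A hedged sketch chaining a few of them; the ordering is a judgment call, not a pipeline prescribed by the module:

import glTools.utils.cleanup as cleanup

cleanup.deleteUnknownNodes()
cleanup.deleteUnusedShadingNodes()
cleanup.deleteEmptyGroups()
cleanup.deleteEmptyLayers()
cleanup.removeTurtle()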
def test_instantiate(core_config, patch_mongo, ampel_logger): '\n AbsT3Loader understands all the aliases in the ampel-core config\n ' ctx = AmpelContext.load(core_config) aliases = ctx.config.get('alias.t3', dict) assert (len((directives := T3SimpleDataLoader(context=ctx, logger=ampel_logger, directives=[k[1:] for k in aliases.keys()]).directives)) == len(aliases)) for (d, value) in zip(directives, aliases.values()): assert (d.dict(exclude_defaults=True) == value)
840,419,088,730,266,900
AbsT3Loader understands all the aliases in the ampel-core config
ampel/test/test_T3SimpleDataLoader.py
test_instantiate
mafn/Ampel-core
python
def test_instantiate(core_config, patch_mongo, ampel_logger): '\n \n ' ctx = AmpelContext.load(core_config) aliases = ctx.config.get('alias.t3', dict) assert (len((directives := T3SimpleDataLoader(context=ctx, logger=ampel_logger, directives=[k[1:] for k in aliases.keys()]).directives)) == len(aliases)) for (d, value) in zip(directives, aliases.values()): assert (d.dict(exclude_defaults=True) == value)
@bp_rack.route('/lists.html', methods=['GET', 'POST']) @login_required @permission_rack_section_search.require(http_exception=403) def lists(): '\n Rack list\n :return:\n ' template_name = 'rack/lists.html' document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack lists') form = RackSearchForm(request.form) form.warehouse_id.choices = get_warehouse_choices() search_condition = [(Rack.status_delete == STATUS_DEL_NO)] if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Search Failure'), 'danger') if (hasattr(form, 'csrf_token') and getattr(form, 'csrf_token').errors): map((lambda x: flash(x, 'danger')), form.csrf_token.errors) else: if (form.warehouse_id.data != DEFAULT_SEARCH_CHOICES_INT_OPTION): search_condition.append((Rack.warehouse_id == form.warehouse_id.data)) if form.name.data: search_condition.append((Rack.name == form.name.data)) if (form.op.data == OPERATION_EXPORT): if (not permission_rack_section_export.can()): abort(403) column_names = Rack.__table__.columns.keys() query_sets = get_rack_rows(*search_condition) return excel.make_response_from_query_sets(query_sets=query_sets, column_names=column_names, file_type='csv', file_name=('%s.csv' % _('rack lists'))) if (form.op.data == OPERATION_DELETE): if (not permission_rack_section_del.can()): abort(403) rack_ids = request.form.getlist('rack_id') permitted = True for rack_id in rack_ids: if count_inventory(**{'rack_id': rack_id, 'status_delete': STATUS_DEL_NO}): ext_msg = _('Currently In Use') flash(_('Del Failure, %(ext_msg)s', ext_msg=ext_msg), 'danger') permitted = False break if permitted: result_total = True for rack_id in rack_ids: current_time = datetime.utcnow() rack_data = {'status_delete': STATUS_DEL_OK, 'delete_time': current_time, 'update_time': current_time} result = edit_rack(rack_id, rack_data) result_total = (result_total and result) if result_total: flash(_('Del Success'), 'success') else: flash(_('Del Failure'), 'danger') pagination = get_rack_pagination(form.page.data, PER_PAGE_BACKEND, *search_condition) return render_template(template_name, form=form, pagination=pagination, **document_info)
5,864,496,859,774,998,000
Rack list :return:
app_backend/views/rack.py
lists
zhanghe06/bearing_project
python
@bp_rack.route('/lists.html', methods=['GET', 'POST']) @login_required @permission_rack_section_search.require(http_exception=403) def lists(): '\n Rack list\n :return:\n ' template_name = 'rack/lists.html' document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack lists') form = RackSearchForm(request.form) form.warehouse_id.choices = get_warehouse_choices() search_condition = [(Rack.status_delete == STATUS_DEL_NO)] if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Search Failure'), 'danger') if (hasattr(form, 'csrf_token') and getattr(form, 'csrf_token').errors): map((lambda x: flash(x, 'danger')), form.csrf_token.errors) else: if (form.warehouse_id.data != DEFAULT_SEARCH_CHOICES_INT_OPTION): search_condition.append((Rack.warehouse_id == form.warehouse_id.data)) if form.name.data: search_condition.append((Rack.name == form.name.data)) if (form.op.data == OPERATION_EXPORT): if (not permission_rack_section_export.can()): abort(403) column_names = Rack.__table__.columns.keys() query_sets = get_rack_rows(*search_condition) return excel.make_response_from_query_sets(query_sets=query_sets, column_names=column_names, file_type='csv', file_name=('%s.csv' % _('rack lists'))) if (form.op.data == OPERATION_DELETE): if (not permission_rack_section_del.can()): abort(403) rack_ids = request.form.getlist('rack_id') permitted = True for rack_id in rack_ids: if count_inventory(**{'rack_id': rack_id, 'status_delete': STATUS_DEL_NO}): ext_msg = _('Currently In Use') flash(_('Del Failure, %(ext_msg)s', ext_msg=ext_msg), 'danger') permitted = False break if permitted: result_total = True for rack_id in rack_ids: current_time = datetime.utcnow() rack_data = {'status_delete': STATUS_DEL_OK, 'delete_time': current_time, 'update_time': current_time} result = edit_rack(rack_id, rack_data) result_total = (result_total and result) if result_total: flash(_('Del Success'), 'success') else: flash(_('Del Failure'), 'danger') pagination = get_rack_pagination(form.page.data, PER_PAGE_BACKEND, *search_condition) return render_template(template_name, form=form, pagination=pagination, **document_info)
@bp_rack.route('/<int:rack_id>/info.html') @login_required @permission_rack_section_get.require(http_exception=403) def info(rack_id): '\n Rack details\n :param rack_id:\n :return:\n ' rack_info = get_rack_row_by_id(rack_id) if (not rack_info): abort(404) if (rack_info.status_delete == STATUS_DEL_OK): abort(410) document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack info') return render_template('rack/info.html', rack_info=rack_info, **document_info)
-8,079,144,495,823,498,000
Rack details :param rack_id: :return:
app_backend/views/rack.py
info
zhanghe06/bearing_project
python
@bp_rack.route('/<int:rack_id>/info.html') @login_required @permission_rack_section_get.require(http_exception=403) def info(rack_id): '\n Rack details\n :param rack_id:\n :return:\n ' rack_info = get_rack_row_by_id(rack_id) if (not rack_info): abort(404) if (rack_info.status_delete == STATUS_DEL_OK): abort(410) document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack info') return render_template('rack/info.html', rack_info=rack_info, **document_info)
@bp_rack.route('/add.html', methods=['GET', 'POST']) @login_required @permission_rack_section_add.require(http_exception=403) def add(): '\n Create rack\n :return:\n ' template_name = 'rack/add.html' document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack add') form = RackAddForm(request.form) form.warehouse_id.choices = get_warehouse_choices(option_type='create') if (request.method == 'GET'): return render_template(template_name, form=form, **document_info) if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Add Failure'), 'danger') return render_template(template_name, form=form, **document_info) current_time = datetime.utcnow() rack_data = {'warehouse_id': form.warehouse_id.data, 'name': form.name.data, 'create_time': current_time, 'update_time': current_time} result = add_rack(rack_data) if result: flash(_('Add Success'), 'success') return redirect((request.args.get('next') or url_for('rack.lists'))) else: flash(_('Add Failure'), 'danger') return render_template(template_name, form=form, **document_info)
-1,110,958,220,311,944,700
Create rack :return:
app_backend/views/rack.py
add
zhanghe06/bearing_project
python
@bp_rack.route('/add.html', methods=['GET', 'POST']) @login_required @permission_rack_section_add.require(http_exception=403) def add(): '\n Create rack\n :return:\n ' template_name = 'rack/add.html' document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack add') form = RackAddForm(request.form) form.warehouse_id.choices = get_warehouse_choices(option_type='create') if (request.method == 'GET'): return render_template(template_name, form=form, **document_info) if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Add Failure'), 'danger') return render_template(template_name, form=form, **document_info) current_time = datetime.utcnow() rack_data = {'warehouse_id': form.warehouse_id.data, 'name': form.name.data, 'create_time': current_time, 'update_time': current_time} result = add_rack(rack_data) if result: flash(_('Add Success'), 'success') return redirect((request.args.get('next') or url_for('rack.lists'))) else: flash(_('Add Failure'), 'danger') return render_template(template_name, form=form, **document_info)
@bp_rack.route('/<int:rack_id>/edit.html', methods=['GET', 'POST']) @login_required @permission_rack_section_edit.require(http_exception=403) def edit(rack_id): '\n Edit rack\n ' rack_info = get_rack_row_by_id(rack_id) if (not rack_info): abort(404) if (rack_info.status_delete == STATUS_DEL_OK): abort(410) template_name = 'rack/edit.html' form = RackEditForm(request.form) form.warehouse_id.choices = get_warehouse_choices(option_type='update') document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack edit') if (request.method == 'GET'): form.warehouse_id.data = rack_info.warehouse_id form.name.data = rack_info.name return render_template(template_name, rack_id=rack_id, form=form, **document_info) if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Edit Failure'), 'danger') return render_template(template_name, rack_id=rack_id, form=form, **document_info) current_time = datetime.utcnow() rack_data = {'warehouse_id': form.warehouse_id.data, 'name': form.name.data, 'update_time': current_time} result = edit_rack(rack_id, rack_data) if result: flash(_('Edit Success'), 'success') return redirect((request.args.get('next') or url_for('rack.lists'))) else: flash(_('Edit Failure'), 'danger') return render_template(template_name, rack_id=rack_id, form=form, **document_info)
-5,308,319,042,190,759,000
Edit rack
app_backend/views/rack.py
edit
zhanghe06/bearing_project
python
@bp_rack.route('/<int:rack_id>/edit.html', methods=['GET', 'POST']) @login_required @permission_rack_section_edit.require(http_exception=403) def edit(rack_id): '\n \n ' rack_info = get_rack_row_by_id(rack_id) if (not rack_info): abort(404) if (rack_info.status_delete == STATUS_DEL_OK): abort(410) template_name = 'rack/edit.html' form = RackEditForm(request.form) form.warehouse_id.choices = get_warehouse_choices(option_type='update') document_info = DOCUMENT_INFO.copy() document_info['TITLE'] = _('rack edit') if (request.method == 'GET'): form.warehouse_id.data = rack_info.warehouse_id form.name.data = rack_info.name return render_template(template_name, rack_id=rack_id, form=form, **document_info) if (request.method == 'POST'): if (not form.validate_on_submit()): flash(_('Edit Failure'), 'danger') return render_template(template_name, rack_id=rack_id, form=form, **document_info) current_time = datetime.utcnow() rack_data = {'warehouse_id': form.warehouse_id.data, 'name': form.name.data, 'update_time': current_time} result = edit_rack(rack_id, rack_data) if result: flash(_('Edit Success'), 'success') return redirect((request.args.get('next') or url_for('rack.lists'))) else: flash(_('Edit Failure'), 'danger') return render_template(template_name, rack_id=rack_id, form=form, **document_info)
@bp_rack.route('/ajax/del', methods=['GET', 'POST']) @login_required def ajax_delete(): '\n Delete rack\n :return:\n ' ajax_success_msg = AJAX_SUCCESS_MSG.copy() ajax_failure_msg = AJAX_FAILURE_MSG.copy() if (not permission_rack_section_del.can()): ext_msg = _('Permission Denied') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if (not ((request.method == 'GET') and request.is_xhr)): ext_msg = _('Method Not Allowed') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) rack_id = request.args.get('rack_id', 0, type=int) if (not rack_id): ext_msg = _('ID does not exist') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) rack_info = get_rack_row_by_id(rack_id) if (not rack_info): ext_msg = _('ID does not exist') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if (rack_info.status_delete == STATUS_DEL_OK): ext_msg = _('Already deleted') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if count_inventory(**{'rack_id': rack_id, 'status_delete': STATUS_DEL_NO}): ext_msg = _('Currently In Use') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) current_time = datetime.utcnow() rack_data = {'status_delete': STATUS_DEL_OK, 'delete_time': current_time, 'update_time': current_time} result = edit_rack(rack_id, rack_data) if result: ajax_success_msg['msg'] = _('Del Success') return jsonify(ajax_success_msg) else: ajax_failure_msg['msg'] = _('Del Failure') return jsonify(ajax_failure_msg)
-4,760,058,191,510,217,000
Delete rack :return:
app_backend/views/rack.py
ajax_delete
zhanghe06/bearing_project
python
@bp_rack.route('/ajax/del', methods=['GET', 'POST']) @login_required def ajax_delete(): '\n Delete rack\n :return:\n ' ajax_success_msg = AJAX_SUCCESS_MSG.copy() ajax_failure_msg = AJAX_FAILURE_MSG.copy() if (not permission_rack_section_del.can()): ext_msg = _('Permission Denied') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if (not ((request.method == 'GET') and request.is_xhr)): ext_msg = _('Method Not Allowed') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) rack_id = request.args.get('rack_id', 0, type=int) if (not rack_id): ext_msg = _('ID does not exist') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) rack_info = get_rack_row_by_id(rack_id) if (not rack_info): ext_msg = _('ID does not exist') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if (rack_info.status_delete == STATUS_DEL_OK): ext_msg = _('Already deleted') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) if count_inventory(**{'rack_id': rack_id, 'status_delete': STATUS_DEL_NO}): ext_msg = _('Currently In Use') ajax_failure_msg['msg'] = _('Del Failure, %(ext_msg)s', ext_msg=ext_msg) return jsonify(ajax_failure_msg) current_time = datetime.utcnow() rack_data = {'status_delete': STATUS_DEL_OK, 'delete_time': current_time, 'update_time': current_time} result = edit_rack(rack_id, rack_data) if result: ajax_success_msg['msg'] = _('Del Success') return jsonify(ajax_success_msg) else: ajax_failure_msg['msg'] = _('Del Failure') return jsonify(ajax_failure_msg)
@bp_rack.route('/ajax/get_rack_choices', methods=['GET', 'POST']) @login_required def ajax_get_rack_choices(): '\n Rack options\n :return:\n ' warehouse_id = request.args.get('warehouse_id', 0, type=int) rack_choices = get_rack_choices(warehouse_id) return jsonify(rack_choices)
-7,212,981,095,690,917,000
Rack options :return:
app_backend/views/rack.py
ajax_get_rack_choices
zhanghe06/bearing_project
python
@bp_rack.route('/ajax/get_rack_choices', methods=['GET', 'POST']) @login_required def ajax_get_rack_choices(): '\n Rack options\n :return:\n ' warehouse_id = request.args.get('warehouse_id', 0, type=int) rack_choices = get_rack_choices(warehouse_id) return jsonify(rack_choices)
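A hedged sketch of exercising the two AJAX endpoints above from an already authenticated session; the host, the /rack mount prefix, and the ids are placeholders, and ajax_delete additionally requires an XHR-style GET:

import requests

s = requests.Session()  # assume the login cookie is already set on this session
s.get('http://host/rack/ajax/get_rack_choices', params={'warehouse_id': 2})
s.get('http://host/rack/ajax/del', params={'rack_id': 1},
      headers={'X-Requested-With': 'XMLHttpRequest'})  # soft-deletes rack 1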
def test_margin_asset_without_asset(): 'Tests the API endpoint to margin asset without asset' client = Client(key, secret) client.margin_asset.when.called_with('').should.throw(ParameterRequiredError)
7,273,823,675,300,908,000
Tests the API endpoint to margin asset without asset
tests/spot/margin/test_margin_asset.py
test_margin_asset_without_asset
0x000050/binance-connector-python
python
def test_margin_asset_without_asset(): client = Client(key, secret) client.margin_asset.when.called_with('').should.throw(ParameterRequiredError)
@mock_http_response(responses.GET, ('/sapi/v1/margin/asset\\?' + urlencode(params)), mock_item, 200) def test_margin_asset(): 'Tests the API endpoint to margin asset' client = Client(key, secret) response = client.margin_asset(**params) response.should.equal(mock_item)
-732,374,126,652,178,800
Tests the API endpoint to margin asset
tests/spot/margin/test_margin_asset.py
test_margin_asset
0x000050/binance-connector-python
python
@mock_http_response(responses.GET, ('/sapi/v1/margin/asset\\?' + urlencode(params)), mock_item, 200) def test_margin_asset(): client = Client(key, secret) response = client.margin_asset(**params) response.should.equal(mock_item)
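The happy-path test above mocks GET /sapi/v1/margin/asset; a hedged sketch of the real call it exercises (import path per binance-connector-python; the key, secret, and asset values are placeholders):

from binance.spot import Spot as Client

client = Client(key='api_key', secret='api_secret')
print(client.margin_asset(asset='BNB'))  # GET /sapi/v1/margin/asset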
def record(self, name: str, value: float, ignore_nan=True): '\n Args:\n name: name of value.\n value: value to record.\n ignore_nan: ignore nan values and do not record them (they will mess up the averages).\n ' if (ignore_nan and ((value != value) or (value is None))): return self.avgs[name] if (self.avgs.get(name) is None): self.avgs[name] = value else: self.avgs[name] = ((self.mix_rate * self.avgs[name]) + ((1 - self.mix_rate) * value)) return self.avgs[name]
7,226,268,007,534,564,000
Args: name: name of value. value: value to record. ignore_nan: ignore nan values and do not record them (they will mess up the averages).
wrangl/metrics/running_avg.py
record
vzhong/wrangl
python
def record(self, name: str, value: float, ignore_nan=True): '\n Args:\n name: name of value.\n value: value to record.\n ignore_nan: ignore nan values and do not record them (they will mess up the averages).\n ' if (ignore_nan and ((value != value) or (value is None))): return self.avgs[name] if (self.avgs.get(name) is None): self.avgs[name] = value else: self.avgs[name] = ((self.mix_rate * self.avgs[name]) + ((1 - self.mix_rate) * value)) return self.avgs[name]
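record keeps a per-name exponential moving average: avg_new = mix_rate * avg_old + (1 - mix_rate) * value. A self-contained sketch of the same update rule; the RunningAvg class name and the mix_rate default are assumptions for illustration:

class RunningAvg:
    def __init__(self, mix_rate=0.99):  # mix_rate assumed for this sketch
        self.mix_rate = mix_rate
        self.avgs = {}

    def record(self, name, value, ignore_nan=True):
        # NaN is the only value for which value != value, so NaNs (and Nones) are dropped.
        if ignore_nan and (value != value or value is None):
            return self.avgs.get(name)
        if self.avgs.get(name) is None:
            self.avgs[name] = value
        else:
            self.avgs[name] = self.mix_rate * self.avgs[name] + (1 - self.mix_rate) * value
        return self.avgs[name]

avg = RunningAvg(mix_rate=0.9)
for v in [1.0, 2.0, float('nan'), 3.0]:
    avg.record('loss', v)
print(avg.avgs['loss'])  # 1.29: the NaN sample was ignored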
def create_loss(self): ' create loss function ' return 'categorical_crossentropy'
2,219,248,142,900,374,300
create loss function
source/engine/steps/config_model.py
create_loss
Borrk/DeepLearning-Engine
python
def create_loss(self): ' ' return 'categorical_crossentropy'
def __init__(self, quotas=None): 'ShowDomainQuotaResponse - a model defined in huaweicloud sdk' super(ShowDomainQuotaResponse, self).__init__() self._quotas = None self.discriminator = None if (quotas is not None): self.quotas = quotas
7,046,108,231,739,304,000
ShowDomainQuotaResponse - a model defined in huaweicloud sdk
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
__init__
huaweicloud/huaweicloud-sdk-python-v3
python
def __init__(self, quotas=None): super(ShowDomainQuotaResponse, self).__init__() self._quotas = None self.discriminator = None if (quotas is not None): self.quotas = quotas
@property def quotas(self): 'Gets the quotas of this ShowDomainQuotaResponse.\n\n\n :return: The quotas of this ShowDomainQuotaResponse.\n :rtype: QuotaResult\n ' return self._quotas
-3,598,046,173,939,050,500
Gets the quotas of this ShowDomainQuotaResponse. :return: The quotas of this ShowDomainQuotaResponse. :rtype: QuotaResult
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
quotas
huaweicloud/huaweicloud-sdk-python-v3
python
@property def quotas(self): 'Gets the quotas of this ShowDomainQuotaResponse.\n\n\n :return: The quotas of this ShowDomainQuotaResponse.\n :rtype: QuotaResult\n ' return self._quotas
@quotas.setter def quotas(self, quotas): 'Sets the quotas of this ShowDomainQuotaResponse.\n\n\n :param quotas: The quotas of this ShowDomainQuotaResponse.\n :type: QuotaResult\n ' self._quotas = quotas
8,606,408,353,636,620,000
Sets the quotas of this ShowDomainQuotaResponse. :param quotas: The quotas of this ShowDomainQuotaResponse. :type: QuotaResult
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
quotas
huaweicloud/huaweicloud-sdk-python-v3
python
@quotas.setter def quotas(self, quotas): 'Sets the quotas of this ShowDomainQuotaResponse.\n\n\n :param quotas: The quotas of this ShowDomainQuotaResponse.\n :type: QuotaResult\n ' self._quotas = quotas
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) elif (attr in self.sensitive_list): result[attr] = '****' else: result[attr] = value return result
2,594,216,033,120,720,000
Returns the model properties as a dict
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
to_dict
huaweicloud/huaweicloud-sdk-python-v3
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) elif (attr in self.sensitive_list): result[attr] = '****' else: result[attr] = value return result
def to_str(self): 'Returns the string representation of the model' import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
-6,095,553,759,700,562,000
Returns the string representation of the model
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
to_str
huaweicloud/huaweicloud-sdk-python-v3
python
def to_str(self): import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self): 'For `print`' return self.to_str()
-1,581,176,371,750,213,000
For `print`
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
__repr__
huaweicloud/huaweicloud-sdk-python-v3
python
def __repr__(self): return self.to_str()
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, ShowDomainQuotaResponse)): return False return (self.__dict__ == other.__dict__)
4,360,679,028,406,671,400
Returns true if both objects are equal
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
__eq__
huaweicloud/huaweicloud-sdk-python-v3
python
def __eq__(self, other): if (not isinstance(other, ShowDomainQuotaResponse)): return False return (self.__dict__ == other.__dict__)
def __ne__(self, other): 'Returns true if both objects are not equal' return (not (self == other))
7,764,124,047,908,058,000
Returns true if both objects are not equal
huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/show_domain_quota_response.py
__ne__
huaweicloud/huaweicloud-sdk-python-v3
python
def __ne__(self, other): return (not (self == other))
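A hedged usage sketch for this response model, assuming the huaweicloudsdkiam package layout implied by the path above:

from huaweicloudsdkiam.v3.model.show_domain_quota_response import ShowDomainQuotaResponse

resp = ShowDomainQuotaResponse()          # quotas stays None until set
print(resp == ShowDomainQuotaResponse())  # True: __eq__ compares __dict__
print(resp.to_dict())                     # serializes the declared properties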
def get_sql_resource_sql_stored_procedure(account_name: Optional[str]=None, container_name: Optional[str]=None, database_name: Optional[str]=None, resource_group_name: Optional[str]=None, stored_procedure_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetSqlResourceSqlStoredProcedureResult: '\n An Azure Cosmos DB storedProcedure.\n\n\n :param str account_name: Cosmos DB database account name.\n :param str container_name: Cosmos DB container name.\n :param str database_name: Cosmos DB database name.\n :param str resource_group_name: The name of the resource group. The name is case insensitive.\n :param str stored_procedure_name: Cosmos DB storedProcedure name.\n ' __args__ = dict() __args__['accountName'] = account_name __args__['containerName'] = container_name __args__['databaseName'] = database_name __args__['resourceGroupName'] = resource_group_name __args__['storedProcedureName'] = stored_procedure_name if (opts is None): opts = pulumi.InvokeOptions() if (opts.version is None): opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:documentdb/v20200401:getSqlResourceSqlStoredProcedure', __args__, opts=opts, typ=GetSqlResourceSqlStoredProcedureResult).value return AwaitableGetSqlResourceSqlStoredProcedureResult(id=__ret__.id, location=__ret__.location, name=__ret__.name, resource=__ret__.resource, tags=__ret__.tags, type=__ret__.type)
-9,073,698,145,446,339,000
An Azure Cosmos DB storedProcedure. :param str account_name: Cosmos DB database account name. :param str container_name: Cosmos DB container name. :param str database_name: Cosmos DB database name. :param str resource_group_name: The name of the resource group. The name is case insensitive. :param str stored_procedure_name: Cosmos DB storedProcedure name.
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
get_sql_resource_sql_stored_procedure
polivbr/pulumi-azure-native
python
def get_sql_resource_sql_stored_procedure(account_name: Optional[str]=None, container_name: Optional[str]=None, database_name: Optional[str]=None, resource_group_name: Optional[str]=None, stored_procedure_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetSqlResourceSqlStoredProcedureResult: '\n An Azure Cosmos DB storedProcedure.\n\n\n :param str account_name: Cosmos DB database account name.\n :param str container_name: Cosmos DB container name.\n :param str database_name: Cosmos DB database name.\n :param str resource_group_name: The name of the resource group. The name is case insensitive.\n :param str stored_procedure_name: Cosmos DB storedProcedure name.\n ' __args__ = dict() __args__['accountName'] = account_name __args__['containerName'] = container_name __args__['databaseName'] = database_name __args__['resourceGroupName'] = resource_group_name __args__['storedProcedureName'] = stored_procedure_name if (opts is None): opts = pulumi.InvokeOptions() if (opts.version is None): opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:documentdb/v20200401:getSqlResourceSqlStoredProcedure', __args__, opts=opts, typ=GetSqlResourceSqlStoredProcedureResult).value return AwaitableGetSqlResourceSqlStoredProcedureResult(id=__ret__.id, location=__ret__.location, name=__ret__.name, resource=__ret__.resource, tags=__ret__.tags, type=__ret__.type)
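A hedged sketch of invoking this lookup inside a Pulumi program; all resource names are placeholders:

import pulumi
import pulumi_azure_native.documentdb.v20200401 as documentdb

sproc = documentdb.get_sql_resource_sql_stored_procedure(
    account_name='my-cosmos-account',
    container_name='my-container',
    database_name='my-db',
    resource_group_name='my-rg',
    stored_procedure_name='my-sproc')
pulumi.export('sprocId', sproc.id)  # the unique ARM resource id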
@property @pulumi.getter def id(self) -> str: '\n The unique resource identifier of the ARM resource.\n ' return pulumi.get(self, 'id')
-3,549,939,143,766,340,000
The unique resource identifier of the ARM resource.
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
id
polivbr/pulumi-azure-native
python
@property @pulumi.getter def id(self) -> str: '\n \n ' return pulumi.get(self, 'id')
@property @pulumi.getter def location(self) -> Optional[str]: '\n The location of the resource group to which the resource belongs.\n ' return pulumi.get(self, 'location')
3,271,524,381,365,131,300
The location of the resource group to which the resource belongs.
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
location
polivbr/pulumi-azure-native
python
@property @pulumi.getter def location(self) -> Optional[str]: '\n \n ' return pulumi.get(self, 'location')
@property @pulumi.getter def name(self) -> str: '\n The name of the ARM resource.\n ' return pulumi.get(self, 'name')
5,284,165,818,500,662,000
The name of the ARM resource.
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
name
polivbr/pulumi-azure-native
python
@property @pulumi.getter def name(self) -> str: '\n \n ' return pulumi.get(self, 'name')
@property @pulumi.getter def tags(self) -> Optional[Mapping[(str, str)]]: '\n Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".\n ' return pulumi.get(self, 'tags')
3,512,089,887,617,268,700
Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
tags
polivbr/pulumi-azure-native
python
@property @pulumi.getter def tags(self) -> Optional[Mapping[(str, str)]]: '\n \n ' return pulumi.get(self, 'tags')
@property @pulumi.getter def type(self) -> str: '\n The type of Azure resource.\n ' return pulumi.get(self, 'type')
7,933,918,790,997,796,000
The type of Azure resource.
sdk/python/pulumi_azure_native/documentdb/v20200401/get_sql_resource_sql_stored_procedure.py
type
polivbr/pulumi-azure-native
python
@property @pulumi.getter def type(self) -> str: '\n \n ' return pulumi.get(self, 'type')
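The five property records above all follow the same generated pattern; a condensed sketch of how such a result class is typically assembled (the constructor shape is an assumption based on Pulumi's generated SDKs, not taken from this dataset):

import pulumi

class GetSqlResourceSqlStoredProcedureResult:
    def __init__(self, id=None, name=None, type=None):
        # pulumi.set stores each field so the @property getters
        # below can retrieve it with pulumi.get(self, key).
        pulumi.set(self, 'id', id)
        pulumi.set(self, 'name', name)
        pulumi.set(self, 'type', type)

    @property
    @pulumi.getter
    def id(self) -> str:
        return pulumi.get(self, 'id')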
def addscriptpath(script): '\n Add the path part of the scriptfile to the system path to\n allow modules to be loaded from the same place.\n\n Each path is added only once.\n ' pathfound = 0 scriptpath = os.path.dirname(script) for pathitem in sys.path: if (pathitem == scriptpath): pathfound = 1 break if (pathfound == 0): sys.path.append(scriptpath)
-95,659,354,973,389,170
Add the path part of the scriptfile to the system path to allow modules to be loaded from the same place. Each path is added only once.
python/init.py
addscriptpath
exported/ollypython
python
def addscriptpath(script): '\n Add the path part of the scriptfile to the system path to\n allow modules to be loaded from the same place.\n\n Each path is added only once.\n ' pathfound = 0 scriptpath = os.path.dirname(script) for pathitem in sys.path: if (pathitem == scriptpath): pathfound = 1 break if (pathfound == 0): sys.path.append(scriptpath)
def runscript(script): '\n Run the specified script after adding its directory path to\n system path.\n\n This function is used by the low-level plugin code.\n ' addscriptpath(script) watchdog.reset() argv = sys.argv sys.argv = [script] execfile(script, globals()) sys.argv = argv
4,093,790,362,929,177,000
Run the specified script after adding its directory path to system path. This function is used by the low-level plugin code.
python/init.py
runscript
exported/ollypython
python
def runscript(script): '\n Run the specified script after adding its directory path to\n system path.\n\n This function is used by the low-level plugin code.\n ' addscriptpath(script) watchdog.reset() argv = sys.argv sys.argv = [script] execfile(script, globals()) sys.argv = argv
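To illustrate how the two helpers above fit together (the script path is a hypothetical example; in practice the plugin's native side calls runscript directly):

# runscript() first registers the script's directory via addscriptpath(),
# then executes the file with a temporary sys.argv of [script].
runscript('C:\\ollyscripts\\list_breakpoints.py')

# addscriptpath() is idempotent: repeated calls append the directory once.
addscriptpath('C:\\ollyscripts\\list_breakpoints.py')
addscriptpath('C:\\ollyscripts\\list_breakpoints.py')  # no duplicate entry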
def install(self): ' Install the tracer function, required for the watchdog ' if (not self.installed): sys.settrace(self.tracer) self.installed = True
-443,802,513,454,424,260
Install the tracer function, required for the watchdog
python/init.py
install
exported/ollypython
python
def install(self): ' ' if (not self.installed): sys.settrace(self.tracer) self.installed = True
def activate(self, timeout=None): ' Activate the watchdog, with optional timeout change ' assert self.installed, 'WatchDog must be installed before activating' if timeout: self.timeout = timeout self.reset() self.active = True
-7,555,068,057,636,498,000
Activate the watchdog, with optional timeout change
python/init.py
activate
exported/ollypython
python
def activate(self, timeout=None): ' ' assert self.installed, 'WatchDog must be installed before activating' if timeout: self.timeout = timeout self.reset() self.active = True
def deactivate(self): ' Deactivate the watchdog ' self.active = False
4,592,113,951,165,954,600
Deactivate the watchdog
python/init.py
deactivate
exported/ollypython
python
def deactivate(self): ' ' self.active = False
def reset(self): ' Reset the timer, useful for long-running scripts ' self.timestamp = time.clock()
6,988,983,387,992,105,000
Reset the timer, useful for long-running scripts
python/init.py
reset
exported/ollypython
python
def reset(self): ' ' self.timestamp = time.clock()
def tracer(self, frame, event, arg): ' Tracer function that receives the tracing events ' if (not self.active): return None return self.tracer
-1,960,268,148,174,367,200
Tracer function that receives the tracing events
python/init.py
tracer
exported/ollypython
python
def tracer(self, frame, event, arg): ' ' if (not self.active): return None return self.tracer
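The five methods above describe a single lifecycle: install the tracer once, activate with a timeout, reset inside long-running loops, and deactivate when finished. A hedged sketch of that flow (the WatchDog class name and a timeout unit of seconds are assumptions inferred from the method bodies):

wd = WatchDog()
wd.install()             # registers tracer() via sys.settrace; required first
wd.activate(timeout=10)  # arm the watchdog; assertion fires if not installed
for item in work_items:  # work_items is a hypothetical long-running workload
    process(item)
    wd.reset()           # restart the timer so the script is not interrupted
wd.deactivate()          # clear the active flag when the work completes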
@bind_hass def turn_on(hass, entity_id, variables=None, context=None): 'Turn script on.\n\n This is a legacy helper method. Do not use it for new tests.\n ' (_, object_id) = split_entity_id(entity_id) hass.services.call(DOMAIN, object_id, variables, context=context)
881,353,817,027,971,100
Turn script on. This is a legacy helper method. Do not use it for new tests.
tests/components/test_script.py
turn_on
27tech/home-assistant
python
@bind_hass def turn_on(hass, entity_id, variables=None, context=None): 'Turn script on.\n\n This is a legacy helper method. Do not use it for new tests.\n ' (_, object_id) = split_entity_id(entity_id) hass.services.call(DOMAIN, object_id, variables, context=context)
@bind_hass def turn_off(hass, entity_id): 'Turn script off.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id})
-4,524,185,376,768,060,000
Turn script off. This is a legacy helper method. Do not use it for new tests.
tests/components/test_script.py
turn_off
27tech/home-assistant
python
@bind_hass def turn_off(hass, entity_id): 'Turn script off.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id})
@bind_hass def toggle(hass, entity_id): 'Toggle the script.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id})
-5,155,807,265,171,595,000
Toggle the script. This is a legacy helper method. Do not use it for new tests.
tests/components/test_script.py
toggle
27tech/home-assistant
python
@bind_hass def toggle(hass, entity_id): 'Toggle the script.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id})
@bind_hass def reload(hass): 'Reload script component.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_RELOAD)
4,122,823,776,589,158,400
Reload script component. This is a legacy helper method. Do not use it for new tests.
tests/components/test_script.py
reload
27tech/home-assistant
python
@bind_hass def reload(hass): 'Reload script component.\n\n This is a legacy helper method. Do not use it for new tests.\n ' hass.services.call(DOMAIN, SERVICE_RELOAD)
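Taken together, the four legacy helpers above support an older synchronous test style; a short sketch, with the entity id and the test name as placeholder assumptions:

def test_script_helpers(hass):
    # 'script.example' is a hypothetical entity id for illustration.
    turn_on(hass, 'script.example', {'greeting': 'world'})
    hass.block_till_done()
    turn_off(hass, 'script.example')  # stop the running script
    toggle(hass, 'script.example')    # start it again by toggling
    reload(hass)                      # re-read the script configuration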
async def test_shared_context(hass): 'Test that the shared context is passed down the chain.' event = 'test_event' context = Context() event_mock = Mock() run_mock = Mock() hass.bus.async_listen(event, event_mock) hass.bus.async_listen(EVENT_SCRIPT_STARTED, run_mock) assert (await async_setup_component(hass, 'script', {'script': {'test': {'sequence': [{'event': event}]}}})) (await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, context=context)) (await hass.async_block_till_done()) assert (event_mock.call_count == 1) assert (run_mock.call_count == 1) (args, kwargs) = run_mock.call_args assert (args[0].context == context) assert (args[0].data.get(ATTR_NAME) == 'test') assert (args[0].data.get(ATTR_ENTITY_ID) == 'script.test') (args, kwargs) = event_mock.call_args assert (args[0].context == context) state = hass.states.get('script.test') assert (state is not None) assert (state.context == context)
4,054,526,703,985,677,000
Test that the shared context is passed down the chain.
tests/components/test_script.py
test_shared_context
27tech/home-assistant
python
async def test_shared_context(hass): event = 'test_event' context = Context() event_mock = Mock() run_mock = Mock() hass.bus.async_listen(event, event_mock) hass.bus.async_listen(EVENT_SCRIPT_STARTED, run_mock) assert (await async_setup_component(hass, 'script', {'script': {'test': {'sequence': [{'event': event}]}}})) (await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, context=context)) (await hass.async_block_till_done()) assert (event_mock.call_count == 1) assert (run_mock.call_count == 1) (args, kwargs) = run_mock.call_args assert (args[0].context == context) assert (args[0].data.get(ATTR_NAME) == 'test') assert (args[0].data.get(ATTR_ENTITY_ID) == 'script.test') (args, kwargs) = event_mock.call_args assert (args[0].context == context) state = hass.states.get('script.test') assert (state is not None) assert (state.context == context)
def setUp(self): 'Set up things to be run when tests are started.' self.hass = get_test_home_assistant()
8,009,785,092,768,199,000
Set up things to be run when tests are started.
tests/components/test_script.py
setUp
27tech/home-assistant
python
def setUp(self): self.hass = get_test_home_assistant()
def tearDown(self): 'Stop down everything that was started.' self.hass.stop()
-7,983,443,536,413,136,000
Stop down everything that was started.
tests/components/test_script.py
tearDown
27tech/home-assistant
python
def tearDown(self): self.hass.stop()
def test_setup_with_invalid_configs(self): 'Test setup with invalid configs.' for value in ({'test': {}}, {'test hello world': {'sequence': [{'event': 'bla'}]}}, {'test': {'sequence': {'event': 'test_event', 'service': 'homeassistant.turn_on'}}}): assert (not setup_component(self.hass, 'script', {'script': value})), 'Script loaded with wrong config {}'.format(value) assert (0 == len(self.hass.states.entity_ids('script')))
-9,022,565,211,143,436,000
Test setup with invalid configs.
tests/components/test_script.py
test_setup_with_invalid_configs
27tech/home-assistant
python
def test_setup_with_invalid_configs(self): for value in ({'test': {}}, {'test hello world': {'sequence': [{'event': 'bla'}]}}, {'test': {'sequence': {'event': 'test_event', 'service': 'homeassistant.turn_on'}}}): assert (not setup_component(self.hass, 'script', {'script': value})), 'Script loaded with wrong config {}'.format(value) assert (0 == len(self.hass.states.entity_ids('script')))
def test_turn_on_service(self): 'Verify that the turn_on service works.' event = 'test_event' events = [] @callback def record_event(event): 'Add recorded event to set.' events.append(event) self.hass.bus.listen(event, record_event) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}, {'event': event}]}}}) turn_on(self.hass, ENTITY_ID) self.hass.block_till_done() assert script.is_on(self.hass, ENTITY_ID) assert (0 == len(events)) turn_on(self.hass, ENTITY_ID) self.hass.block_till_done() assert (0 == len(events)) turn_off(self.hass, ENTITY_ID) self.hass.block_till_done() assert (not script.is_on(self.hass, ENTITY_ID)) assert (0 == len(events)) state = self.hass.states.get('group.all_scripts') assert (state is not None) assert (state.attributes.get('entity_id') == (ENTITY_ID,))
49,833,895,516,182,200
Verify that the turn_on service works.
tests/components/test_script.py
test_turn_on_service
27tech/home-assistant
python
def test_turn_on_service(self): event = 'test_event' events = [] @callback def record_event(event): 'Add recorded event to set.' events.append(event) self.hass.bus.listen(event, record_event) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}, {'event': event}]}}}) turn_on(self.hass, ENTITY_ID) self.hass.block_till_done() assert script.is_on(self.hass, ENTITY_ID) assert (0 == len(events)) turn_on(self.hass, ENTITY_ID) self.hass.block_till_done() assert (0 == len(events)) turn_off(self.hass, ENTITY_ID) self.hass.block_till_done() assert (not script.is_on(self.hass, ENTITY_ID)) assert (0 == len(events)) state = self.hass.states.get('group.all_scripts') assert (state is not None) assert (state.attributes.get('entity_id') == (ENTITY_ID,))
def test_toggle_service(self): 'Test the toggling of a service.' event = 'test_event' events = [] @callback def record_event(event): 'Add recorded event to set.' events.append(event) self.hass.bus.listen(event, record_event) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}, {'event': event}]}}}) toggle(self.hass, ENTITY_ID) self.hass.block_till_done() assert script.is_on(self.hass, ENTITY_ID) assert (0 == len(events)) toggle(self.hass, ENTITY_ID) self.hass.block_till_done() assert (not script.is_on(self.hass, ENTITY_ID)) assert (0 == len(events))
-2,021,375,834,811,586,800
Test the toggling of a service.
tests/components/test_script.py
test_toggle_service
27tech/home-assistant
python
def test_toggle_service(self): event = 'test_event' events = [] @callback def record_event(event): 'Add recorded event to set.' events.append(event) self.hass.bus.listen(event, record_event) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}, {'event': event}]}}}) toggle(self.hass, ENTITY_ID) self.hass.block_till_done() assert script.is_on(self.hass, ENTITY_ID) assert (0 == len(events)) toggle(self.hass, ENTITY_ID) self.hass.block_till_done() assert (not script.is_on(self.hass, ENTITY_ID)) assert (0 == len(events))
def test_passing_variables(self): 'Test different ways of passing in variables.' calls = [] context = Context() @callback def record_call(service): 'Add recorded event to set.' calls.append(service) self.hass.services.register('test', 'script', record_call) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': {'service': 'test.script', 'data_template': {'hello': '{{ greeting }}'}}}}}) turn_on(self.hass, ENTITY_ID, {'greeting': 'world'}, context=context) self.hass.block_till_done() assert (len(calls) == 1) assert (calls[0].context is context) assert (calls[0].data['hello'] == 'world') self.hass.services.call('script', 'test', {'greeting': 'universe'}, context=context) self.hass.block_till_done() assert (len(calls) == 2) assert (calls[1].context is context) assert (calls[1].data['hello'] == 'universe')
-6,479,770,249,143,740,000
Test different ways of passing in variables.
tests/components/test_script.py
test_passing_variables
27tech/home-assistant
python
def test_passing_variables(self): calls = [] context = Context() @callback def record_call(service): 'Add recorded event to set.' calls.append(service) self.hass.services.register('test', 'script', record_call) assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': {'service': 'test.script', 'data_template': {'hello': '{{ greeting }}'}}}}}) turn_on(self.hass, ENTITY_ID, {'greeting': 'world'}, context=context) self.hass.block_till_done() assert (len(calls) == 1) assert (calls[0].context is context) assert (calls[0].data['hello'] == 'world') self.hass.services.call('script', 'test', {'greeting': 'universe'}, context=context) self.hass.block_till_done() assert (len(calls) == 2) assert (calls[1].context is context) assert (calls[1].data['hello'] == 'universe')
def test_reload_service(self): 'Verify that the reload service works.' assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}]}}}) assert (self.hass.states.get(ENTITY_ID) is not None) assert self.hass.services.has_service(script.DOMAIN, 'test') with patch('homeassistant.config.load_yaml_config_file', return_value={'script': {'test2': {'sequence': [{'delay': {'seconds': 5}}]}}}): with patch('homeassistant.config.find_config_file', return_value=''): reload(self.hass) self.hass.block_till_done() assert (self.hass.states.get(ENTITY_ID) is None) assert (not self.hass.services.has_service(script.DOMAIN, 'test')) assert (self.hass.states.get('script.test2') is not None) assert self.hass.services.has_service(script.DOMAIN, 'test2')
2,295,490,650,558,444,500
Verify that the reload service works.
tests/components/test_script.py
test_reload_service
27tech/home-assistant
python
def test_reload_service(self): assert setup_component(self.hass, 'script', {'script': {'test': {'sequence': [{'delay': {'seconds': 5}}]}}}) assert (self.hass.states.get(ENTITY_ID) is not None) assert self.hass.services.has_service(script.DOMAIN, 'test') with patch('homeassistant.config.load_yaml_config_file', return_value={'script': {'test2': {'sequence': [{'delay': {'seconds': 5}}]}}}): with patch('homeassistant.config.find_config_file', return_value=''): reload(self.hass) self.hass.block_till_done() assert (self.hass.states.get(ENTITY_ID) is None) assert (not self.hass.services.has_service(script.DOMAIN, 'test')) assert (self.hass.states.get('script.test2') is not None) assert self.hass.services.has_service(script.DOMAIN, 'test2')
@callback def record_event(event): 'Add recorded event to set.' events.append(event)
81,542,234,316,640,880
Add recorded event to set.
tests/components/test_script.py
record_event
27tech/home-assistant
python
@callback def record_event(event): events.append(event)
@callback def record_event(event): 'Add recorded event to set.' events.append(event)
81,542,234,316,640,880
Add recorded event to set.
tests/components/test_script.py
record_event
27tech/home-assistant
python
@callback def record_event(event): events.append(event)