nwo
stringlengths
5
86
sha
stringlengths
40
40
path
stringlengths
4
189
language
stringclasses
1 value
identifier
stringlengths
1
94
parameters
stringlengths
2
4.03k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
11.5k
docstring
stringlengths
1
33.2k
docstring_summary
stringlengths
0
5.15k
docstring_tokens
list
function
stringlengths
34
151k
function_tokens
list
url
stringlengths
90
278
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/PartDesign/WizardShaft/SegmentFunction.py
python
SegmentFunctionSegment.hasStart
(self, xval)
return abs(self.start - xval) < 1E-9
Return true if the start of this segment is xval
Return true if the start of this segment is xval
[ "Return", "true", "if", "the", "start", "of", "this", "segment", "is", "xval" ]
def hasStart(self, xval): "Return true if the start of this segment is xval" #FIXME: 1E-9 is arbitrary here. But since units are in meters, 1E-9 is a nanometer... return abs(self.start - xval) < 1E-9
[ "def", "hasStart", "(", "self", ",", "xval", ")", ":", "#FIXME: 1E-9 is arbitrary here. But since units are in meters, 1E-9 is a nanometer...", "return", "abs", "(", "self", ".", "start", "-", "xval", ")", "<", "1E-9" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/PartDesign/WizardShaft/SegmentFunction.py#L36-L39
intel/caffe
3f494b442ee3f9d17a07b09ecbd5fa2bbda00836
examples/faster-rcnn/lib/rpn/generate.py
python
_get_image_blob
(im)
return blob, im_info
Converts an image into a network input. Arguments: im (ndarray): a color image in BGR order Returns: blob (ndarray): a data blob holding an image pyramid im_scale_factors (list): list of image scales (relative to im) used in the image pyramid
Converts an image into a network input.
[ "Converts", "an", "image", "into", "a", "network", "input", "." ]
def _get_image_blob(im): """Converts an image into a network input. Arguments: im (ndarray): a color image in BGR order Returns: blob (ndarray): a data blob holding an image pyramid im_scale_factors (list): list of image scales (relative to im) used in the image pyramid """ im_orig = im.astype(np.float32, copy=True) im_orig -= cfg.PIXEL_MEANS im_shape = im_orig.shape im_size_min = np.min(im_shape[0:2]) im_size_max = np.max(im_shape[0:2]) processed_ims = [] assert len(cfg.TEST.SCALES) == 1 target_size = cfg.TEST.SCALES[0] im_scale = float(target_size) / float(im_size_min) # Prevent the biggest axis from being more than MAX_SIZE if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE: im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max) im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR) im_info = np.hstack((im.shape[:2], im_scale))[np.newaxis, :] processed_ims.append(im) # Create a blob to hold the input images blob = im_list_to_blob(processed_ims) return blob, im_info
[ "def", "_get_image_blob", "(", "im", ")", ":", "im_orig", "=", "im", ".", "astype", "(", "np", ".", "float32", ",", "copy", "=", "True", ")", "im_orig", "-=", "cfg", ".", "PIXEL_MEANS", "im_shape", "=", "im_orig", ".", "shape", "im_size_min", "=", "np"...
https://github.com/intel/caffe/blob/3f494b442ee3f9d17a07b09ecbd5fa2bbda00836/examples/faster-rcnn/lib/rpn/generate.py#L47-L82
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/nn/init.py
python
sparse_
(tensor, sparsity, std=0.01)
return tensor
r"""Fills the 2D input `Tensor` as a sparse matrix, where the non-zero elements will be drawn from the normal distribution :math:`\mathcal{N}(0, 0.01)`, as described in `Deep learning via Hessian-free optimization` - Martens, J. (2010). Args: tensor: an n-dimensional `torch.Tensor` sparsity: The fraction of elements in each column to be set to zero std: the standard deviation of the normal distribution used to generate the non-zero values Examples: >>> w = torch.empty(3, 5) >>> nn.init.sparse_(w, sparsity=0.1)
r"""Fills the 2D input `Tensor` as a sparse matrix, where the non-zero elements will be drawn from the normal distribution :math:`\mathcal{N}(0, 0.01)`, as described in `Deep learning via Hessian-free optimization` - Martens, J. (2010).
[ "r", "Fills", "the", "2D", "input", "Tensor", "as", "a", "sparse", "matrix", "where", "the", "non", "-", "zero", "elements", "will", "be", "drawn", "from", "the", "normal", "distribution", ":", "math", ":", "\\", "mathcal", "{", "N", "}", "(", "0", "...
def sparse_(tensor, sparsity, std=0.01): r"""Fills the 2D input `Tensor` as a sparse matrix, where the non-zero elements will be drawn from the normal distribution :math:`\mathcal{N}(0, 0.01)`, as described in `Deep learning via Hessian-free optimization` - Martens, J. (2010). Args: tensor: an n-dimensional `torch.Tensor` sparsity: The fraction of elements in each column to be set to zero std: the standard deviation of the normal distribution used to generate the non-zero values Examples: >>> w = torch.empty(3, 5) >>> nn.init.sparse_(w, sparsity=0.1) """ if tensor.ndimension() != 2: raise ValueError("Only tensors with 2 dimensions are supported") rows, cols = tensor.shape num_zeros = int(math.ceil(sparsity * rows)) with torch.no_grad(): tensor.normal_(0, std) for col_idx in range(cols): row_indices = torch.randperm(rows) zero_indices = row_indices[:num_zeros] tensor[zero_indices, col_idx] = 0 return tensor
[ "def", "sparse_", "(", "tensor", ",", "sparsity", ",", "std", "=", "0.01", ")", ":", "if", "tensor", ".", "ndimension", "(", ")", "!=", "2", ":", "raise", "ValueError", "(", "\"Only tensors with 2 dimensions are supported\"", ")", "rows", ",", "cols", "=", ...
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/nn/init.py#L488-L516
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/flatmenu.py
python
FlatMenuBar.OnLeaveWindow
(self, event)
Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`FlatMenuBar`. :param `event`: a :class:`MouseEvent` event to be processed. :note: This method is for GTK only.
Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`FlatMenuBar`.
[ "Handles", "the", "wx", ".", "EVT_LEAVE_WINDOW", "event", "for", ":", "class", ":", "FlatMenuBar", "." ]
def OnLeaveWindow(self, event): """ Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`FlatMenuBar`. :param `event`: a :class:`MouseEvent` event to be processed. :note: This method is for GTK only. """ self._curretHiliteItem = -1 self._dropDownButtonState = ControlNormal # Reset items state for item in self._items: item.SetState(ControlNormal) for but in self._tbButtons: but._state = ControlNormal self.Refresh()
[ "def", "OnLeaveWindow", "(", "self", ",", "event", ")", ":", "self", ".", "_curretHiliteItem", "=", "-", "1", "self", ".", "_dropDownButtonState", "=", "ControlNormal", "# Reset items state", "for", "item", "in", "self", ".", "_items", ":", "item", ".", "Set...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/flatmenu.py#L3134-L3153
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py
python
GenericTable.get_attrs
(self)
retrieve our attributes
retrieve our attributes
[ "retrieve", "our", "attributes" ]
def get_attrs(self): """ retrieve our attributes """ self.non_index_axes = [] self.nan_rep = None self.levels = [] self.index_axes = [a for a in self.indexables if a.is_an_indexable] self.values_axes = [a for a in self.indexables if not a.is_an_indexable] self.data_columns = [a.name for a in self.values_axes]
[ "def", "get_attrs", "(", "self", ")", ":", "self", ".", "non_index_axes", "=", "[", "]", "self", ".", "nan_rep", "=", "None", "self", ".", "levels", "=", "[", "]", "self", ".", "index_axes", "=", "[", "a", "for", "a", "in", "self", ".", "indexables...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py#L4525-L4533
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/timeseries/python/timeseries/state_space_models/state_space_model.py
python
StateSpaceModel._exogenous_noise_increasing
(self, current_times, exogenous_values, state)
return (start_mean + mean_addition, start_covariance + covariance_addition, previous_times)
Update state with exogenous regressors, increasing uncertainty. Adds to the state mean a linear transformation of `exogenous_values`, and increases uncertainty by constructing a covariance matrix based on `exogenous_values` and adding it to the state covariance. This update is useful for modeling changes relative to current state, e.g. the furnace turned on so the temperature will be increasing at an additional 1 degree per minute with some uncertainty, this uncertainty being added to our current uncertainty in the per-minute change in temperature. Args: current_times: A [batch size] Tensor of times for the exogenous values being input. exogenous_values: A [batch size x exogenous input dimension] Tensor of exogenous values for each part of the batch. state: A tuple of (mean, covariance, previous_times) having shapes mean; [batch size x state dimension] covariance; [batch size x state dimension x state dimension] previous_times; [batch size] Returns: Updated state taking the exogenous regressors into account (with higher uncertainty than the input state).
Update state with exogenous regressors, increasing uncertainty.
[ "Update", "state", "with", "exogenous", "regressors", "increasing", "uncertainty", "." ]
def _exogenous_noise_increasing(self, current_times, exogenous_values, state): """Update state with exogenous regressors, increasing uncertainty. Adds to the state mean a linear transformation of `exogenous_values`, and increases uncertainty by constructing a covariance matrix based on `exogenous_values` and adding it to the state covariance. This update is useful for modeling changes relative to current state, e.g. the furnace turned on so the temperature will be increasing at an additional 1 degree per minute with some uncertainty, this uncertainty being added to our current uncertainty in the per-minute change in temperature. Args: current_times: A [batch size] Tensor of times for the exogenous values being input. exogenous_values: A [batch size x exogenous input dimension] Tensor of exogenous values for each part of the batch. state: A tuple of (mean, covariance, previous_times) having shapes mean; [batch size x state dimension] covariance; [batch size x state dimension x state dimension] previous_times; [batch size] Returns: Updated state taking the exogenous regressors into account (with higher uncertainty than the input state). """ start_mean, start_covariance, previous_times = state with variable_scope.variable_scope("exogenous_noise_increasing_mean"): mean_addition = layers.fully_connected( exogenous_values, tensor_shape.dimension_value(start_mean.shape[1]), activation_fn=None) state_dimension = tensor_shape.dimension_value(start_covariance.shape[1]) with variable_scope.variable_scope("exogenous_noise_increasing_covariance"): covariance_addition = ( math_utils.transform_to_covariance_matrices( exogenous_values, state_dimension)) return (start_mean + mean_addition, start_covariance + covariance_addition, previous_times)
[ "def", "_exogenous_noise_increasing", "(", "self", ",", "current_times", ",", "exogenous_values", ",", "state", ")", ":", "start_mean", ",", "start_covariance", ",", "previous_times", "=", "state", "with", "variable_scope", ".", "variable_scope", "(", "\"exogenous_noi...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/timeseries/python/timeseries/state_space_models/state_space_model.py#L547-L585
Cisco-Talos/moflow
ed71dfb0540d9e0d7a4c72f0881b58958d573728
BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/google/protobuf/internal/wire_format.py
python
_VarUInt64ByteSizeNoTag
(uint64)
return 10
Returns the number of bytes required to serialize a single varint using boundary value comparisons. (unrolled loop optimization -WPierce) uint64 must be unsigned.
Returns the number of bytes required to serialize a single varint using boundary value comparisons. (unrolled loop optimization -WPierce) uint64 must be unsigned.
[ "Returns", "the", "number", "of", "bytes", "required", "to", "serialize", "a", "single", "varint", "using", "boundary", "value", "comparisons", ".", "(", "unrolled", "loop", "optimization", "-", "WPierce", ")", "uint64", "must", "be", "unsigned", "." ]
def _VarUInt64ByteSizeNoTag(uint64): """Returns the number of bytes required to serialize a single varint using boundary value comparisons. (unrolled loop optimization -WPierce) uint64 must be unsigned. """ if uint64 <= 0x7f: return 1 if uint64 <= 0x3fff: return 2 if uint64 <= 0x1fffff: return 3 if uint64 <= 0xfffffff: return 4 if uint64 <= 0x7ffffffff: return 5 if uint64 <= 0x3ffffffffff: return 6 if uint64 <= 0x1ffffffffffff: return 7 if uint64 <= 0xffffffffffffff: return 8 if uint64 <= 0x7fffffffffffffff: return 9 if uint64 > UINT64_MAX: raise message.EncodeError('Value out of range: %d' % uint64) return 10
[ "def", "_VarUInt64ByteSizeNoTag", "(", "uint64", ")", ":", "if", "uint64", "<=", "0x7f", ":", "return", "1", "if", "uint64", "<=", "0x3fff", ":", "return", "2", "if", "uint64", "<=", "0x1fffff", ":", "return", "3", "if", "uint64", "<=", "0xfffffff", ":",...
https://github.com/Cisco-Talos/moflow/blob/ed71dfb0540d9e0d7a4c72f0881b58958d573728/BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/google/protobuf/internal/wire_format.py#L232-L248
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/smtplib.py
python
SMTP.connect
(self, host='localhost', port=0)
return (code, msg)
Connect to a host on a given port. If the hostname ends with a colon (`:') followed by a number, and there is no port specified, that suffix will be stripped off and the number interpreted as the port number to use. Note: This method is automatically invoked by __init__, if a host is specified during instantiation.
Connect to a host on a given port.
[ "Connect", "to", "a", "host", "on", "a", "given", "port", "." ]
def connect(self, host='localhost', port=0): """Connect to a host on a given port. If the hostname ends with a colon (`:') followed by a number, and there is no port specified, that suffix will be stripped off and the number interpreted as the port number to use. Note: This method is automatically invoked by __init__, if a host is specified during instantiation. """ if not port and (host.find(':') == host.rfind(':')): i = host.rfind(':') if i >= 0: host, port = host[:i], host[i + 1:] try: port = int(port) except ValueError: raise socket.error, "nonnumeric port" if not port: port = self.default_port if self.debuglevel > 0: print>>stderr, 'connect:', (host, port) self.sock = self._get_socket(host, port, self.timeout) (code, msg) = self.getreply() if self.debuglevel > 0: print>>stderr, "connect:", msg return (code, msg)
[ "def", "connect", "(", "self", ",", "host", "=", "'localhost'", ",", "port", "=", "0", ")", ":", "if", "not", "port", "and", "(", "host", ".", "find", "(", "':'", ")", "==", "host", ".", "rfind", "(", "':'", ")", ")", ":", "i", "=", "host", "...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/smtplib.py#L294-L321
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
chrome/common/extensions/docs/server2/compiled_file_system.py
python
CompiledFileSystem._RecursiveList
(self, path)
return Future(delegate=Gettable(resolve))
Returns a Future containing the recursive directory listing of |path| as a flat list of paths.
Returns a Future containing the recursive directory listing of |path| as a flat list of paths.
[ "Returns", "a", "Future", "containing", "the", "recursive", "directory", "listing", "of", "|path|", "as", "a", "flat", "list", "of", "paths", "." ]
def _RecursiveList(self, path): '''Returns a Future containing the recursive directory listing of |path| as a flat list of paths. ''' def split_dirs_from_files(paths): '''Returns a tuple (dirs, files) where |dirs| contains the directory names in |paths| and |files| contains the files. ''' result = [], [] for path in paths: result[0 if path.endswith('/') else 1].append(path) return result def add_prefix(prefix, paths): return [prefix + path for path in paths] # Read in the initial list of files. Do this eagerly (i.e. not part of the # asynchronous Future contract) because there's a greater chance to # parallelise fetching with the second layer (can fetch multiple paths). try: first_layer_dirs, first_layer_files = split_dirs_from_files( self._file_system.ReadSingle(path).Get()) except FileNotFoundError: return Future(exc_info=sys.exc_info()) if not first_layer_dirs: return Future(value=first_layer_files) second_layer_listing = self._file_system.Read( add_prefix(path, first_layer_dirs)) def resolve(): def get_from_future_listing(futures): '''Recursively lists files from directory listing |futures|. ''' dirs, files = [], [] for dir_name, listing in futures.Get().iteritems(): new_dirs, new_files = split_dirs_from_files(listing) # |dirs| are paths for reading. Add the full prefix relative to # |path| so that |file_system| can find the files. dirs += add_prefix(dir_name, new_dirs) # |files| are not for reading, they are for returning to the caller. # This entire function set (i.e. GetFromFileListing) is defined to # not include the fetched-path in the result, however, |dir_name| # will be prefixed with |path|. Strip it. assert dir_name.startswith(path) files += add_prefix(dir_name[len(path):], new_files) if dirs: files += get_from_future_listing(self._file_system.Read(dirs)) return files return first_layer_files + get_from_future_listing(second_layer_listing) return Future(delegate=Gettable(resolve))
[ "def", "_RecursiveList", "(", "self", ",", "path", ")", ":", "def", "split_dirs_from_files", "(", "paths", ")", ":", "'''Returns a tuple (dirs, files) where |dirs| contains the directory\n names in |paths| and |files| contains the files.\n '''", "result", "=", "[", "]",...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/chrome/common/extensions/docs/server2/compiled_file_system.py#L148-L201
eventql/eventql
7ca0dbb2e683b525620ea30dc40540a22d5eb227
deps/3rdparty/spidermonkey/mozjs/python/pyyaml/lib/yaml/__init__.py
python
YAMLObject.to_yaml
(cls, dumper, data)
return dumper.represent_yaml_object(cls.yaml_tag, data, cls, flow_style=cls.yaml_flow_style)
Convert a Python object to a representation node.
Convert a Python object to a representation node.
[ "Convert", "a", "Python", "object", "to", "a", "representation", "node", "." ]
def to_yaml(cls, dumper, data): """ Convert a Python object to a representation node. """ return dumper.represent_yaml_object(cls.yaml_tag, data, cls, flow_style=cls.yaml_flow_style)
[ "def", "to_yaml", "(", "cls", ",", "dumper", ",", "data", ")", ":", "return", "dumper", ".", "represent_yaml_object", "(", "cls", ".", "yaml_tag", ",", "data", ",", "cls", ",", "flow_style", "=", "cls", ".", "yaml_flow_style", ")" ]
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/pyyaml/lib/yaml/__init__.py#L308-L313
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/propgrid.py
python
PropertyGridManager.SetPageSplitterLeft
(*args, **kwargs)
return _propgrid.PropertyGridManager_SetPageSplitterLeft(*args, **kwargs)
SetPageSplitterLeft(self, int page, bool subProps=False)
SetPageSplitterLeft(self, int page, bool subProps=False)
[ "SetPageSplitterLeft", "(", "self", "int", "page", "bool", "subProps", "=", "False", ")" ]
def SetPageSplitterLeft(*args, **kwargs): """SetPageSplitterLeft(self, int page, bool subProps=False)""" return _propgrid.PropertyGridManager_SetPageSplitterLeft(*args, **kwargs)
[ "def", "SetPageSplitterLeft", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGridManager_SetPageSplitterLeft", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L3586-L3588
deepmind/open_spiel
4ca53bea32bb2875c7385d215424048ae92f78c8
open_spiel/python/algorithms/dqn.py
python
DQN.__init__
(self, session, player_id, state_representation_size, num_actions, hidden_layers_sizes=128, replay_buffer_capacity=10000, batch_size=128, replay_buffer_class=ReplayBuffer, learning_rate=0.01, update_target_network_every=1000, learn_every=10, discount_factor=1.0, min_buffer_size_to_learn=1000, epsilon_start=1.0, epsilon_end=0.1, epsilon_decay_duration=int(1e6), optimizer_str="sgd", loss_str="mse")
Initialize the DQN agent.
Initialize the DQN agent.
[ "Initialize", "the", "DQN", "agent", "." ]
def __init__(self, session, player_id, state_representation_size, num_actions, hidden_layers_sizes=128, replay_buffer_capacity=10000, batch_size=128, replay_buffer_class=ReplayBuffer, learning_rate=0.01, update_target_network_every=1000, learn_every=10, discount_factor=1.0, min_buffer_size_to_learn=1000, epsilon_start=1.0, epsilon_end=0.1, epsilon_decay_duration=int(1e6), optimizer_str="sgd", loss_str="mse"): """Initialize the DQN agent.""" # This call to locals() is used to store every argument used to initialize # the class instance, so it can be copied with no hyperparameter change. self._kwargs = locals() self.player_id = player_id self._session = session self._num_actions = num_actions if isinstance(hidden_layers_sizes, int): hidden_layers_sizes = [hidden_layers_sizes] self._layer_sizes = hidden_layers_sizes self._batch_size = batch_size self._update_target_network_every = update_target_network_every self._learn_every = learn_every self._min_buffer_size_to_learn = min_buffer_size_to_learn self._discount_factor = discount_factor self._epsilon_start = epsilon_start self._epsilon_end = epsilon_end self._epsilon_decay_duration = epsilon_decay_duration # TODO(author6) Allow for optional replay buffer config. if not isinstance(replay_buffer_capacity, int): raise ValueError("Replay buffer capacity not an integer.") self._replay_buffer = replay_buffer_class(replay_buffer_capacity) self._prev_timestep = None self._prev_action = None # Step counter to keep track of learning, eps decay and target network. self._step_counter = 0 # Keep track of the last training loss achieved in an update step. self._last_loss_value = None # Create required TensorFlow placeholders to perform the Q-network updates. 
self._info_state_ph = tf.placeholder( shape=[None, state_representation_size], dtype=tf.float32, name="info_state_ph") self._action_ph = tf.placeholder( shape=[None], dtype=tf.int32, name="action_ph") self._reward_ph = tf.placeholder( shape=[None], dtype=tf.float32, name="reward_ph") self._is_final_step_ph = tf.placeholder( shape=[None], dtype=tf.float32, name="is_final_step_ph") self._next_info_state_ph = tf.placeholder( shape=[None, state_representation_size], dtype=tf.float32, name="next_info_state_ph") self._legal_actions_mask_ph = tf.placeholder( shape=[None, num_actions], dtype=tf.float32, name="legal_actions_mask_ph") self._q_network = simple_nets.MLP(state_representation_size, self._layer_sizes, num_actions) self._q_values = self._q_network(self._info_state_ph) self._target_q_network = simple_nets.MLP(state_representation_size, self._layer_sizes, num_actions) self._target_q_values = self._target_q_network(self._next_info_state_ph) # Stop gradient to prevent updates to the target network while learning self._target_q_values = tf.stop_gradient(self._target_q_values) self._update_target_network = self._create_target_network_update_op( self._q_network, self._target_q_network) # Create the loss operations. # Sum a large negative constant to illegal action logits before taking the # max. This prevents illegal action values from being considered as target. 
illegal_actions = 1 - self._legal_actions_mask_ph illegal_logits = illegal_actions * ILLEGAL_ACTION_LOGITS_PENALTY max_next_q = tf.reduce_max( tf.math.add(tf.stop_gradient(self._target_q_values), illegal_logits), axis=-1) target = ( self._reward_ph + (1 - self._is_final_step_ph) * self._discount_factor * max_next_q) action_indices = tf.stack( [tf.range(tf.shape(self._q_values)[0]), self._action_ph], axis=-1) predictions = tf.gather_nd(self._q_values, action_indices) self._savers = [("q_network", tf.train.Saver(self._q_network.variables)), ("target_q_network", tf.train.Saver(self._target_q_network.variables))] if loss_str == "mse": loss_class = tf.losses.mean_squared_error elif loss_str == "huber": loss_class = tf.losses.huber_loss else: raise ValueError("Not implemented, choose from 'mse', 'huber'.") self._loss = tf.reduce_mean( loss_class(labels=target, predictions=predictions)) if optimizer_str == "adam": self._optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate) elif optimizer_str == "sgd": self._optimizer = tf.train.GradientDescentOptimizer( learning_rate=learning_rate) else: raise ValueError("Not implemented, choose from 'adam' and 'sgd'.") self._learn_step = self._optimizer.minimize(self._loss) self._initialize()
[ "def", "__init__", "(", "self", ",", "session", ",", "player_id", ",", "state_representation_size", ",", "num_actions", ",", "hidden_layers_sizes", "=", "128", ",", "replay_buffer_capacity", "=", "10000", ",", "batch_size", "=", "128", ",", "replay_buffer_class", ...
https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/dqn.py#L47-L174
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/integrate/_ode.py
python
_transform_banded_jac
(bjac)
return newjac
Convert a real matrix of the form (for example) [0 0 A B] [0 0 0 B] [0 0 C D] [0 0 A D] [E F G H] to [0 F C H] [I J K L] [E J G L] [I 0 K 0] That is, every other column is shifted up one.
Convert a real matrix of the form (for example)
[ "Convert", "a", "real", "matrix", "of", "the", "form", "(", "for", "example", ")" ]
def _transform_banded_jac(bjac): """ Convert a real matrix of the form (for example) [0 0 A B] [0 0 0 B] [0 0 C D] [0 0 A D] [E F G H] to [0 F C H] [I J K L] [E J G L] [I 0 K 0] That is, every other column is shifted up one. """ # Shift every other column. newjac = zeros((bjac.shape[0] + 1, bjac.shape[1])) newjac[1:, ::2] = bjac[:, ::2] newjac[:-1, 1::2] = bjac[:, 1::2] return newjac
[ "def", "_transform_banded_jac", "(", "bjac", ")", ":", "# Shift every other column.", "newjac", "=", "zeros", "(", "(", "bjac", ".", "shape", "[", "0", "]", "+", "1", ",", "bjac", ".", "shape", "[", "1", "]", ")", ")", "newjac", "[", "1", ":", ",", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/integrate/_ode.py#L571-L587
bristolcrypto/SPDZ-2
721abfae849625a02ea49aabc534f9cf41ca643f
Compiler/program.py
python
Program.curr_tape
(self)
return self._curr_tape
The tape that is currently running.
The tape that is currently running.
[ "The", "tape", "that", "is", "currently", "running", "." ]
def curr_tape(self): """ The tape that is currently running.""" if self._curr_tape is None: # Create a new main thread if necessary self.restart_main_thread() return self._curr_tape
[ "def", "curr_tape", "(", "self", ")", ":", "if", "self", ".", "_curr_tape", "is", "None", ":", "# Create a new main thread if necessary", "self", ".", "restart_main_thread", "(", ")", "return", "self", ".", "_curr_tape" ]
https://github.com/bristolcrypto/SPDZ-2/blob/721abfae849625a02ea49aabc534f9cf41ca643f/Compiler/program.py#L315-L320
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/protobuf/python/google/protobuf/descriptor_pool.py
python
DescriptorPool._ConvertEnumDescriptor
(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None)
return desc
Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor
Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
[ "Make", "a", "protobuf", "EnumDescriptor", "given", "an", "EnumDescriptorProto", "protobuf", "." ]
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, containing_type=None, scope=None): """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. Args: enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. package: Optional package name for the new message EnumDescriptor. file_desc: The file containing the enum descriptor. containing_type: The type containing this enum. scope: Scope containing available types. Returns: The added descriptor """ if package: enum_name = '.'.join((package, enum_proto.name)) else: enum_name = enum_proto.name if file_desc is None: file_name = None else: file_name = file_desc.name values = [self._MakeEnumValueDescriptor(value, index) for index, value in enumerate(enum_proto.value)] desc = descriptor.EnumDescriptor(name=enum_proto.name, full_name=enum_name, filename=file_name, file=file_desc, values=values, containing_type=containing_type, options=enum_proto.options) scope['.%s' % enum_name] = desc self._enum_descriptors[enum_name] = desc return desc
[ "def", "_ConvertEnumDescriptor", "(", "self", ",", "enum_proto", ",", "package", "=", "None", ",", "file_desc", "=", "None", ",", "containing_type", "=", "None", ",", "scope", "=", "None", ")", ":", "if", "package", ":", "enum_name", "=", "'.'", ".", "jo...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/google/protobuf/descriptor_pool.py#L482-L518
wyrover/book-code
7f4883d9030d553bc6bcfa3da685e34789839900
3rdparty/protobuf/python/mox.py
python
MockMethod.GetPossibleGroup
(self)
return group
Returns a possible group from the end of the call queue or None if no other methods are on the stack.
Returns a possible group from the end of the call queue or None if no other methods are on the stack.
[ "Returns", "a", "possible", "group", "from", "the", "end", "of", "the", "call", "queue", "or", "None", "if", "no", "other", "methods", "are", "on", "the", "stack", "." ]
def GetPossibleGroup(self): """Returns a possible group from the end of the call queue or None if no other methods are on the stack. """ # Remove this method from the tail of the queue so we can add it to a group. this_method = self._call_queue.pop() assert this_method == self # Determine if the tail of the queue is a group, or just a regular ordered # mock method. group = None try: group = self._call_queue[-1] except IndexError: pass return group
[ "def", "GetPossibleGroup", "(", "self", ")", ":", "# Remove this method from the tail of the queue so we can add it to a group.", "this_method", "=", "self", ".", "_call_queue", ".", "pop", "(", ")", "assert", "this_method", "==", "self", "# Determine if the tail of the queue...
https://github.com/wyrover/book-code/blob/7f4883d9030d553bc6bcfa3da685e34789839900/3rdparty/protobuf/python/mox.py#L645-L662
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/ndarray/ndarray.py
python
maximum
(lhs, rhs)
return _ufunc_helper( lhs, rhs, op.broadcast_maximum, lambda x, y: x if x > y else y, _internal._maximum_scalar, None)
Returns element-wise maximum of the input arrays with broadcasting. Equivalent to ``mx.nd.broadcast_maximum(lhs, rhs)``. .. note:: If the corresponding dimensions of two arrays have the same size or one of them has size 1, then the arrays are broadcastable to a common shape. Parameters ---------- lhs : scalar or mxnet.ndarray.array First array to be compared. rhs : scalar or mxnet.ndarray.array Second array to be compared. If ``lhs.shape != rhs.shape``, they must be broadcastable to a common shape. Returns ------- NDArray The element-wise maximum of the input arrays. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.arange(2).reshape((2,1)) >>> z = mx.nd.arange(2).reshape((1,2)) >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.asnumpy() array([[ 0.], [ 1.]], dtype=float32) >>> z.asnumpy() array([[ 0., 1.]], dtype=float32) >>> mx.nd.maximum(x, 2).asnumpy() array([[ 2., 2., 2.], [ 2., 2., 2.]], dtype=float32) >>> mx.nd.maximum(x, y).asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> mx.nd.maximum(y, z).asnumpy() array([[ 0., 1.], [ 1., 1.]], dtype=float32)
Returns element-wise maximum of the input arrays with broadcasting.
[ "Returns", "element", "-", "wise", "maximum", "of", "the", "input", "arrays", "with", "broadcasting", "." ]
def maximum(lhs, rhs): """Returns element-wise maximum of the input arrays with broadcasting. Equivalent to ``mx.nd.broadcast_maximum(lhs, rhs)``. .. note:: If the corresponding dimensions of two arrays have the same size or one of them has size 1, then the arrays are broadcastable to a common shape. Parameters ---------- lhs : scalar or mxnet.ndarray.array First array to be compared. rhs : scalar or mxnet.ndarray.array Second array to be compared. If ``lhs.shape != rhs.shape``, they must be broadcastable to a common shape. Returns ------- NDArray The element-wise maximum of the input arrays. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.arange(2).reshape((2,1)) >>> z = mx.nd.arange(2).reshape((1,2)) >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.asnumpy() array([[ 0.], [ 1.]], dtype=float32) >>> z.asnumpy() array([[ 0., 1.]], dtype=float32) >>> mx.nd.maximum(x, 2).asnumpy() array([[ 2., 2., 2.], [ 2., 2., 2.]], dtype=float32) >>> mx.nd.maximum(x, y).asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> mx.nd.maximum(y, z).asnumpy() array([[ 0., 1.], [ 1., 1.]], dtype=float32) """ # pylint: disable= no-member, protected-access return _ufunc_helper( lhs, rhs, op.broadcast_maximum, lambda x, y: x if x > y else y, _internal._maximum_scalar, None)
[ "def", "maximum", "(", "lhs", ",", "rhs", ")", ":", "# pylint: disable= no-member, protected-access", "return", "_ufunc_helper", "(", "lhs", ",", "rhs", ",", "op", ".", "broadcast_maximum", ",", "lambda", "x", ",", "y", ":", "x", "if", "x", ">", "y", "else...
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/ndarray/ndarray.py#L3095-L3148
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/cudf/cudf/core/dataframe.py
python
DataFrame._prepare_for_rowwise_op
(self, method, skipna)
return coerced, mask, common_dtype
Prepare a DataFrame for CuPy-based row-wise operations.
Prepare a DataFrame for CuPy-based row-wise operations.
[ "Prepare", "a", "DataFrame", "for", "CuPy", "-", "based", "row", "-", "wise", "operations", "." ]
def _prepare_for_rowwise_op(self, method, skipna): """Prepare a DataFrame for CuPy-based row-wise operations.""" if method not in _cupy_nan_methods_map and any( col.nullable for col in self._columns ): msg = ( f"Row-wise operations to calculate '{method}' do not " f"currently support columns with null values. " f"Consider removing them with .dropna() " f"or using .fillna()." ) raise ValueError(msg) is_pure_dt = all(is_datetime_dtype(dt) for dt in self.dtypes) if not is_pure_dt: filtered = self.select_dtypes(include=[np.number, np.bool_]) else: filtered = self.copy(deep=False) common_dtype = find_common_type(filtered.dtypes) if filtered._num_columns < self._num_columns: msg = ( "Row-wise operations currently only support int, float " "and bool dtypes. Non numeric columns are ignored." ) warnings.warn(msg) if not skipna and any(col.nullable for col in filtered._columns): mask = DataFrame( { name: filtered._data[name]._get_mask_as_column() if filtered._data[name].nullable else column.full(len(filtered._data[name]), True) for name in filtered._data.names } ) mask = mask.all(axis=1) else: mask = None coerced = filtered.astype(common_dtype, copy=False) if is_pure_dt: # Further convert into cupy friendly types coerced = coerced.astype("int64", copy=False) return coerced, mask, common_dtype
[ "def", "_prepare_for_rowwise_op", "(", "self", ",", "method", ",", "skipna", ")", ":", "if", "method", "not", "in", "_cupy_nan_methods_map", "and", "any", "(", "col", ".", "nullable", "for", "col", "in", "self", ".", "_columns", ")", ":", "msg", "=", "("...
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/dataframe.py#L5130-L5177
0ad/0ad
f58db82e0e925016d83f4e3fa7ca599e3866e2af
source/tools/i18n/generateDebugTranslation.py
python
generate_long_strings
(root_path, input_file_name, output_file_name, languages=None)
Generate the 'long strings' debug catalog. This catalog contains the longest singular and plural string, found amongst all translated languages or a filtered subset. It can be used to check if GUI elements are large enough. The catalog is long.*.po
Generate the 'long strings' debug catalog. This catalog contains the longest singular and plural string, found amongst all translated languages or a filtered subset. It can be used to check if GUI elements are large enough. The catalog is long.*.po
[ "Generate", "the", "long", "strings", "debug", "catalog", ".", "This", "catalog", "contains", "the", "longest", "singular", "and", "plural", "string", "found", "amongst", "all", "translated", "languages", "or", "a", "filtered", "subset", ".", "It", "can", "be"...
def generate_long_strings(root_path, input_file_name, output_file_name, languages=None): """ Generate the 'long strings' debug catalog. This catalog contains the longest singular and plural string, found amongst all translated languages or a filtered subset. It can be used to check if GUI elements are large enough. The catalog is long.*.po """ print("Generating", output_file_name) input_file_path = os.path.join(root_path, input_file_name) output_file_path = os.path.join(root_path, output_file_name) template_catalog = Catalog.readFrom(input_file_path) # Pretend we write English to get plurals. long_string_catalog = Catalog(locale="en") # Fill catalog with English strings. for message in template_catalog: long_string_catalog.add( id=message.id, string=message.id, context=message.context) # Load existing translation catalogs. existing_translation_catalogs = getCatalogs(input_file_path, languages) # If any existing translation has more characters than the average expansion, use that instead. for translation_catalog in existing_translation_catalogs: for long_string_catalog_message in long_string_catalog: translation_message = translation_catalog.get( long_string_catalog_message.id, long_string_catalog_message.context) if not translation_message or not translation_message.string: continue if not long_string_catalog_message.pluralizable or not translation_message.pluralizable: if len(translation_message.string) > len(long_string_catalog_message.string): long_string_catalog_message.string = translation_message.string continue longest_singular_string = translation_message.string[0] longest_plural_string = translation_message.string[1 if len( translation_message.string) > 1 else 0] candidate_singular_string = long_string_catalog_message.string[0] # There might be between 0 and infinite plural forms. 
candidate_plural_string = "" for candidate_string in long_string_catalog_message.string[1:]: if len(candidate_string) > len(candidate_plural_string): candidate_plural_string = candidate_string changed = False if len(candidate_singular_string) > len(longest_singular_string): longest_singular_string = candidate_singular_string changed = True if len(candidate_plural_string) > len(longest_plural_string): longest_plural_string = candidate_plural_string changed = True if changed: long_string_catalog_message.string = [ longest_singular_string, longest_plural_string] translation_message = long_string_catalog_message long_string_catalog.writeTo(output_file_path)
[ "def", "generate_long_strings", "(", "root_path", ",", "input_file_name", ",", "output_file_name", ",", "languages", "=", "None", ")", ":", "print", "(", "\"Generating\"", ",", "output_file_name", ")", "input_file_path", "=", "os", ".", "path", ".", "join", "(",...
https://github.com/0ad/0ad/blob/f58db82e0e925016d83f4e3fa7ca599e3866e2af/source/tools/i18n/generateDebugTranslation.py#L32-L92
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/logistic_regressor.py
python
LogisticRegressor.get_default_metrics
(cls, thresholds=None)
return metrics
Returns a dictionary of basic metrics for logistic regression. Args: thresholds: List of floating point thresholds to use for accuracy, precision, and recall metrics. If None, defaults to [0.5]. Returns: Dictionary mapping metrics string names to metrics functions.
Returns a dictionary of basic metrics for logistic regression.
[ "Returns", "a", "dictionary", "of", "basic", "metrics", "for", "logistic", "regression", "." ]
def get_default_metrics(cls, thresholds=None): """Returns a dictionary of basic metrics for logistic regression. Args: thresholds: List of floating point thresholds to use for accuracy, precision, and recall metrics. If None, defaults to [0.5]. Returns: Dictionary mapping metrics string names to metrics functions. """ if thresholds is None: thresholds = [0.5] metrics = {} metrics[cls.PREDICTION_MEAN] = _predictions_streaming_mean metrics[cls.TARGET_MEAN] = _targets_streaming_mean # Also include the streaming mean of the label as an accuracy baseline, as # a reminder to users. metrics[cls.ACCURACY_BASELINE] = _targets_streaming_mean metrics[cls.AUC] = metrics_lib.streaming_auc for threshold in thresholds: metrics[cls.ACCURACY_MEAN % threshold] = _make_streaming_with_threshold( metrics_lib.streaming_accuracy, threshold) # Precision for positive examples. metrics[cls.PRECISION_MEAN % threshold] = _make_streaming_with_threshold( metrics_lib.streaming_precision, threshold) # Recall for positive examples. metrics[cls.RECALL_MEAN % threshold] = _make_streaming_with_threshold( metrics_lib.streaming_recall, threshold) return metrics
[ "def", "get_default_metrics", "(", "cls", ",", "thresholds", "=", "None", ")", ":", "if", "thresholds", "is", "None", ":", "thresholds", "=", "[", "0.5", "]", "metrics", "=", "{", "}", "metrics", "[", "cls", ".", "PREDICTION_MEAN", "]", "=", "_prediction...
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/logistic_regressor.py#L84-L116
Tencent/CMONGO
c40380caa14e05509f46993aa8b8da966b09b0b5
buildscripts/cpplint.py
python
_BlockInfo.CheckEnd
(self, filename, clean_lines, linenum, error)
Run checks that applies to text after the closing brace. This is mostly used for checking end of namespace comments. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Run checks that applies to text after the closing brace.
[ "Run", "checks", "that", "applies", "to", "text", "after", "the", "closing", "brace", "." ]
def CheckEnd(self, filename, clean_lines, linenum, error): """Run checks that applies to text after the closing brace. This is mostly used for checking end of namespace comments. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ pass
[ "def", "CheckEnd", "(", "self", ",", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "pass" ]
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/buildscripts/cpplint.py#L2027-L2038
SFTtech/openage
d6a08c53c48dc1e157807471df92197f6ca9e04d
openage/nyan/nyan_structs.py
python
NyanObject._prepare_inheritance_content
(self, import_tree=None)
return output_str
Returns a string containing the nyan object's inheritance set in the header. Subroutine of dump().
Returns a string containing the nyan object's inheritance set in the header.
[ "Returns", "a", "string", "containing", "the", "nyan", "object", "s", "inheritance", "set", "in", "the", "header", "." ]
def _prepare_inheritance_content(self, import_tree=None): """ Returns a string containing the nyan object's inheritance set in the header. Subroutine of dump(). """ output_str = "(" if len(self._parents) > 0: for parent in self._parents: if import_tree: sfqon = ".".join(import_tree.get_alias_fqon( parent.get_fqon(), namespace=self.get_fqon() )) else: sfqon = ".".join(parent.get_fqon()) output_str += f"{sfqon}, " output_str = output_str[:-2] output_str += "):\n" return output_str
[ "def", "_prepare_inheritance_content", "(", "self", ",", "import_tree", "=", "None", ")", ":", "output_str", "=", "\"(\"", "if", "len", "(", "self", ".", "_parents", ")", ">", "0", ":", "for", "parent", "in", "self", ".", "_parents", ":", "if", "import_t...
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/nyan/nyan_structs.py#L390-L416
amrayn/easyloggingpp
8489989bb26c6371df103f6cbced3fbee1bc3c2f
tools/cpplint.py
python
_VerboseLevel
()
return _cpplint_state.verbose_level
Returns the module's verbosity setting.
Returns the module's verbosity setting.
[ "Returns", "the", "module", "s", "verbosity", "setting", "." ]
def _VerboseLevel(): """Returns the module's verbosity setting.""" return _cpplint_state.verbose_level
[ "def", "_VerboseLevel", "(", ")", ":", "return", "_cpplint_state", ".", "verbose_level" ]
https://github.com/amrayn/easyloggingpp/blob/8489989bb26c6371df103f6cbced3fbee1bc3c2f/tools/cpplint.py#L754-L756
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/distutils/archive_util.py
python
make_zipfile
(base_name, base_dir, verbose=0, dry_run=0)
return zip_filename
Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises DistutilsExecError. Returns the name of the output zip file.
Create a zip file from all the files under 'base_dir'.
[ "Create", "a", "zip", "file", "from", "all", "the", "files", "under", "base_dir", "." ]
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): """Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises DistutilsExecError. Returns the name of the output zip file. """ try: import zipfile except ImportError: zipfile = None zip_filename = base_name + ".zip" mkpath(os.path.dirname(zip_filename), dry_run=dry_run) # If zipfile module is not available, try spawning an external # 'zip' command. if zipfile is None: if verbose: zipoptions = "-r" else: zipoptions = "-rq" try: spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) except DistutilsExecError: # XXX really should distinguish between "couldn't find # external 'zip' command" and "zip failed". raise DistutilsExecError, \ ("unable to create zip file '%s': " "could neither import the 'zipfile' module nor " "find a standalone zip utility") % zip_filename else: log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) if base_dir != os.curdir: path = os.path.normpath(os.path.join(base_dir, '')) zip.write(path, path) log.info("adding '%s'", path) for dirpath, dirnames, filenames in os.walk(base_dir): for name in dirnames: path = os.path.normpath(os.path.join(dirpath, name, '')) zip.write(path, path) log.info("adding '%s'", path) for name in filenames: path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): zip.write(path, path) log.info("adding '%s'" % path) zip.close() return zip_filename
[ "def", "make_zipfile", "(", "base_name", ",", "base_dir", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ")", ":", "try", ":", "import", "zipfile", "except", "ImportError", ":", "zipfile", "=", "None", "zip_filename", "=", "base_name", "+", "\".zip\""...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/distutils/archive_util.py#L121-L181
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/storage_uri.py
python
BucketStorageUri.is_cloud_uri
(self)
return True
Returns True if this URI names a bucket or object.
Returns True if this URI names a bucket or object.
[ "Returns", "True", "if", "this", "URI", "names", "a", "bucket", "or", "object", "." ]
def is_cloud_uri(self): """Returns True if this URI names a bucket or object.""" return True
[ "def", "is_cloud_uri", "(", "self", ")", ":", "return", "True" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/storage_uri.py#L512-L514
MegEngine/MegEngine
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
imperative/python/megengine/dtr/dtr.py
python
enable_sqrt_sampling
(mod)
return _enable_sqrt_sampling
r"""Get or set whether sqrt sampling is allowed. Sqrt sampling means that given the size of the candidate set is N, only enumerate sqrt(N) tensors. When the number of tensors is very high, enabling this optimization will speed up the training. Examples: .. code-block:: import megengine as mge mge.dtr.enable_sqrt_sampling = True
r"""Get or set whether sqrt sampling is allowed. Sqrt sampling means that given the size of the candidate set is N, only enumerate sqrt(N) tensors. When the number of tensors is very high, enabling this optimization will speed up the training. Examples: .. code-block::
[ "r", "Get", "or", "set", "whether", "sqrt", "sampling", "is", "allowed", ".", "Sqrt", "sampling", "means", "that", "given", "the", "size", "of", "the", "candidate", "set", "is", "N", "only", "enumerate", "sqrt", "(", "N", ")", "tensors", ".", "When", "...
def enable_sqrt_sampling(mod): r"""Get or set whether sqrt sampling is allowed. Sqrt sampling means that given the size of the candidate set is N, only enumerate sqrt(N) tensors. When the number of tensors is very high, enabling this optimization will speed up the training. Examples: .. code-block:: import megengine as mge mge.dtr.enable_sqrt_sampling = True """ return _enable_sqrt_sampling
[ "def", "enable_sqrt_sampling", "(", "mod", ")", ":", "return", "_enable_sqrt_sampling" ]
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/dtr/dtr.py#L97-L109
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/misc/doccer.py
python
inherit_docstring_from
(cls)
return _doc
This decorator modifies the decorated function's docstring by replacing occurrences of '%(super)s' with the docstring of the method of the same name from the class `cls`. If the decorated method has no docstring, it is simply given the docstring of `cls`s method. Parameters ---------- cls : Python class or instance A class with a method with the same name as the decorated method. The docstring of the method in this class replaces '%(super)s' in the docstring of the decorated method. Returns ------- f : function The decorator function that modifies the __doc__ attribute of its argument. Examples -------- In the following, the docstring for Bar.func created using the docstring of `Foo.func`. >>> class Foo(object): ... def func(self): ... '''Do something useful.''' ... return ... >>> class Bar(Foo): ... @inherit_docstring_from(Foo) ... def func(self): ... '''%(super)s ... Do it fast. ... ''' ... return ... >>> b = Bar() >>> b.func.__doc__ 'Do something useful.\n Do it fast.\n '
This decorator modifies the decorated function's docstring by replacing occurrences of '%(super)s' with the docstring of the method of the same name from the class `cls`.
[ "This", "decorator", "modifies", "the", "decorated", "function", "s", "docstring", "by", "replacing", "occurrences", "of", "%", "(", "super", ")", "s", "with", "the", "docstring", "of", "the", "method", "of", "the", "same", "name", "from", "the", "class", ...
def inherit_docstring_from(cls): """ This decorator modifies the decorated function's docstring by replacing occurrences of '%(super)s' with the docstring of the method of the same name from the class `cls`. If the decorated method has no docstring, it is simply given the docstring of `cls`s method. Parameters ---------- cls : Python class or instance A class with a method with the same name as the decorated method. The docstring of the method in this class replaces '%(super)s' in the docstring of the decorated method. Returns ------- f : function The decorator function that modifies the __doc__ attribute of its argument. Examples -------- In the following, the docstring for Bar.func created using the docstring of `Foo.func`. >>> class Foo(object): ... def func(self): ... '''Do something useful.''' ... return ... >>> class Bar(Foo): ... @inherit_docstring_from(Foo) ... def func(self): ... '''%(super)s ... Do it fast. ... ''' ... return ... >>> b = Bar() >>> b.func.__doc__ 'Do something useful.\n Do it fast.\n ' """ def _doc(func): cls_docstring = getattr(cls, func.__name__).__doc__ func_docstring = func.__doc__ if func_docstring is None: func.__doc__ = cls_docstring else: new_docstring = func_docstring % dict(super=cls_docstring) func.__doc__ = new_docstring return func return _doc
[ "def", "inherit_docstring_from", "(", "cls", ")", ":", "def", "_doc", "(", "func", ")", ":", "cls_docstring", "=", "getattr", "(", "cls", ",", "func", ".", "__name__", ")", ".", "__doc__", "func_docstring", "=", "func", ".", "__doc__", "if", "func_docstrin...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/misc/doccer.py#L71-L125
microsoft/ivy
9f3c7ecc0b2383129fdd0953e10890d98d09a82d
ivy/ivy_parser.py
python
p_loc
(p)
loc :
loc :
[ "loc", ":" ]
def p_loc(p): 'loc : ' p[0] = None
[ "def", "p_loc", "(", "p", ")", ":", "p", "[", "0", "]", "=", "None" ]
https://github.com/microsoft/ivy/blob/9f3c7ecc0b2383129fdd0953e10890d98d09a82d/ivy/ivy_parser.py#L2005-L2007
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py3/numpy/ma/core.py
python
put
(a, indices, values, mode='raise')
Set storage-indexed locations to corresponding values. This function is equivalent to `MaskedArray.put`, see that method for details. See Also -------- MaskedArray.put
Set storage-indexed locations to corresponding values.
[ "Set", "storage", "-", "indexed", "locations", "to", "corresponding", "values", "." ]
def put(a, indices, values, mode='raise'): """ Set storage-indexed locations to corresponding values. This function is equivalent to `MaskedArray.put`, see that method for details. See Also -------- MaskedArray.put """ # We can't use 'frommethod', the order of arguments is different try: return a.put(indices, values, mode=mode) except AttributeError: return narray(a, copy=False).put(indices, values, mode=mode)
[ "def", "put", "(", "a", ",", "indices", ",", "values", ",", "mode", "=", "'raise'", ")", ":", "# We can't use 'frommethod', the order of arguments is different", "try", ":", "return", "a", ".", "put", "(", "indices", ",", "values", ",", "mode", "=", "mode", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/ma/core.py#L7046-L7062
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/richtext.py
python
RichTextBuffer_SetBulletRightMargin
(*args, **kwargs)
return _richtext.RichTextBuffer_SetBulletRightMargin(*args, **kwargs)
RichTextBuffer_SetBulletRightMargin(int margin)
RichTextBuffer_SetBulletRightMargin(int margin)
[ "RichTextBuffer_SetBulletRightMargin", "(", "int", "margin", ")" ]
def RichTextBuffer_SetBulletRightMargin(*args, **kwargs): """RichTextBuffer_SetBulletRightMargin(int margin)""" return _richtext.RichTextBuffer_SetBulletRightMargin(*args, **kwargs)
[ "def", "RichTextBuffer_SetBulletRightMargin", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_richtext", ".", "RichTextBuffer_SetBulletRightMargin", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/richtext.py#L2717-L2719
ArduPilot/ardupilot
6e684b3496122b8158ac412b609d00004b7ac306
Tools/scripts/build_binaries.py
python
build_binaries.read_string_from_filepath
(self, filepath)
return content
returns content of filepath as a string
returns content of filepath as a string
[ "returns", "content", "of", "filepath", "as", "a", "string" ]
def read_string_from_filepath(self, filepath): '''returns content of filepath as a string''' with open(filepath, 'rb') as fh: content = fh.read() return content
[ "def", "read_string_from_filepath", "(", "self", ",", "filepath", ")", ":", "with", "open", "(", "filepath", ",", "'rb'", ")", "as", "fh", ":", "content", "=", "fh", ".", "read", "(", ")", "return", "content" ]
https://github.com/ArduPilot/ardupilot/blob/6e684b3496122b8158ac412b609d00004b7ac306/Tools/scripts/build_binaries.py#L347-L351
Cantera/cantera
0119484b261967ccb55a0066c020599cacc312e4
interfaces/cython/cantera/ctml2yaml.py
python
main
()
Parse command line arguments and pass them to `convert`.
Parse command line arguments and pass them to `convert`.
[ "Parse", "command", "line", "arguments", "and", "pass", "them", "to", "convert", "." ]
def main(): """Parse command line arguments and pass them to `convert`.""" parser = argparse.ArgumentParser( description="Convert legacy CTML input files to YAML format", epilog=( "The 'output' argument is optional. If it is not given, an output " "file with the same name as the input file is used, with the extension " "changed to '.yaml'." ), ) parser.add_argument("input", help="The input CTML filename. Must be specified.") parser.add_argument("output", nargs="?", help="The output YAML filename. Optional.") if len(sys.argv) not in [2, 3]: if len(sys.argv) > 3: print( "ctml2yaml.py: error: unrecognized arguments:", ' '.join(sys.argv[3:]), file=sys.stderr, ) parser.print_help(sys.stderr) sys.exit(1) args = parser.parse_args() input_file = Path(args.input) if args.output is None: output_file = input_file.with_suffix(".yaml") else: output_file = Path(args.output) convert(input_file, output_file)
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Convert legacy CTML input files to YAML format\"", ",", "epilog", "=", "(", "\"The 'output' argument is optional. If it is not given, an output \"", "\"file with the same...
https://github.com/Cantera/cantera/blob/0119484b261967ccb55a0066c020599cacc312e4/interfaces/cython/cantera/ctml2yaml.py#L2659-L2687
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py
python
Wm.wm_overrideredirect
(self, boolean=None)
return self._getboolean(self.tk.call( 'wm', 'overrideredirect', self._w, boolean))
Instruct the window manager to ignore this widget if BOOLEAN is given with 1. Return the current value if None is given.
Instruct the window manager to ignore this widget if BOOLEAN is given with 1. Return the current value if None is given.
[ "Instruct", "the", "window", "manager", "to", "ignore", "this", "widget", "if", "BOOLEAN", "is", "given", "with", "1", ".", "Return", "the", "current", "value", "if", "None", "is", "given", "." ]
def wm_overrideredirect(self, boolean=None): """Instruct the window manager to ignore this widget if BOOLEAN is given with 1. Return the current value if None is given.""" return self._getboolean(self.tk.call( 'wm', 'overrideredirect', self._w, boolean))
[ "def", "wm_overrideredirect", "(", "self", ",", "boolean", "=", "None", ")", ":", "return", "self", ".", "_getboolean", "(", "self", ".", "tk", ".", "call", "(", "'wm'", ",", "'overrideredirect'", ",", "self", ".", "_w", ",", "boolean", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py#L1943-L1948
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/contrib/slim/python/slim/data/data_provider.py
python
DataProvider.__init__
(self, items_to_tensors, num_samples)
Constructs the Data Provider. Args: items_to_tensors: a dictionary of names to tensors. num_samples: the number of samples in the dataset being provided.
Constructs the Data Provider.
[ "Constructs", "the", "Data", "Provider", "." ]
def __init__(self, items_to_tensors, num_samples): """Constructs the Data Provider. Args: items_to_tensors: a dictionary of names to tensors. num_samples: the number of samples in the dataset being provided. """ self._items_to_tensors = items_to_tensors self._num_samples = num_samples
[ "def", "__init__", "(", "self", ",", "items_to_tensors", ",", "num_samples", ")", ":", "self", ".", "_items_to_tensors", "=", "items_to_tensors", "self", ".", "_num_samples", "=", "num_samples" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/slim/python/slim/data/data_provider.py#L52-L60
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
EventLoopBase.WakeUpIdle
(*args, **kwargs)
return _core_.EventLoopBase_WakeUpIdle(*args, **kwargs)
WakeUpIdle(self)
WakeUpIdle(self)
[ "WakeUpIdle", "(", "self", ")" ]
def WakeUpIdle(*args, **kwargs): """WakeUpIdle(self)""" return _core_.EventLoopBase_WakeUpIdle(*args, **kwargs)
[ "def", "WakeUpIdle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "EventLoopBase_WakeUpIdle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L8808-L8810
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
_CppLintState.PrintErrorCounts
(self)
Print a summary of errors by category, and the total.
Print a summary of errors by category, and the total.
[ "Print", "a", "summary", "of", "errors", "by", "category", "and", "the", "total", "." ]
def PrintErrorCounts(self): """Print a summary of errors by category, and the total.""" for category, count in self.errors_by_category.iteritems(): sys.stderr.write('Category \'%s\' errors found: %d\n' % (category, count)) sys.stderr.write('Total errors found: %d\n' % self.error_count)
[ "def", "PrintErrorCounts", "(", "self", ")", ":", "for", "category", ",", "count", "in", "self", ".", "errors_by_category", ".", "iteritems", "(", ")", ":", "sys", ".", "stderr", ".", "write", "(", "'Category \\'%s\\' errors found: %d\\n'", "%", "(", "category...
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L621-L626
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/imaplib.py
python
IMAP4.xatom
(self, name, *args)
return self._simple_command(name, *args)
Allow simple extension commands notified by server in CAPABILITY response. Assumes command is legal in current state. (typ, [data]) = <instance>.xatom(name, arg, ...) Returns response appropriate to extension command `name'.
Allow simple extension commands notified by server in CAPABILITY response.
[ "Allow", "simple", "extension", "commands", "notified", "by", "server", "in", "CAPABILITY", "response", "." ]
def xatom(self, name, *args): """Allow simple extension commands notified by server in CAPABILITY response. Assumes command is legal in current state. (typ, [data]) = <instance>.xatom(name, arg, ...) Returns response appropriate to extension command `name'. """ name = name.upper() #if not name in self.capabilities: # Let the server decide! # raise self.error('unknown extension command: %s' % name) if not name in Commands: Commands[name] = (self.state,) return self._simple_command(name, *args)
[ "def", "xatom", "(", "self", ",", "name", ",", "*", "args", ")", ":", "name", "=", "name", ".", "upper", "(", ")", "#if not name in self.capabilities: # Let the server decide!", "# raise self.error('unknown extension command: %s' % name)", "if", "not", "name", "...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/imaplib.py#L895-L910
tensorflow/deepmath
b5b721f54de1d5d6a02d78f5da5995237f9995f9
deepmath/deephol/utilities/stats.py
python
merge_aggregate_stat
(target: deephol_stat_pb2.ProofAggregateStat, source: deephol_stat_pb2.ProofAggregateStat)
Merge two aggregated statistics. Args: target: Aggregeated statistics to be updated. source: Statistics to be merged in.
Merge two aggregated statistics.
[ "Merge", "two", "aggregated", "statistics", "." ]
def merge_aggregate_stat(target: deephol_stat_pb2.ProofAggregateStat, source: deephol_stat_pb2.ProofAggregateStat): """Merge two aggregated statistics. Args: target: Aggregeated statistics to be updated. source: Statistics to be merged in. """ target.num_theorems_attempted += source.num_theorems_attempted target.num_theorems_proved += source.num_theorems_proved target.num_theorems_with_bad_proof += source.num_theorems_with_bad_proof target.num_nodes += source.num_nodes target.num_reduced_nodes += source.num_reduced_nodes target.num_closed_nodes += source.num_closed_nodes target.time_spent_milliseconds += source.time_spent_milliseconds merge_log_scale_histograms(target.proof_time_histogram, source.proof_time_histogram) merge_log_scale_histograms(target.proof_time_histogram_proved, source.proof_time_histogram_proved) merge_log_scale_histograms(target.proof_time_histogram_failed, source.proof_time_histogram_failed) merge_proof_tapp_stats(target.tapp_stat, source.tapp_stat) merge_histograms(target.num_reduced_nodes_distribution, source.num_reduced_nodes_distribution) target.total_prediction_time += source.total_prediction_time merge_log_scale_histograms(target.proof_prediction_time_histogram, source.proof_prediction_time_histogram) merge_log_scale_histograms(target.node_prediction_time_histogram, source.node_prediction_time_histogram) target.proof_closed_after_millis.extend(source.proof_closed_after_millis)
[ "def", "merge_aggregate_stat", "(", "target", ":", "deephol_stat_pb2", ".", "ProofAggregateStat", ",", "source", ":", "deephol_stat_pb2", ".", "ProofAggregateStat", ")", ":", "target", ".", "num_theorems_attempted", "+=", "source", ".", "num_theorems_attempted", "target...
https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/deephol/utilities/stats.py#L238-L267
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/D7AbsoluteCrossSections.py
python
D7AbsoluteCrossSections._read_experiment_properties
(self, ws)
Reads the user-provided dictionary that contains sample geometry (type, dimensions) and experimental conditions, such as the beam size and calculates derived parameters.
Reads the user-provided dictionary that contains sample geometry (type, dimensions) and experimental conditions, such as the beam size and calculates derived parameters.
[ "Reads", "the", "user", "-", "provided", "dictionary", "that", "contains", "sample", "geometry", "(", "type", "dimensions", ")", "and", "experimental", "conditions", "such", "as", "the", "beam", "size", "and", "calculates", "derived", "parameters", "." ]
def _read_experiment_properties(self, ws): """Reads the user-provided dictionary that contains sample geometry (type, dimensions) and experimental conditions, such as the beam size and calculates derived parameters.""" self._sampleAndEnvironmentProperties = self.getProperty('SampleAndEnvironmentProperties').value if 'InitialEnergy' not in self._sampleAndEnvironmentProperties: h = physical_constants['Planck constant'][0] # in m^2 kg / s neutron_mass = physical_constants['neutron mass'][0] # in0 kg wavelength = mtd[ws][0].getRun().getLogData('monochromator.wavelength').value * 1e-10 # in m joules_to_mev = 1e3 / physical_constants['electron volt'][0] self._sampleAndEnvironmentProperties['InitialEnergy'] = \ joules_to_mev * math.pow(h / wavelength, 2) / (2 * neutron_mass) if self.getPropertyValue('NormalisationMethod') != 'None' and 'NMoles' not in self._sampleAndEnvironmentProperties: sample_mass = self._sampleAndEnvironmentProperties['SampleMass'].value formula_unit_mass = self._sampleAndEnvironmentProperties['FormulaUnitMass'].value self._sampleAndEnvironmentProperties['NMoles'] = (sample_mass / formula_unit_mass)
[ "def", "_read_experiment_properties", "(", "self", ",", "ws", ")", ":", "self", ".", "_sampleAndEnvironmentProperties", "=", "self", ".", "getProperty", "(", "'SampleAndEnvironmentProperties'", ")", ".", "value", "if", "'InitialEnergy'", "not", "in", "self", ".", ...
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/D7AbsoluteCrossSections.py#L296-L311
Komnomnomnom/swigibpy
cfd307fdbfaffabc69a2dc037538d7e34a8b8daf
swigibpy.py
python
ComboLegList.size
(self)
return _swigibpy.ComboLegList_size(self)
size(ComboLegList self) -> std::vector< shared_ptr< ComboLeg > >::size_type
size(ComboLegList self) -> std::vector< shared_ptr< ComboLeg > >::size_type
[ "size", "(", "ComboLegList", "self", ")", "-", ">", "std", "::", "vector<", "shared_ptr<", "ComboLeg", ">", ">", "::", "size_type" ]
def size(self): """size(ComboLegList self) -> std::vector< shared_ptr< ComboLeg > >::size_type""" return _swigibpy.ComboLegList_size(self)
[ "def", "size", "(", "self", ")", ":", "return", "_swigibpy", ".", "ComboLegList_size", "(", "self", ")" ]
https://github.com/Komnomnomnom/swigibpy/blob/cfd307fdbfaffabc69a2dc037538d7e34a8b8daf/swigibpy.py#L321-L323
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
cmake/developer_package/cpplint/cpplint.py
python
FileInfo.Split
(self)
return (project,) + os.path.splitext(rest)
Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension).
Splits the file into the directory, basename, and extension.
[ "Splits", "the", "file", "into", "the", "directory", "basename", "and", "extension", "." ]
def Split(self): """Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension). """ googlename = self.RepositoryName() project, rest = os.path.split(googlename) return (project,) + os.path.splitext(rest)
[ "def", "Split", "(", "self", ")", ":", "googlename", "=", "self", ".", "RepositoryName", "(", ")", "project", ",", "rest", "=", "os", ".", "path", ".", "split", "(", "googlename", ")", "return", "(", "project", ",", ")", "+", "os", ".", "path", "."...
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/cmake/developer_package/cpplint/cpplint.py#L1379-L1391
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/_collections.py
python
HTTPHeaderDict.itermerged
(self)
Iterate over all headers, merging duplicate ones together.
Iterate over all headers, merging duplicate ones together.
[ "Iterate", "over", "all", "headers", "merging", "duplicate", "ones", "together", "." ]
def itermerged(self): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = self._container[key.lower()] yield val[0], ', '.join(val[1:])
[ "def", "itermerged", "(", "self", ")", ":", "for", "key", "in", "self", ":", "val", "=", "self", ".", "_container", "[", "key", ".", "lower", "(", ")", "]", "yield", "val", "[", "0", "]", ",", "', '", ".", "join", "(", "val", "[", "1", ":", "...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/_collections.py#L293-L297
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBTypeNameSpecifier.__eq__
(self, *args)
return _lldb.SBTypeNameSpecifier___eq__(self, *args)
__eq__(self, SBTypeNameSpecifier rhs) -> bool
__eq__(self, SBTypeNameSpecifier rhs) -> bool
[ "__eq__", "(", "self", "SBTypeNameSpecifier", "rhs", ")", "-", ">", "bool" ]
def __eq__(self, *args): """__eq__(self, SBTypeNameSpecifier rhs) -> bool""" return _lldb.SBTypeNameSpecifier___eq__(self, *args)
[ "def", "__eq__", "(", "self", ",", "*", "args", ")", ":", "return", "_lldb", ".", "SBTypeNameSpecifier___eq__", "(", "self", ",", "*", "args", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L11309-L11311
openthread/openthread
9fcdbed9c526c70f1556d1ed84099c1535c7cd32
tools/otci/otci/otci.py
python
OTCI.srp_client_get_host_state
(self)
return self.__parse_str(self.execute_command('srp client host state'))
Get SRP client host state.
Get SRP client host state.
[ "Get", "SRP", "client", "host", "state", "." ]
def srp_client_get_host_state(self): """Get SRP client host state.""" return self.__parse_str(self.execute_command('srp client host state'))
[ "def", "srp_client_get_host_state", "(", "self", ")", ":", "return", "self", ".", "__parse_str", "(", "self", ".", "execute_command", "(", "'srp client host state'", ")", ")" ]
https://github.com/openthread/openthread/blob/9fcdbed9c526c70f1556d1ed84099c1535c7cd32/tools/otci/otci/otci.py#L1089-L1091
google/clif
cab24d6a105609a65c95a36a1712ae3c20c7b5df
clif/pyclif.py
python
_GetHeaders
(ast)
return wrap_header
Scan AST for header files.
Scan AST for header files.
[ "Scan", "AST", "for", "header", "files", "." ]
def _GetHeaders(ast): """Scan AST for header files.""" # It's not moved to astutils yet because of asserts. included = set(d.cpp_file for d in ast.decls if d.cpp_file) if not included: return None if len(included) != 1: raise argparse.ArgumentError( 'input_filename', 'must have exactly one <<from "header":>> statement') wrap_header = included.pop() return wrap_header
[ "def", "_GetHeaders", "(", "ast", ")", ":", "# It's not moved to astutils yet because of asserts.", "included", "=", "set", "(", "d", ".", "cpp_file", "for", "d", "in", "ast", ".", "decls", "if", "d", ".", "cpp_file", ")", "if", "not", "included", ":", "retu...
https://github.com/google/clif/blob/cab24d6a105609a65c95a36a1712ae3c20c7b5df/clif/pyclif.py#L122-L133
CaoWGG/TensorRT-CenterNet
f949252e37b51e60f873808f46d3683f15735e79
onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py
python
Type.get_size
(self)
return conf.lib.clang_Type_getSizeOf(self)
Retrieve the size of the record.
Retrieve the size of the record.
[ "Retrieve", "the", "size", "of", "the", "record", "." ]
def get_size(self): """ Retrieve the size of the record. """ return conf.lib.clang_Type_getSizeOf(self)
[ "def", "get_size", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_Type_getSizeOf", "(", "self", ")" ]
https://github.com/CaoWGG/TensorRT-CenterNet/blob/f949252e37b51e60f873808f46d3683f15735e79/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L2089-L2093
pybind/pybind11
6493f496e30c80f004772c906370c8f4db94b6ec
pybind11/setup_helpers.py
python
tmp_chdir
()
Prepare and enter a temporary directory, cleanup when done
Prepare and enter a temporary directory, cleanup when done
[ "Prepare", "and", "enter", "a", "temporary", "directory", "cleanup", "when", "done" ]
def tmp_chdir() -> Iterator[str]: "Prepare and enter a temporary directory, cleanup when done" # Threadsafe with tmp_chdir_lock: olddir = os.getcwd() try: tmpdir = tempfile.mkdtemp() os.chdir(tmpdir) yield tmpdir finally: os.chdir(olddir) shutil.rmtree(tmpdir)
[ "def", "tmp_chdir", "(", ")", "->", "Iterator", "[", "str", "]", ":", "# Threadsafe", "with", "tmp_chdir_lock", ":", "olddir", "=", "os", ".", "getcwd", "(", ")", "try", ":", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "os", ".", "chdir", "(...
https://github.com/pybind/pybind11/blob/6493f496e30c80f004772c906370c8f4db94b6ec/pybind11/setup_helpers.py#L216-L228
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
StatusBar.SetFields
(self, items)
Set the values of the statusbar fields from a list of strings.
Set the values of the statusbar fields from a list of strings.
[ "Set", "the", "values", "of", "the", "statusbar", "fields", "from", "a", "list", "of", "strings", "." ]
def SetFields(self, items): """Set the values of the statusbar fields from a list of strings. """ self.SetFieldsCount(len(items)) for i in range(len(items)): self.SetStatusText(items[i], i)
[ "def", "SetFields", "(", "self", ",", "items", ")", ":", "self", ".", "SetFieldsCount", "(", "len", "(", "items", ")", ")", "for", "i", "in", "range", "(", "len", "(", "items", ")", ")", ":", "self", ".", "SetStatusText", "(", "items", "[", "i", ...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L1325-L1329
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
xmlDoc.htmlDocContentDumpOutput
(self, buf, encoding)
Dump an HTML document. Formating return/spaces are added.
Dump an HTML document. Formating return/spaces are added.
[ "Dump", "an", "HTML", "document", ".", "Formating", "return", "/", "spaces", "are", "added", "." ]
def htmlDocContentDumpOutput(self, buf, encoding): """Dump an HTML document. Formating return/spaces are added. """ if buf is None: buf__o = None else: buf__o = buf._o libxml2mod.htmlDocContentDumpOutput(buf__o, self._o, encoding)
[ "def", "htmlDocContentDumpOutput", "(", "self", ",", "buf", ",", "encoding", ")", ":", "if", "buf", "is", "None", ":", "buf__o", "=", "None", "else", ":", "buf__o", "=", "buf", ".", "_o", "libxml2mod", ".", "htmlDocContentDumpOutput", "(", "buf__o", ",", ...
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L3997-L4001
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/propgrid.py
python
PropertyGridInterface.SetPropertyValidator
(*args, **kwargs)
return _propgrid.PropertyGridInterface_SetPropertyValidator(*args, **kwargs)
SetPropertyValidator(self, PGPropArg id, Validator validator)
SetPropertyValidator(self, PGPropArg id, Validator validator)
[ "SetPropertyValidator", "(", "self", "PGPropArg", "id", "Validator", "validator", ")" ]
def SetPropertyValidator(*args, **kwargs): """SetPropertyValidator(self, PGPropArg id, Validator validator)""" return _propgrid.PropertyGridInterface_SetPropertyValidator(*args, **kwargs)
[ "def", "SetPropertyValidator", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGridInterface_SetPropertyValidator", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L1446-L1448
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/numeric.py
python
normalize_axis_tuple
(axis, ndim, argname=None, allow_duplicate=False)
return axis
Normalizes an axis argument into a tuple of non-negative integer axes. This handles shorthands such as ``1`` and converts them to ``(1,)``, as well as performing the handling of negative indices covered by `normalize_axis_index`. By default, this forbids axes from being specified multiple times. Used internally by multi-axis-checking logic. .. versionadded:: 1.13.0 Parameters ---------- axis : int, iterable of int The un-normalized index or indices of the axis. ndim : int The number of dimensions of the array that `axis` should be normalized against. argname : str, optional A prefix to put before the error message, typically the name of the argument. allow_duplicate : bool, optional If False, the default, disallow an axis from being specified twice. Returns ------- normalized_axes : tuple of int The normalized axis index, such that `0 <= normalized_axis < ndim` Raises ------ AxisError If any axis provided is out of range ValueError If an axis is repeated See also -------- normalize_axis_index : normalizing a single scalar axis
Normalizes an axis argument into a tuple of non-negative integer axes.
[ "Normalizes", "an", "axis", "argument", "into", "a", "tuple", "of", "non", "-", "negative", "integer", "axes", "." ]
def normalize_axis_tuple(axis, ndim, argname=None, allow_duplicate=False): """ Normalizes an axis argument into a tuple of non-negative integer axes. This handles shorthands such as ``1`` and converts them to ``(1,)``, as well as performing the handling of negative indices covered by `normalize_axis_index`. By default, this forbids axes from being specified multiple times. Used internally by multi-axis-checking logic. .. versionadded:: 1.13.0 Parameters ---------- axis : int, iterable of int The un-normalized index or indices of the axis. ndim : int The number of dimensions of the array that `axis` should be normalized against. argname : str, optional A prefix to put before the error message, typically the name of the argument. allow_duplicate : bool, optional If False, the default, disallow an axis from being specified twice. Returns ------- normalized_axes : tuple of int The normalized axis index, such that `0 <= normalized_axis < ndim` Raises ------ AxisError If any axis provided is out of range ValueError If an axis is repeated See also -------- normalize_axis_index : normalizing a single scalar axis """ # Optimization to speed-up the most common cases. if type(axis) not in (tuple, list): try: axis = [operator.index(axis)] except TypeError: pass # Going via an iterator directly is slower than via list comprehension. axis = tuple([normalize_axis_index(ax, ndim, argname) for ax in axis]) if not allow_duplicate and len(set(axis)) != len(axis): if argname: raise ValueError('repeated axis in `{}` argument'.format(argname)) else: raise ValueError('repeated axis') return axis
[ "def", "normalize_axis_tuple", "(", "axis", ",", "ndim", ",", "argname", "=", "None", ",", "allow_duplicate", "=", "False", ")", ":", "# Optimization to speed-up the most common cases.", "if", "type", "(", "axis", ")", "not", "in", "(", "tuple", ",", "list", "...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/numeric.py#L1277-L1333
idaholab/moose
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
python/peacock/Input/BlockEditor.py
python
BlockEditor._applyAndClose
(self)
Apply any changes the user has made then close the window
Apply any changes the user has made then close the window
[ "Apply", "any", "changes", "the", "user", "has", "made", "then", "close", "the", "window" ]
def _applyAndClose(self): """ Apply any changes the user has made then close the window """ if self.apply_button.isEnabled(): self.applyChanges() self.appliedAndClosed.emit(self.block) self.close()
[ "def", "_applyAndClose", "(", "self", ")", ":", "if", "self", ".", "apply_button", ".", "isEnabled", "(", ")", ":", "self", ".", "applyChanges", "(", ")", "self", ".", "appliedAndClosed", ".", "emit", "(", "self", ".", "block", ")", "self", ".", "close...
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/Input/BlockEditor.py#L187-L194
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
PageSetupDialogData.GetDefaultMinMargins
(*args, **kwargs)
return _windows_.PageSetupDialogData_GetDefaultMinMargins(*args, **kwargs)
GetDefaultMinMargins(self) -> bool
GetDefaultMinMargins(self) -> bool
[ "GetDefaultMinMargins", "(", "self", ")", "-", ">", "bool" ]
def GetDefaultMinMargins(*args, **kwargs): """GetDefaultMinMargins(self) -> bool""" return _windows_.PageSetupDialogData_GetDefaultMinMargins(*args, **kwargs)
[ "def", "GetDefaultMinMargins", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "PageSetupDialogData_GetDefaultMinMargins", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L4886-L4888
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/distutils/misc_util.py
python
Configuration.set_options
(self, **options)
Configure Configuration instance. The following options are available: - ignore_setup_xxx_py - assume_default_configuration - delegate_options_to_subpackages - quiet
Configure Configuration instance.
[ "Configure", "Configuration", "instance", "." ]
def set_options(self, **options): """ Configure Configuration instance. The following options are available: - ignore_setup_xxx_py - assume_default_configuration - delegate_options_to_subpackages - quiet """ for key, value in options.items(): if key in self.options: self.options[key] = value else: raise ValueError('Unknown option: '+key)
[ "def", "set_options", "(", "self", ",", "*", "*", "options", ")", ":", "for", "key", ",", "value", "in", "options", ".", "items", "(", ")", ":", "if", "key", "in", "self", ".", "options", ":", "self", ".", "options", "[", "key", "]", "=", "value"...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/distutils/misc_util.py#L864-L879
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py
python
Node.insertBefore
(self, node, refNode)
Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node :arg node: the node to insert :arg refNode: the child node to insert the node before
Insert node as a child of the current node, before refNode in the
[ "Insert", "node", "as", "a", "child", "of", "the", "current", "node", "before", "refNode", "in", "the" ]
def insertBefore(self, node, refNode): """Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node :arg node: the node to insert :arg refNode: the child node to insert the node before """ raise NotImplementedError
[ "def", "insertBefore", "(", "self", ",", "node", ",", "refNode", ")", ":", "raise", "NotImplementedError" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py#L153-L173
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/memory_stats/python/ops/memory_stats_ops.py
python
BytesLimit
()
return gen_memory_stats_ops.bytes_limit()
Generates an op that measures the total memory (in bytes) of a device.
Generates an op that measures the total memory (in bytes) of a device.
[ "Generates", "an", "op", "that", "measures", "the", "total", "memory", "(", "in", "bytes", ")", "of", "a", "device", "." ]
def BytesLimit(): """Generates an op that measures the total memory (in bytes) of a device.""" return gen_memory_stats_ops.bytes_limit()
[ "def", "BytesLimit", "(", ")", ":", "return", "gen_memory_stats_ops", ".", "bytes_limit", "(", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/memory_stats/python/ops/memory_stats_ops.py#L34-L36
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/extern/aui/aui_utilities.py
python
IndentPressedBitmap
(rect, button_state)
return rect
Indents the input rectangle `rect` based on the value of `button_state`. :param Rect `rect`: the button bitmap rectangle; :param integer `button_state`: the button state.
Indents the input rectangle `rect` based on the value of `button_state`.
[ "Indents", "the", "input", "rectangle", "rect", "based", "on", "the", "value", "of", "button_state", "." ]
def IndentPressedBitmap(rect, button_state): """ Indents the input rectangle `rect` based on the value of `button_state`. :param Rect `rect`: the button bitmap rectangle; :param integer `button_state`: the button state. """ if button_state == AUI_BUTTON_STATE_PRESSED: rect.x += 1 rect.y += 1 return rect
[ "def", "IndentPressedBitmap", "(", "rect", ",", "button_state", ")", ":", "if", "button_state", "==", "AUI_BUTTON_STATE_PRESSED", ":", "rect", ".", "x", "+=", "1", "rect", ".", "y", "+=", "1", "return", "rect" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/aui/aui_utilities.py#L147-L159
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py3/sklearn/metrics/pairwise.py
python
pairwise_distances
(X, Y=None, metric="euclidean", n_jobs=None, force_all_finite=True, **kwds)
return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
Compute the distance matrix from a vector array X and optional Y. This method takes either a vector array or a distance matrix, and returns a distance matrix. If the input is a vector array, the distances are computed. If the input is a distances matrix, it is returned instead. This method provides a safe way to take a distance matrix as input, while preserving compatibility with many other algorithms that take a vector array. If Y is given (default is None), then the returned matrix is the pairwise distance between the arrays from both X and Y. Valid values for metric are: - From scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2', 'manhattan']. These metrics support sparse matrix inputs. ['nan_euclidean'] but it does not yet support sparse matrices. - From scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev', 'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule'] See the documentation for scipy.spatial.distance for details on these metrics. These metrics do not support sparse matrix inputs. Note that in the case of 'cityblock', 'cosine' and 'euclidean' (which are valid scipy.spatial.distance metrics), the scikit-learn implementation will be used, which is faster and has support for sparse matrices (except for 'cityblock'). For a verbose description of the metrics from scikit-learn, see the __doc__ of the sklearn.pairwise.distance_metrics function. Read more in the :ref:`User Guide <metrics>`. Parameters ---------- X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \ [n_samples_a, n_features] otherwise Array of pairwise distances between samples, or a feature array. Y : array [n_samples_b, n_features], optional An optional second feature array. Only allowed if metric != "precomputed". metric : string, or callable The metric to use when calculating distance between instances in a feature array. 
If metric is a string, it must be one of the options allowed by scipy.spatial.distance.pdist for its metric parameter, or a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS. If metric is "precomputed", X is assumed to be a distance matrix. Alternatively, if metric is a callable function, it is called on each pair of instances (rows) and the resulting value recorded. The callable should take two arrays from X as input and return a value indicating the distance between them. n_jobs : int or None, optional (default=None) The number of jobs to use for the computation. This works by breaking down the pairwise matrix into n_jobs even slices and computing them in parallel. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. force_all_finite : boolean or 'allow-nan', (default=True) Whether to raise an error on np.inf and np.nan in array. The possibilities are: - True: Force all values of array to be finite. - False: accept both np.inf and np.nan in array. - 'allow-nan': accept only np.nan values in array. Values cannot be infinite. .. versionadded:: 0.22 **kwds : optional keyword parameters Any further parameters are passed directly to the distance function. If using a scipy.spatial.distance metric, the parameters are still metric dependent. See the scipy docs for usage examples. Returns ------- D : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b] A distance matrix D such that D_{i, j} is the distance between the ith and jth vectors of the given matrix X, if Y is None. If Y is not None, then D_{i, j} is the distance between the ith array from X and the jth array from Y. See also -------- pairwise_distances_chunked : performs the same calculation as this function, but returns a generator of chunks of the distance matrix, in order to limit memory usage. paired_distances : Computes the distances between corresponding elements of two arrays
Compute the distance matrix from a vector array X and optional Y.
[ "Compute", "the", "distance", "matrix", "from", "a", "vector", "array", "X", "and", "optional", "Y", "." ]
def pairwise_distances(X, Y=None, metric="euclidean", n_jobs=None, force_all_finite=True, **kwds): """ Compute the distance matrix from a vector array X and optional Y. This method takes either a vector array or a distance matrix, and returns a distance matrix. If the input is a vector array, the distances are computed. If the input is a distances matrix, it is returned instead. This method provides a safe way to take a distance matrix as input, while preserving compatibility with many other algorithms that take a vector array. If Y is given (default is None), then the returned matrix is the pairwise distance between the arrays from both X and Y. Valid values for metric are: - From scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2', 'manhattan']. These metrics support sparse matrix inputs. ['nan_euclidean'] but it does not yet support sparse matrices. - From scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev', 'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule'] See the documentation for scipy.spatial.distance for details on these metrics. These metrics do not support sparse matrix inputs. Note that in the case of 'cityblock', 'cosine' and 'euclidean' (which are valid scipy.spatial.distance metrics), the scikit-learn implementation will be used, which is faster and has support for sparse matrices (except for 'cityblock'). For a verbose description of the metrics from scikit-learn, see the __doc__ of the sklearn.pairwise.distance_metrics function. Read more in the :ref:`User Guide <metrics>`. Parameters ---------- X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \ [n_samples_a, n_features] otherwise Array of pairwise distances between samples, or a feature array. Y : array [n_samples_b, n_features], optional An optional second feature array. Only allowed if metric != "precomputed". 
metric : string, or callable The metric to use when calculating distance between instances in a feature array. If metric is a string, it must be one of the options allowed by scipy.spatial.distance.pdist for its metric parameter, or a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS. If metric is "precomputed", X is assumed to be a distance matrix. Alternatively, if metric is a callable function, it is called on each pair of instances (rows) and the resulting value recorded. The callable should take two arrays from X as input and return a value indicating the distance between them. n_jobs : int or None, optional (default=None) The number of jobs to use for the computation. This works by breaking down the pairwise matrix into n_jobs even slices and computing them in parallel. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. force_all_finite : boolean or 'allow-nan', (default=True) Whether to raise an error on np.inf and np.nan in array. The possibilities are: - True: Force all values of array to be finite. - False: accept both np.inf and np.nan in array. - 'allow-nan': accept only np.nan values in array. Values cannot be infinite. .. versionadded:: 0.22 **kwds : optional keyword parameters Any further parameters are passed directly to the distance function. If using a scipy.spatial.distance metric, the parameters are still metric dependent. See the scipy docs for usage examples. Returns ------- D : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b] A distance matrix D such that D_{i, j} is the distance between the ith and jth vectors of the given matrix X, if Y is None. If Y is not None, then D_{i, j} is the distance between the ith array from X and the jth array from Y. 
See also -------- pairwise_distances_chunked : performs the same calculation as this function, but returns a generator of chunks of the distance matrix, in order to limit memory usage. paired_distances : Computes the distances between corresponding elements of two arrays """ if (metric not in _VALID_METRICS and not callable(metric) and metric != "precomputed"): raise ValueError("Unknown metric %s. " "Valid metrics are %s, or 'precomputed', or a " "callable" % (metric, _VALID_METRICS)) if metric == "precomputed": X, _ = check_pairwise_arrays(X, Y, precomputed=True, force_all_finite=force_all_finite) whom = ("`pairwise_distances`. Precomputed distance " " need to have non-negative values.") check_non_negative(X, whom=whom) return X elif metric in PAIRWISE_DISTANCE_FUNCTIONS: func = PAIRWISE_DISTANCE_FUNCTIONS[metric] elif callable(metric): func = partial(_pairwise_callable, metric=metric, force_all_finite=force_all_finite, **kwds) else: if issparse(X) or issparse(Y): raise TypeError("scipy distance metrics do not" " support sparse matrices.") dtype = bool if metric in PAIRWISE_BOOLEAN_FUNCTIONS else None if (dtype == bool and (X.dtype != bool or (Y is not None and Y.dtype != bool))): msg = "Data was converted to boolean for metric %s" % metric warnings.warn(msg, DataConversionWarning) X, Y = check_pairwise_arrays(X, Y, dtype=dtype, force_all_finite=force_all_finite) # precompute data-derived metric params params = _precompute_metric_params(X, Y, metric=metric, **kwds) kwds.update(**params) if effective_n_jobs(n_jobs) == 1 and X is Y: return distance.squareform(distance.pdist(X, metric=metric, **kwds)) func = partial(distance.cdist, metric=metric, **kwds) return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
[ "def", "pairwise_distances", "(", "X", ",", "Y", "=", "None", ",", "metric", "=", "\"euclidean\"", ",", "n_jobs", "=", "None", ",", "force_all_finite", "=", "True", ",", "*", "*", "kwds", ")", ":", "if", "(", "metric", "not", "in", "_VALID_METRICS", "a...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/metrics/pairwise.py#L1609-L1752
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/learn/python/learn/estimators/head.py
python
loss_only_head
(loss_fn, head_name=None)
return _LossOnlyHead(loss_fn, head_name=head_name)
Creates a Head that contains only loss terms. Loss only head holds additional loss terms to be added to other heads and usually represents additional regularization terms in the objective function. Args: loss_fn: a function that takes no argument and returns a list of scalar tensors. head_name: a name for the head. Returns: An instance of `Head` to hold the additional losses.
Creates a Head that contains only loss terms.
[ "Creates", "a", "Head", "that", "contains", "only", "loss", "terms", "." ]
def loss_only_head(loss_fn, head_name=None): """Creates a Head that contains only loss terms. Loss only head holds additional loss terms to be added to other heads and usually represents additional regularization terms in the objective function. Args: loss_fn: a function that takes no argument and returns a list of scalar tensors. head_name: a name for the head. Returns: An instance of `Head` to hold the additional losses. """ return _LossOnlyHead(loss_fn, head_name=head_name)
[ "def", "loss_only_head", "(", "loss_fn", ",", "head_name", "=", "None", ")", ":", "return", "_LossOnlyHead", "(", "loss_fn", ",", "head_name", "=", "head_name", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/learn/python/learn/estimators/head.py#L430-L444
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/timeseries/python/timeseries/model.py
python
SequentialTimeSeriesModel._window_initializer
(self, times, state)
Prepare for training or prediction on a window of data. Args: times: A [batch size x window size] Tensor with times for each observation. state: Model-dependent state, each with size [batch size x ...]. The number and type will typically be fixed by the model (for example a mean and variance). Returns: Nothing
Prepare for training or prediction on a window of data.
[ "Prepare", "for", "training", "or", "prediction", "on", "a", "window", "of", "data", "." ]
def _window_initializer(self, times, state): """Prepare for training or prediction on a window of data. Args: times: A [batch size x window size] Tensor with times for each observation. state: Model-dependent state, each with size [batch size x ...]. The number and type will typically be fixed by the model (for example a mean and variance). Returns: Nothing """ pass
[ "def", "_window_initializer", "(", "self", ",", "times", ",", "state", ")", ":", "pass" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/timeseries/python/timeseries/model.py#L436-L448
panda3d/panda3d
833ad89ebad58395d0af0b7ec08538e5e4308265
direct/src/distributed/ClockDelta.py
python
ClockDelta.networkToLocalTime
(self, networkTime, now = None, bits = 16, ticksPerSec=NetworkTimePrecision)
return now + float(diff) / ticksPerSec
networkToLocalTime(self, int networkTime) Converts the indicated networkTime to the corresponding localTime value. The time is assumed to be within +/- 5 minutes of the current local time given in now, or getRealTime() if now is not specified.
networkToLocalTime(self, int networkTime)
[ "networkToLocalTime", "(", "self", "int", "networkTime", ")" ]
def networkToLocalTime(self, networkTime, now = None, bits = 16, ticksPerSec=NetworkTimePrecision): """networkToLocalTime(self, int networkTime) Converts the indicated networkTime to the corresponding localTime value. The time is assumed to be within +/- 5 minutes of the current local time given in now, or getRealTime() if now is not specified. """ if now is None: now = self.globalClock.getRealTime() # Are we in non-real-time mode (i.e. filming a movie)? If you # set movie-network-time 1, then we'll circumvent this logic # and always return now. if self.globalClock.getMode() == ClockObject.MNonRealTime and \ ConfigVariableBool('movie-network-time', False): return now # First, determine what network time we have for 'now'. ntime = int(math.floor(((now - self.delta) * ticksPerSec) + 0.5)) # The signed difference between these is the number of ticks # by which the network time differs from 'now'. if bits == 16: diff = self.__signExtend(networkTime - ntime) else: # Assume the bits is either 16 or 32. If it's 32, no need # to sign-extend. 32 bits gives us about 227 days of # continuous timestamp. diff = networkTime - ntime return now + float(diff) / ticksPerSec
[ "def", "networkToLocalTime", "(", "self", ",", "networkTime", ",", "now", "=", "None", ",", "bits", "=", "16", ",", "ticksPerSec", "=", "NetworkTimePrecision", ")", ":", "if", "now", "is", "None", ":", "now", "=", "self", ".", "globalClock", ".", "getRea...
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/distributed/ClockDelta.py#L235-L268
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
bindings/pyroot/pythonizations/python/ROOT/_pythonization/_roofit/_roojsonfactorywstool.py
python
RooJSONFactoryWSTool.gendoc
(cls)
return hs3
Generate the importer and exporter documentation.
Generate the importer and exporter documentation.
[ "Generate", "the", "importer", "and", "exporter", "documentation", "." ]
def gendoc(cls): """Generate the importer and exporter documentation.""" hs3 = {} for key, importer in cls.importers(): if not key in hs3.keys(): hs3[key] = {} if not "import" in hs3[key]: hs3[key]["import"] = [] hs3[key]["import"].append({"native": True}) for tclass, exporters in cls.exporters(): for exp in exporters: key = exp.key() if not key in hs3.keys(): hs3[key] = {} hs3[key]["class"] = str(tclass.GetName()) if not "export" in hs3[key]: hs3[key]["export"] = [] hs3[key]["export"].append({"native": True}) for key, importer in cls.pdfImportExpressions(): if not key in hs3.keys(): hs3[key] = {} if not "import" in hs3[key]: hs3[key]["import"] = [] hs3[key]["import"].append( { "class": str(importer.tclass.GetName()), "args": [str(e) for e in importer.arguments], "native": False, } ) for key, importer in cls.functionImportExpressions(): if not key in hs3.keys(): hs3[key] = {} if not "import" in hs3[key]: hs3[key]["import"] = [] hs3[key]["import"].append( { "class": str(importer.tclass.GetName()), "args": [str(e) for e in importer.arguments], "native": False, } ) for tclass, exporter in cls.exportKeys(): key = exporter.type if not key in hs3.keys(): hs3[key] = {} hs3[key]["class"] = str(tclass.GetName()) if not "export" in hs3[key]: hs3[key]["export"] = [] hs3[key]["export"].append( { "native": False, "proxies": {str(a): str(b) for a, b in exporter.proxies}, } ) return hs3
[ "def", "gendoc", "(", "cls", ")", ":", "hs3", "=", "{", "}", "for", "key", ",", "importer", "in", "cls", ".", "importers", "(", ")", ":", "if", "not", "key", "in", "hs3", ".", "keys", "(", ")", ":", "hs3", "[", "key", "]", "=", "{", "}", "i...
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/bindings/pyroot/pythonizations/python/ROOT/_pythonization/_roofit/_roojsonfactorywstool.py#L16-L72
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/contrib/slim/quantization/quantization_pass.py
python
_get_input_name_index
(op, input_var_name)
return res
Get the input name and index of the var_name in the op
Get the input name and index of the var_name in the op
[ "Get", "the", "input", "name", "and", "index", "of", "the", "var_name", "in", "the", "op" ]
def _get_input_name_index(op, input_var_name): """Get the input name and index of the var_name in the op""" assert isinstance(op, (IrNode, Operator)), \ "The input op should be IrNode or Operator." op_name = op.name() if isinstance(op, IrNode) \ else op.type if op_name not in _op_real_in_out_name: return None res = None for argname in _op_real_in_out_name[op_name][0]: var_names = op.input(argname) for index, name in enumerate(var_names): if name == input_var_name: res = (argname, index) return res
[ "def", "_get_input_name_index", "(", "op", ",", "input_var_name", ")", ":", "assert", "isinstance", "(", "op", ",", "(", "IrNode", ",", "Operator", ")", ")", ",", "\"The input op should be IrNode or Operator.\"", "op_name", "=", "op", ".", "name", "(", ")", "i...
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/contrib/slim/quantization/quantization_pass.py#L175-L190
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/tools/gyp/pylib/gyp/__init__.py
python
NameValueListToDict
(name_value_list)
return result
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is.
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is.
[ "Takes", "an", "array", "of", "strings", "of", "the", "form", "NAME", "=", "VALUE", "and", "creates", "a", "dictionary", "of", "the", "pairs", ".", "If", "a", "string", "is", "simply", "NAME", "then", "the", "value", "in", "the", "dictionary", "is", "s...
def NameValueListToDict(name_value_list): """ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is. """ result = {} for item in name_value_list: tokens = item.split("=", 1) if len(tokens) == 2: # If we can make it an int, use that, otherwise, use the string. try: token_value = int(tokens[1]) except ValueError: token_value = tokens[1] # Set the variable to the supplied value. result[tokens[0]] = token_value else: # No value supplied, treat it as a boolean and set it. result[tokens[0]] = True return result
[ "def", "NameValueListToDict", "(", "name_value_list", ")", ":", "result", "=", "{", "}", "for", "item", "in", "name_value_list", ":", "tokens", "=", "item", ".", "split", "(", "\"=\"", ",", "1", ")", "if", "len", "(", "tokens", ")", "==", "2", ":", "...
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/gyp/pylib/gyp/__init__.py#L156-L176
facebookincubator/BOLT
88c70afe9d388ad430cc150cc158641701397f70
lldb/examples/python/file_extract.py
python
FileExtract.get_n_sint32
(self, n, fail_value=0)
Extract "n" int32_t integers from the binary file at the current file position, returns a list of integers
Extract "n" int32_t integers from the binary file at the current file position, returns a list of integers
[ "Extract", "n", "int32_t", "integers", "from", "the", "binary", "file", "at", "the", "current", "file", "position", "returns", "a", "list", "of", "integers" ]
def get_n_sint32(self, n, fail_value=0): '''Extract "n" int32_t integers from the binary file at the current file position, returns a list of integers''' s = self.read_size(4 * n) if s: return struct.unpack(self.byte_order + ("%u" % n) + 'i', s) else: return (fail_value,) * n
[ "def", "get_n_sint32", "(", "self", ",", "n", ",", "fail_value", "=", "0", ")", ":", "s", "=", "self", ".", "read_size", "(", "4", "*", "n", ")", "if", "s", ":", "return", "struct", ".", "unpack", "(", "self", ".", "byte_order", "+", "(", "\"%u\"...
https://github.com/facebookincubator/BOLT/blob/88c70afe9d388ad430cc150cc158641701397f70/lldb/examples/python/file_extract.py#L196-L202
gem5/gem5
141cc37c2d4b93959d4c249b8f7e6a8b2ef75338
util/gerrit-bot/gerrit.py
python
GerritRestAPI.list_reviewers
(self, change_id)
return self._get(f"/changes/{change_id}/reviewers")
list reviewers of a change
list reviewers of a change
[ "list", "reviewers", "of", "a", "change" ]
def list_reviewers(self, change_id): """ list reviewers of a change """ return self._get(f"/changes/{change_id}/reviewers")
[ "def", "list_reviewers", "(", "self", ",", "change_id", ")", ":", "return", "self", ".", "_get", "(", "f\"/changes/{change_id}/reviewers\"", ")" ]
https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/util/gerrit-bot/gerrit.py#L112-L114
luliyucoordinate/Leetcode
96afcdc54807d1d184e881a075d1dbf3371e31fb
src/0957-Prison-Cells-After-N-Days/0957.py
python
Solution.prisonAfterNDays
(self, cells, N)
return cells
:type cells: List[int] :type N: int :rtype: List[int]
:type cells: List[int] :type N: int :rtype: List[int]
[ ":", "type", "cells", ":", "List", "[", "int", "]", ":", "type", "N", ":", "int", ":", "rtype", ":", "List", "[", "int", "]" ]
def prisonAfterNDays(self, cells, N): """ :type cells: List[int] :type N: int :rtype: List[int] """ N = N % 14 if not N: N = 14 for _ in range(N): for i in range(1, len(cells) - 1): if cells[i-1] & 1 == cells[i+1] & 1: cells[i] = 2 if not cells[i] & 1 else 3 for i in range(len(cells)): cells[i] >>= 1 return cells
[ "def", "prisonAfterNDays", "(", "self", ",", "cells", ",", "N", ")", ":", "N", "=", "N", "%", "14", "if", "not", "N", ":", "N", "=", "14", "for", "_", "in", "range", "(", "N", ")", ":", "for", "i", "in", "range", "(", "1", ",", "len", "(", ...
https://github.com/luliyucoordinate/Leetcode/blob/96afcdc54807d1d184e881a075d1dbf3371e31fb/src/0957-Prison-Cells-After-N-Days/0957.py#L2-L20
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Rect.Intersects
(*args, **kwargs)
return _core_.Rect_Intersects(*args, **kwargs)
Intersects(self, Rect rect) -> bool Returns True if the rectangles have a non empty intersection.
Intersects(self, Rect rect) -> bool
[ "Intersects", "(", "self", "Rect", "rect", ")", "-", ">", "bool" ]
def Intersects(*args, **kwargs): """ Intersects(self, Rect rect) -> bool Returns True if the rectangles have a non empty intersection. """ return _core_.Rect_Intersects(*args, **kwargs)
[ "def", "Intersects", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Rect_Intersects", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L1533-L1539
echronos/echronos
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
prj/app/lib/util/crc16.py
python
Crc16Ccitt.add
(self, byte)
Add a new byte to the CRC engine. 'byte' should be a python character. E.g: c.add('x')
Add a new byte to the CRC engine. 'byte' should be a python character. E.g: c.add('x')
[ "Add", "a", "new", "byte", "to", "the", "CRC", "engine", ".", "byte", "should", "be", "a", "python", "character", ".", "E", ".", "g", ":", "c", ".", "add", "(", "x", ")" ]
def add(self, byte): """Add a new byte to the CRC engine. 'byte' should be a python character. E.g: c.add('x') """ # CRC-16 polynomial poly_s = byte ^ (self.state >> 8) poly_t = poly_s ^ (poly_s >> 4) result = s16l(self.state, 8) ^ poly_t ^ s16l(poly_t, 5) ^ s16l(poly_t, 12) self.state = result
[ "def", "add", "(", "self", ",", "byte", ")", ":", "# CRC-16 polynomial", "poly_s", "=", "byte", "^", "(", "self", ".", "state", ">>", "8", ")", "poly_t", "=", "poly_s", "^", "(", "poly_s", ">>", "4", ")", "result", "=", "s16l", "(", "self", ".", ...
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/prj/app/lib/util/crc16.py#L43-L52
baidu/bigflow
449245016c0df7d1252e85581e588bfc60cefad3
bigflow_python/python/bigflow/ptable.py
python
PTable.flatten
(self, **option)
return transforms.flatten(self, **option)
对于每个Key和Value中的每个元素(value 1, value 2, ... value m),构造(Key, value 1), (Key, value 2), ... (Key, value m),结果使用PCollection表示 Returns: PCollection: 表示结果的PCollection
对于每个Key和Value中的每个元素(value 1, value 2, ... value m),构造(Key, value 1), (Key, value 2), ... (Key, value m),结果使用PCollection表示
[ "对于每个Key和Value中的每个元素", "(", "value", "1", "value", "2", "...", "value", "m", ")", ",构造", "(", "Key", "value", "1", ")", "(", "Key", "value", "2", ")", "...", "(", "Key", "value", "m", ")", ",结果使用PCollection表示" ]
def flatten(self, **option): """ 对于每个Key和Value中的每个元素(value 1, value 2, ... value m),构造(Key, value 1), (Key, value 2), ... (Key, value m),结果使用PCollection表示 Returns: PCollection: 表示结果的PCollection """ return transforms.flatten(self, **option)
[ "def", "flatten", "(", "self", ",", "*", "*", "option", ")", ":", "return", "transforms", ".", "flatten", "(", "self", ",", "*", "*", "option", ")" ]
https://github.com/baidu/bigflow/blob/449245016c0df7d1252e85581e588bfc60cefad3/bigflow_python/python/bigflow/ptable.py#L254-L261
google-ar/WebARonTango
e86965d2cbc652156b480e0fcf77c716745578cd
chromium/src/gpu/command_buffer/build_gles2_cmd_buffer.py
python
GETnHandler.WriteServiceImplementation
(self, func, f)
Overrriden from TypeHandler.
Overrriden from TypeHandler.
[ "Overrriden", "from", "TypeHandler", "." ]
def WriteServiceImplementation(self, func, f): """Overrriden from TypeHandler.""" self.WriteServiceHandlerFunctionHeader(func, f) last_arg = func.GetLastOriginalArg() # All except shm_id and shm_offset. all_but_last_args = func.GetCmdArgs()[:-2] for arg in all_but_last_args: arg.WriteGetCode(f) code = """ typedef cmds::%(func_name)s::Result Result; GLsizei num_values = 0; if (!GetNumValuesReturnedForGLGet(pname, &num_values)) { LOCAL_SET_GL_ERROR_INVALID_ENUM(":%(func_name)s", pname, "pname"); return error::kNoError; } Result* result = GetSharedMemoryAs<Result*>( c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset, Result::ComputeSize(num_values)); %(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : NULL; """ f.write(code % { 'last_arg_type': last_arg.type, 'last_arg_name': last_arg.name, 'func_name': func.name, }) func.WriteHandlerValidation(f) code = """ // Check that the client initialized the result. if (result->size != 0) { return error::kInvalidArguments; } """ shadowed = func.GetInfo('shadowed') if not shadowed: f.write(' LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name) f.write(code) func.WriteHandlerImplementation(f) if shadowed: code = """ result->SetNumResults(num_values); return error::kNoError; } """ else: code = """ GLenum error = LOCAL_PEEK_GL_ERROR("%(func_name)s"); if (error == GL_NO_ERROR) { result->SetNumResults(num_values); } return error::kNoError; } """ f.write(code % {'func_name': func.name})
[ "def", "WriteServiceImplementation", "(", "self", ",", "func", ",", "f", ")", ":", "self", ".", "WriteServiceHandlerFunctionHeader", "(", "func", ",", "f", ")", "last_arg", "=", "func", ".", "GetLastOriginalArg", "(", ")", "# All except shm_id and shm_offset.", "a...
https://github.com/google-ar/WebARonTango/blob/e86965d2cbc652156b480e0fcf77c716745578cd/chromium/src/gpu/command_buffer/build_gles2_cmd_buffer.py#L6649-L6699
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/abstract_datastore_input_reader.py
python
AbstractDatastoreInputReader._get_query_spec
(cls, params)
return model.QuerySpec( entity_kind=cls._get_raw_entity_kind(entity_kind), keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)), filters=filters, batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)), model_class_path=entity_kind, app=app, ns=ns)
Construct a model.QuerySpec from model.MapperSpec.
Construct a model.QuerySpec from model.MapperSpec.
[ "Construct", "a", "model", ".", "QuerySpec", "from", "model", ".", "MapperSpec", "." ]
def _get_query_spec(cls, params): """Construct a model.QuerySpec from model.MapperSpec.""" entity_kind = params[cls.ENTITY_KIND_PARAM] filters = params.get(cls.FILTERS_PARAM) app = params.get(cls._APP_PARAM) ns = params.get(cls.NAMESPACE_PARAM) return model.QuerySpec( entity_kind=cls._get_raw_entity_kind(entity_kind), keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)), filters=filters, batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)), model_class_path=entity_kind, app=app, ns=ns)
[ "def", "_get_query_spec", "(", "cls", ",", "params", ")", ":", "entity_kind", "=", "params", "[", "cls", ".", "ENTITY_KIND_PARAM", "]", "filters", "=", "params", ".", "get", "(", "cls", ".", "FILTERS_PARAM", ")", "app", "=", "params", ".", "get", "(", ...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/abstract_datastore_input_reader.py#L73-L87
nasa/astrobee
9241e67e6692810d6e275abb3165b6d02f4ca5ef
localization/sparse_mapping/tools/build_theia_map.py
python
which
(program)
return None
Find if a program is in the PATH
Find if a program is in the PATH
[ "Find", "if", "a", "program", "is", "in", "the", "PATH" ]
def which(program): """Find if a program is in the PATH""" def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if fpath: if is_exe(program): return program else: for path in os.environ["PATH"].split(os.pathsep): path = path.strip('"') exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None
[ "def", "which", "(", "program", ")", ":", "def", "is_exe", "(", "fpath", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "os", ".", "access", "(", "fpath", ",", "os", ".", "X_OK", ")", "fpath", ",", "fname", "=", ...
https://github.com/nasa/astrobee/blob/9241e67e6692810d6e275abb3165b6d02f4ca5ef/localization/sparse_mapping/tools/build_theia_map.py#L35-L52
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/tools/cpplint.py
python
ReverseCloseExpression
(clean_lines, linenum, pos)
return (line, 0, -1)
If input points to ) or } or ] or >, finds the position that opens it. If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the linenum/pos that correspond to the opening of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *at* the opening brace, or (line, 0, -1) if we never find the matching opening brace. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ) or } or ] or >, finds the position that opens it.
[ "If", "input", "points", "to", ")", "or", "}", "or", "]", "or", ">", "finds", "the", "position", "that", "opens", "it", "." ]
def ReverseCloseExpression(clean_lines, linenum, pos): """If input points to ) or } or ] or >, finds the position that opens it. If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the linenum/pos that correspond to the opening of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *at* the opening brace, or (line, 0, -1) if we never find the matching opening brace. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum. """ line = clean_lines.elided[linenum] if line[pos] not in ')}]>': return (line, 0, -1) # Check last line (start_pos, stack) = FindStartOfExpressionInLine(line, pos, []) if start_pos > -1: return (line, linenum, start_pos) # Continue scanning backward while stack and linenum > 0: linenum -= 1 line = clean_lines.elided[linenum] (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack) if start_pos > -1: return (line, linenum, start_pos) # Did not find start of expression before beginning of file, give up return (line, 0, -1)
[ "def", "ReverseCloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "line", "[", "pos", "]", "not", "in", "')}]>'", ":", "return", "(", "line", ",", "0", ",", ...
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/cpplint.py#L2208-L2243
tangzhenyu/Scene-Text-Understanding
0f7ffc7aea5971a50cdc03d33d0a41075285948b
ctpn_crnn_ocr/CTPN/caffe/scripts/cpp_lint.py
python
CheckForHeaderGuard
(filename, lines, error)
Checks that the file contains a header guard. Logs an error if no #ifndef header guard is present. For other headers, checks that the full pathname is used. Args: filename: The name of the C++ header file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found.
Checks that the file contains a header guard.
[ "Checks", "that", "the", "file", "contains", "a", "header", "guard", "." ]
def CheckForHeaderGuard(filename, lines, error): """Checks that the file contains a header guard. Logs an error if no #ifndef header guard is present. For other headers, checks that the full pathname is used. Args: filename: The name of the C++ header file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found. """ cppvar = GetHeaderGuardCPPVariable(filename) ifndef = None ifndef_linenum = 0 define = None endif = None endif_linenum = 0 for linenum, line in enumerate(lines): linesplit = line.split() if len(linesplit) >= 2: # find the first occurrence of #ifndef and #define, save arg if not ifndef and linesplit[0] == '#ifndef': # set ifndef to the header guard presented on the #ifndef line. ifndef = linesplit[1] ifndef_linenum = linenum if not define and linesplit[0] == '#define': define = linesplit[1] # find the last occurrence of #endif, save entire line if line.startswith('#endif'): endif = line endif_linenum = linenum if not ifndef: error(filename, 0, 'build/header_guard', 5, 'No #ifndef header guard found, suggested CPP variable is: %s' % cppvar) return if not define: error(filename, 0, 'build/header_guard', 5, 'No #define header guard found, suggested CPP variable is: %s' % cppvar) return # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__ # for backward compatibility. 
if ifndef != cppvar: error_level = 0 if ifndef != cppvar + '_': error_level = 5 ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum, error) error(filename, ifndef_linenum, 'build/header_guard', error_level, '#ifndef header guard has wrong style, please use: %s' % cppvar) if define != ifndef: error(filename, 0, 'build/header_guard', 5, '#ifndef and #define don\'t match, suggested CPP variable is: %s' % cppvar) return if endif != ('#endif // %s' % cppvar): error_level = 0 if endif != ('#endif // %s' % (cppvar + '_')): error_level = 5 ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum, error) error(filename, endif_linenum, 'build/header_guard', error_level, '#endif line should be "#endif // %s"' % cppvar)
[ "def", "CheckForHeaderGuard", "(", "filename", ",", "lines", ",", "error", ")", ":", "cppvar", "=", "GetHeaderGuardCPPVariable", "(", "filename", ")", "ifndef", "=", "None", "ifndef_linenum", "=", "0", "define", "=", "None", "endif", "=", "None", "endif_linenu...
https://github.com/tangzhenyu/Scene-Text-Understanding/blob/0f7ffc7aea5971a50cdc03d33d0a41075285948b/ctpn_crnn_ocr/CTPN/caffe/scripts/cpp_lint.py#L1408-L1480
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
lldb/third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py
python
PtyProcess.readline
(self)
return s
Read one line from the pseudoterminal, and return it as unicode. Can block if there is nothing to read. Raises :exc:`EOFError` if the terminal was closed.
Read one line from the pseudoterminal, and return it as unicode.
[ "Read", "one", "line", "from", "the", "pseudoterminal", "and", "return", "it", "as", "unicode", "." ]
def readline(self): """Read one line from the pseudoterminal, and return it as unicode. Can block if there is nothing to read. Raises :exc:`EOFError` if the terminal was closed. """ try: s = self.fileobj.readline() except (OSError, IOError) as err: if err.args[0] == errno.EIO: # Linux-style EOF self.flag_eof = True raise EOFError('End Of File (EOF). Exception style platform.') raise if s == b'': # BSD-style EOF (also appears to work on recent Solaris (OpenIndiana)) self.flag_eof = True raise EOFError('End Of File (EOF). Empty string style platform.') return s
[ "def", "readline", "(", "self", ")", ":", "try", ":", "s", "=", "self", ".", "fileobj", ".", "readline", "(", ")", "except", "(", "OSError", ",", "IOError", ")", "as", "err", ":", "if", "err", ".", "args", "[", "0", "]", "==", "errno", ".", "EI...
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/lldb/third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py#L530-L549
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/eager/tape.py
python
record_operation
(op_type, output_tensors, input_tensors, backward_function)
Records the operation on all tapes in the stack.
Records the operation on all tapes in the stack.
[ "Records", "the", "operation", "on", "all", "tapes", "in", "the", "stack", "." ]
def record_operation(op_type, output_tensors, input_tensors, backward_function): """Records the operation on all tapes in the stack.""" for t in _tape_stack.stack: t.record_operation(op_type, output_tensors, input_tensors, backward_function)
[ "def", "record_operation", "(", "op_type", ",", "output_tensors", ",", "input_tensors", ",", "backward_function", ")", ":", "for", "t", "in", "_tape_stack", ".", "stack", ":", "t", ".", "record_operation", "(", "op_type", ",", "output_tensors", ",", "input_tenso...
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/eager/tape.py#L177-L182
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/VBox/ValidationKit/common/utils.py
python
formatIntervalHours
(cHours)
return sRet[:-1]
Format a hours interval into a nice 1w 2d 1h string.
Format a hours interval into a nice 1w 2d 1h string.
[ "Format", "a", "hours", "interval", "into", "a", "nice", "1w", "2d", "1h", "string", "." ]
def formatIntervalHours(cHours): """ Format a hours interval into a nice 1w 2d 1h string. """ # Simple special cases. if cHours < 24: return '%sh' % (cHours,); # Generic and a bit slower. cWeeks = cHours / (7 * 24); cHours %= 7 * 24; cDays = cHours / 24; cHours %= 24; sRet = ''; if cWeeks > 0: sRet = '%sw ' % (cWeeks,); if cDays > 0: sRet = '%sd ' % (cDays,); if cHours > 0: sRet += '%sh ' % (cHours,); assert sRet; assert sRet[-1] == ' '; return sRet[:-1];
[ "def", "formatIntervalHours", "(", "cHours", ")", ":", "# Simple special cases.", "if", "cHours", "<", "24", ":", "return", "'%sh'", "%", "(", "cHours", ",", ")", "# Generic and a bit slower.", "cWeeks", "=", "cHours", "/", "(", "7", "*", "24", ")", "cHours"...
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/ValidationKit/common/utils.py#L1452-L1471
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/minimum-increment-to-make-array-unique.py
python
Solution.minIncrementForUnique
(self, A)
return result
:type A: List[int] :rtype: int
:type A: List[int] :rtype: int
[ ":", "type", "A", ":", "List", "[", "int", "]", ":", "rtype", ":", "int" ]
def minIncrementForUnique(self, A): """ :type A: List[int] :rtype: int """ A.sort() A.append(float("inf")) result, duplicate = 0, 0 for i in xrange(1, len(A)): if A[i-1] == A[i]: duplicate += 1 result -= A[i] else: move = min(duplicate, A[i]-A[i-1]-1) duplicate -= move result += move*A[i-1] + move*(move+1)//2 return result
[ "def", "minIncrementForUnique", "(", "self", ",", "A", ")", ":", "A", ".", "sort", "(", ")", "A", ".", "append", "(", "float", "(", "\"inf\"", ")", ")", "result", ",", "duplicate", "=", "0", ",", "0", "for", "i", "in", "xrange", "(", "1", ",", ...
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/minimum-increment-to-make-array-unique.py#L5-L21
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
syzygy/scripts/benchmark/chrome_control.py
python
GetPreload
()
Reads Chrome.dll preload settings from the registry. Returns: The percentage of chrome.dll that will be preloaded.
Reads Chrome.dll preload settings from the registry.
[ "Reads", "Chrome", ".", "dll", "preload", "settings", "from", "the", "registry", "." ]
def GetPreload(): """Reads Chrome.dll preload settings from the registry. Returns: The percentage of chrome.dll that will be preloaded. """ try: with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, _CHROME_FRAME_KEY) as key: percentage = _GetDWORDValue(key, _PREREAD_PERCENTAGE_VALUE) if percentage is None: percentage = 0 if _GetDWORDValue(key, _PREREAD_VALUE) == 0 else 100 return percentage except exceptions.WindowsError, ex: # We expect specific errors on non-present key or values. if ex.errno is not winerror.ERROR_FILE_NOT_FOUND: raise else: return 100
[ "def", "GetPreload", "(", ")", ":", "try", ":", "with", "_winreg", ".", "OpenKey", "(", "_winreg", ".", "HKEY_CURRENT_USER", ",", "_CHROME_FRAME_KEY", ")", "as", "key", ":", "percentage", "=", "_GetDWORDValue", "(", "key", ",", "_PREREAD_PERCENTAGE_VALUE", ")"...
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/syzygy/scripts/benchmark/chrome_control.py#L169-L186
forkineye/ESPixelStick
22926f1c0d1131f1369fc7cad405689a095ae3cb
dist/bin/pyserial/examples/port_publisher.py
python
Forwarder.close
(self)
Close all resources and unpublish service
Close all resources and unpublish service
[ "Close", "all", "resources", "and", "unpublish", "service" ]
def close(self): """Close all resources and unpublish service""" if self.log is not None: self.log.info("{}: closing...".format(self.device)) self.alive = False self.unpublish() if self.server_socket: self.server_socket.close() if self.socket: self.handle_disconnect() self.serial.close() if self.on_close is not None: # ensure it is only called once callback = self.on_close self.on_close = None callback(self)
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "log", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"{}: closing...\"", ".", "format", "(", "self", ".", "device", ")", ")", "self", ".", "alive", "=", "False", "self", ...
https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/pyserial/examples/port_publisher.py#L165-L180
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
ConfigBase_Create
(*args)
return _misc_.ConfigBase_Create(*args)
ConfigBase_Create() -> ConfigBase Create and return a new global config object. This function will create the "best" implementation of wx.Config available for the current platform.
ConfigBase_Create() -> ConfigBase
[ "ConfigBase_Create", "()", "-", ">", "ConfigBase" ]
def ConfigBase_Create(*args): """ ConfigBase_Create() -> ConfigBase Create and return a new global config object. This function will create the "best" implementation of wx.Config available for the current platform. """ return _misc_.ConfigBase_Create(*args)
[ "def", "ConfigBase_Create", "(", "*", "args", ")", ":", "return", "_misc_", ".", "ConfigBase_Create", "(", "*", "args", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L3467-L3475
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/aui/auibar.py
python
AuiToolBarItem.SetSizerItem
(self, s)
Associates a sizer item to this toolbar item. :param `s`: an instance of :class:`SizerItem`.
Associates a sizer item to this toolbar item.
[ "Associates", "a", "sizer", "item", "to", "this", "toolbar", "item", "." ]
def SetSizerItem(self, s): """ Associates a sizer item to this toolbar item. :param `s`: an instance of :class:`SizerItem`. """ self.sizer_item = s
[ "def", "SetSizerItem", "(", "self", ",", "s", ")", ":", "self", ".", "sizer_item", "=", "s" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibar.py#L434-L441
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/ultimatelistctrl.py
python
UltimateListCtrl.GetClassDefaultAttributes
(self, variant)
return attr
Returns the default font and colours which are used by the control. This is useful if you want to use the same font or colour in your own control as in a standard control -- which is a much better idea than hard coding specific colours or fonts which might look completely out of place on the users system, especially if it uses themes. This static method is "overridden'' in many derived classes and so calling, for example, :meth:`Button.GetClassDefaultAttributes` () will typically return the values appropriate for a button which will be normally different from those returned by, say, :meth:`ListCtrl.GetClassDefaultAttributes` (). :note: The :class:`VisualAttributes` structure has at least the fields `font`, `colFg` and `colBg`. All of them may be invalid if it was not possible to determine the default control appearance or, especially for the background colour, if the field doesn't make sense as is the case for `colBg` for the controls with themed background. :note: Overridden from :class:`PyControl`.
Returns the default font and colours which are used by the control. This is useful if you want to use the same font or colour in your own control as in a standard control -- which is a much better idea than hard coding specific colours or fonts which might look completely out of place on the users system, especially if it uses themes.
[ "Returns", "the", "default", "font", "and", "colours", "which", "are", "used", "by", "the", "control", ".", "This", "is", "useful", "if", "you", "want", "to", "use", "the", "same", "font", "or", "colour", "in", "your", "own", "control", "as", "in", "a"...
def GetClassDefaultAttributes(self, variant): """ Returns the default font and colours which are used by the control. This is useful if you want to use the same font or colour in your own control as in a standard control -- which is a much better idea than hard coding specific colours or fonts which might look completely out of place on the users system, especially if it uses themes. This static method is "overridden'' in many derived classes and so calling, for example, :meth:`Button.GetClassDefaultAttributes` () will typically return the values appropriate for a button which will be normally different from those returned by, say, :meth:`ListCtrl.GetClassDefaultAttributes` (). :note: The :class:`VisualAttributes` structure has at least the fields `font`, `colFg` and `colBg`. All of them may be invalid if it was not possible to determine the default control appearance or, especially for the background colour, if the field doesn't make sense as is the case for `colBg` for the controls with themed background. :note: Overridden from :class:`PyControl`. """ attr = wx.VisualAttributes() attr.colFg = wx.SystemSettings.GetColour(wx.SYS_COLOUR_LISTBOXTEXT) attr.colBg = wx.SystemSettings.GetColour(wx.SYS_COLOUR_LISTBOX) attr.font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT) return attr
[ "def", "GetClassDefaultAttributes", "(", "self", ",", "variant", ")", ":", "attr", "=", "wx", ".", "VisualAttributes", "(", ")", "attr", ".", "colFg", "=", "wx", ".", "SystemSettings", ".", "GetColour", "(", "wx", ".", "SYS_COLOUR_LISTBOXTEXT", ")", "attr", ...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ultimatelistctrl.py#L12419-L12445
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/protobuf/python/stubout.py
python
StubOutForTesting.Set
(self, parent, child_name, new_child)
Replace child_name's old definition with new_child, in the context of the given parent. The parent could be a module when the child is a function at module scope. Or the parent could be a class when a class' method is being replaced. The named child is set to new_child, while the prior definition is saved away for later, when UnsetAll() is called. This method supports the case where child_name is a staticmethod or a classmethod of parent.
Replace child_name's old definition with new_child, in the context of the given parent. The parent could be a module when the child is a function at module scope. Or the parent could be a class when a class' method is being replaced. The named child is set to new_child, while the prior definition is saved away for later, when UnsetAll() is called.
[ "Replace", "child_name", "s", "old", "definition", "with", "new_child", "in", "the", "context", "of", "the", "given", "parent", ".", "The", "parent", "could", "be", "a", "module", "when", "the", "child", "is", "a", "function", "at", "module", "scope", ".",...
def Set(self, parent, child_name, new_child): """Replace child_name's old definition with new_child, in the context of the given parent. The parent could be a module when the child is a function at module scope. Or the parent could be a class when a class' method is being replaced. The named child is set to new_child, while the prior definition is saved away for later, when UnsetAll() is called. This method supports the case where child_name is a staticmethod or a classmethod of parent. """ old_child = getattr(parent, child_name) old_attribute = parent.__dict__.get(child_name) if old_attribute is not None and isinstance(old_attribute, staticmethod): old_child = staticmethod(old_child) self.cache.append((parent, old_child, child_name)) setattr(parent, child_name, new_child)
[ "def", "Set", "(", "self", ",", "parent", ",", "child_name", ",", "new_child", ")", ":", "old_child", "=", "getattr", "(", "parent", ",", "child_name", ")", "old_attribute", "=", "parent", ".", "__dict__", ".", "get", "(", "child_name", ")", "if", "old_a...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/stubout.py#L109-L126
eldar/deepcut-cnn
928bf2f224fce132f6e4404b4c95fb017297a5e0
python/caffe/pycaffe.py
python
_Net_forward_all
(self, blobs=None, **kwargs)
return all_outs
Run net forward in batches. Parameters ---------- blobs : list of blobs to extract as in forward() kwargs : Keys are input blob names and values are blob ndarrays. Refer to forward(). Returns ------- all_outs : {blob name: list of blobs} dict.
Run net forward in batches.
[ "Run", "net", "forward", "in", "batches", "." ]
def _Net_forward_all(self, blobs=None, **kwargs): """ Run net forward in batches. Parameters ---------- blobs : list of blobs to extract as in forward() kwargs : Keys are input blob names and values are blob ndarrays. Refer to forward(). Returns ------- all_outs : {blob name: list of blobs} dict. """ # Collect outputs from batches all_outs = {out: [] for out in set(self.outputs + (blobs or []))} for batch in self._batch(kwargs): outs = self.forward(blobs=blobs, **batch) for out, out_blob in outs.iteritems(): all_outs[out].extend(out_blob.copy()) # Package in ndarray. for out in all_outs: all_outs[out] = np.asarray(all_outs[out]) # Discard padding. pad = len(all_outs.itervalues().next()) - len(kwargs.itervalues().next()) if pad: for out in all_outs: all_outs[out] = all_outs[out][:-pad] return all_outs
[ "def", "_Net_forward_all", "(", "self", ",", "blobs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Collect outputs from batches", "all_outs", "=", "{", "out", ":", "[", "]", "for", "out", "in", "set", "(", "self", ".", "outputs", "+", "(", "blobs...
https://github.com/eldar/deepcut-cnn/blob/928bf2f224fce132f6e4404b4c95fb017297a5e0/python/caffe/pycaffe.py#L159-L187
wy1iu/LargeMargin_Softmax_Loss
c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec
python/caffe/pycaffe.py
python
_Net_backward
(self, diffs=None, start=None, end=None, **kwargs)
return {out: self.blobs[out].diff for out in outputs}
Backward pass: prepare diffs and run the net backward. Parameters ---------- diffs : list of diffs to return in addition to bottom diffs. kwargs : Keys are output blob names and values are diff ndarrays. If None, top diffs are taken from forward loss. start : optional name of layer at which to begin the backward pass end : optional name of layer at which to finish the backward pass (inclusive) Returns ------- outs: {blob name: diff ndarray} dict.
Backward pass: prepare diffs and run the net backward.
[ "Backward", "pass", ":", "prepare", "diffs", "and", "run", "the", "net", "backward", "." ]
def _Net_backward(self, diffs=None, start=None, end=None, **kwargs): """ Backward pass: prepare diffs and run the net backward. Parameters ---------- diffs : list of diffs to return in addition to bottom diffs. kwargs : Keys are output blob names and values are diff ndarrays. If None, top diffs are taken from forward loss. start : optional name of layer at which to begin the backward pass end : optional name of layer at which to finish the backward pass (inclusive) Returns ------- outs: {blob name: diff ndarray} dict. """ if diffs is None: diffs = [] if start is not None: start_ind = list(self._layer_names).index(start) else: start_ind = len(self.layers) - 1 if end is not None: end_ind = list(self._layer_names).index(end) outputs = set([end] + diffs) else: end_ind = 0 outputs = set(self.inputs + diffs) if kwargs: if set(kwargs.keys()) != set(self.outputs): raise Exception('Top diff arguments do not match net outputs.') # Set top diffs according to defined shapes and make arrays single and # C-contiguous as Caffe expects. for top, diff in six.iteritems(kwargs): if diff.shape[0] != self.blobs[top].shape[0]: raise Exception('Diff is not batch sized') self.blobs[top].diff[...] = diff self._backward(start_ind, end_ind) # Unpack diffs to extract return {out: self.blobs[out].diff for out in outputs}
[ "def", "_Net_backward", "(", "self", ",", "diffs", "=", "None", ",", "start", "=", "None", ",", "end", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "diffs", "is", "None", ":", "diffs", "=", "[", "]", "if", "start", "is", "not", "None", ...
https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/python/caffe/pycaffe.py#L127-L172
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/boto3/resources/collection.py
python
ResourceCollection.all
(self)
return self._clone()
Get all items from the collection, optionally with a custom page size and item count limit. This method returns an iterable generator which yields individual resource instances. Example use:: # Iterate through items >>> for queue in sqs.queues.all(): ... print(queue.url) 'https://url1' 'https://url2' # Convert to list >>> queues = list(sqs.queues.all()) >>> len(queues) 2
Get all items from the collection, optionally with a custom page size and item count limit.
[ "Get", "all", "items", "from", "the", "collection", "optionally", "with", "a", "custom", "page", "size", "and", "item", "count", "limit", "." ]
def all(self): """ Get all items from the collection, optionally with a custom page size and item count limit. This method returns an iterable generator which yields individual resource instances. Example use:: # Iterate through items >>> for queue in sqs.queues.all(): ... print(queue.url) 'https://url1' 'https://url2' # Convert to list >>> queues = list(sqs.queues.all()) >>> len(queues) 2 """ return self._clone()
[ "def", "all", "(", "self", ")", ":", "return", "self", ".", "_clone", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/boto3/resources/collection.py#L183-L202
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/logging/__init__.py
python
Handler.flush
(self)
Ensure all logging output has been flushed. This version does nothing and is intended to be implemented by subclasses.
Ensure all logging output has been flushed.
[ "Ensure", "all", "logging", "output", "has", "been", "flushed", "." ]
def flush(self): """ Ensure all logging output has been flushed. This version does nothing and is intended to be implemented by subclasses. """ pass
[ "def", "flush", "(", "self", ")", ":", "pass" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/logging/__init__.py#L760-L767
shogun-toolbox/shogun
9b8d856971af5a295dd6ad70623ae45647a6334c
examples/meta/generator/parse.py
python
FastParser.p_elementAccess
(self, p)
elementAccess : identifier LSQUARE indexList RSQUARE
elementAccess : identifier LSQUARE indexList RSQUARE
[ "elementAccess", ":", "identifier", "LSQUARE", "indexList", "RSQUARE" ]
def p_elementAccess(self, p): "elementAccess : identifier LSQUARE indexList RSQUARE" p[0] = {"ElementAccess": [p[1], {"IndexList": p[3]}]}
[ "def", "p_elementAccess", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "{", "\"ElementAccess\"", ":", "[", "p", "[", "1", "]", ",", "{", "\"IndexList\"", ":", "p", "[", "3", "]", "}", "]", "}" ]
https://github.com/shogun-toolbox/shogun/blob/9b8d856971af5a295dd6ad70623ae45647a6334c/examples/meta/generator/parse.py#L245-L248
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/pexpect/pexpect.py
python
spawn.isatty
(self)
return os.isatty(self.child_fd)
This returns True if the file descriptor is open and connected to a tty(-like) device, else False.
This returns True if the file descriptor is open and connected to a tty(-like) device, else False.
[ "This", "returns", "True", "if", "the", "file", "descriptor", "is", "open", "and", "connected", "to", "a", "tty", "(", "-", "like", ")", "device", "else", "False", "." ]
def isatty(self): """This returns True if the file descriptor is open and connected to a tty(-like) device, else False. """ return os.isatty(self.child_fd)
[ "def", "isatty", "(", "self", ")", ":", "return", "os", ".", "isatty", "(", "self", ".", "child_fd", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/pexpect/pexpect.py#L722-L727
psi4/psi4
be533f7f426b6ccc263904e55122899b16663395
psi4/driver/qcdb/molecule.py
python
Molecule.__init__
(self, molinit=None, dtype=None, geom=None, elea=None, elez=None, elem=None, mass=None, real=None, elbl=None, name=None, units='Angstrom', input_units_to_au=None, fix_com=None, fix_orientation=None, fix_symmetry=None, fragment_separators=None, fragment_charges=None, fragment_multiplicities=None, molecular_charge=None, molecular_multiplicity=None, comment=None, provenance=None, connectivity=None, enable_qm=True, enable_efp=True, missing_enabled_return_qm='none', missing_enabled_return_efp='none', missing_enabled_return='error', tooclose=0.1, zero_ghost_fragments=False, nonphysical=False, mtol=1.e-3, verbose=1)
Initialize Molecule object from LibmintsMolecule
Initialize Molecule object from LibmintsMolecule
[ "Initialize", "Molecule", "object", "from", "LibmintsMolecule" ]
def __init__(self, molinit=None, dtype=None, geom=None, elea=None, elez=None, elem=None, mass=None, real=None, elbl=None, name=None, units='Angstrom', input_units_to_au=None, fix_com=None, fix_orientation=None, fix_symmetry=None, fragment_separators=None, fragment_charges=None, fragment_multiplicities=None, molecular_charge=None, molecular_multiplicity=None, comment=None, provenance=None, connectivity=None, enable_qm=True, enable_efp=True, missing_enabled_return_qm='none', missing_enabled_return_efp='none', missing_enabled_return='error', tooclose=0.1, zero_ghost_fragments=False, nonphysical=False, mtol=1.e-3, verbose=1): """Initialize Molecule object from LibmintsMolecule""" super(Molecule, self).__init__() if molinit is not None or geom is not None: if isinstance(molinit, dict): molrec = molinit elif isinstance(molinit, str): compound_molrec = qcel.molparse.from_string( molstr=molinit, dtype=dtype, name=name, fix_com=fix_com, fix_orientation=fix_orientation, fix_symmetry=fix_symmetry, return_processed=False, enable_qm=enable_qm, enable_efp=enable_efp, missing_enabled_return_qm=missing_enabled_return_qm, missing_enabled_return_efp=missing_enabled_return_efp, verbose=verbose) molrec = compound_molrec['qm'] elif molinit is None and geom is not None: molrec = qcel.molparse.from_arrays( geom=geom, elea=elea, elez=elez, elem=elem, mass=mass, real=real, elbl=elbl, name=name, units=units, input_units_to_au=input_units_to_au, fix_com=fix_com, fix_orientation=fix_orientation, fix_symmetry=fix_symmetry, fragment_separators=fragment_separators, fragment_charges=fragment_charges, fragment_multiplicities=fragment_multiplicities, molecular_charge=molecular_charge, molecular_multiplicity=molecular_multiplicity, comment=comment, provenance=provenance, connectivity=connectivity, domain='qm', missing_enabled_return=missing_enabled_return, tooclose=tooclose, zero_ghost_fragments=zero_ghost_fragments, nonphysical=nonphysical, mtol=mtol, verbose=verbose) # ok, got the molrec 
dictionary; now build the thing self._internal_from_dict(molrec, verbose=verbose) # The comment line self.tagline = ""
[ "def", "__init__", "(", "self", ",", "molinit", "=", "None", ",", "dtype", "=", "None", ",", "geom", "=", "None", ",", "elea", "=", "None", ",", "elez", "=", "None", ",", "elem", "=", "None", ",", "mass", "=", "None", ",", "real", "=", "None", ...
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/qcdb/molecule.py#L57-L149
apache/arrow
af33dd1157eb8d7d9bfac25ebf61445b793b7943
python/pyarrow/filesystem.py
python
FileSystem.read_parquet
(self, path, columns=None, metadata=None, schema=None, use_threads=True, use_pandas_metadata=False)
return dataset.read(columns=columns, use_threads=use_threads, use_pandas_metadata=use_pandas_metadata)
Read Parquet data from path in file system. Can read from a single file or a directory of files. Parameters ---------- path : str Single file path or directory columns : List[str], optional Subset of columns to read. metadata : pyarrow.parquet.FileMetaData Known metadata to validate files against. schema : pyarrow.parquet.Schema Known schema to validate files against. Alternative to metadata argument. use_threads : bool, default True Perform multi-threaded column reads. use_pandas_metadata : bool, default False If True and file has custom pandas schema metadata, ensure that index columns are also loaded. Returns ------- table : pyarrow.Table
Read Parquet data from path in file system. Can read from a single file or a directory of files.
[ "Read", "Parquet", "data", "from", "path", "in", "file", "system", ".", "Can", "read", "from", "a", "single", "file", "or", "a", "directory", "of", "files", "." ]
def read_parquet(self, path, columns=None, metadata=None, schema=None, use_threads=True, use_pandas_metadata=False): """ Read Parquet data from path in file system. Can read from a single file or a directory of files. Parameters ---------- path : str Single file path or directory columns : List[str], optional Subset of columns to read. metadata : pyarrow.parquet.FileMetaData Known metadata to validate files against. schema : pyarrow.parquet.Schema Known schema to validate files against. Alternative to metadata argument. use_threads : bool, default True Perform multi-threaded column reads. use_pandas_metadata : bool, default False If True and file has custom pandas schema metadata, ensure that index columns are also loaded. Returns ------- table : pyarrow.Table """ from pyarrow.parquet import ParquetDataset dataset = ParquetDataset(path, schema=schema, metadata=metadata, filesystem=self) return dataset.read(columns=columns, use_threads=use_threads, use_pandas_metadata=use_pandas_metadata)
[ "def", "read_parquet", "(", "self", ",", "path", ",", "columns", "=", "None", ",", "metadata", "=", "None", ",", "schema", "=", "None", ",", "use_threads", "=", "True", ",", "use_pandas_metadata", "=", "False", ")", ":", "from", "pyarrow", ".", "parquet"...
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/python/pyarrow/filesystem.py#L198-L229
microsoft/ivy
9f3c7ecc0b2383129fdd0953e10890d98d09a82d
ivy/ivy_parser.py
python
p_action_call_callatom
(p)
action : CALL callatom
action : CALL callatom
[ "action", ":", "CALL", "callatom" ]
def p_action_call_callatom(p):
    # NOTE: the string below is not documentation -- PLY yacc reads a rule
    # function's docstring as the grammar production it handles, so it must
    # stay exactly as written.
    'action : CALL callatom'
    # Wrap the parsed callatom in a CallAction AST node.
    p[0] = CallAction(p[2])
    # Record the source line of the CALL keyword for later diagnostics.
    p[0].lineno = get_lineno(p,1)
[ "def", "p_action_call_callatom", "(", "p", ")", ":", "p", "[", "0", "]", "=", "CallAction", "(", "p", "[", "2", "]", ")", "p", "[", "0", "]", ".", "lineno", "=", "get_lineno", "(", "p", ",", "1", ")" ]
https://github.com/microsoft/ivy/blob/9f3c7ecc0b2383129fdd0953e10890d98d09a82d/ivy/ivy_parser.py#L2329-L2332
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/deps/v8/third_party/jinja2/lexer.py
python
get_lexer
(environment)
return lexer
Return a lexer which is probably cached.
Return a lexer which is probably cached.
[ "Return", "a", "lexer", "which", "is", "probably", "cached", "." ]
def get_lexer(environment):
    """Return the (probably cached) lexer for *environment*.

    Lexers are shared between environments whose lexing-relevant settings
    are identical, so the cache key is the tuple of exactly those settings.
    """
    cache_key = (
        environment.block_start_string,
        environment.block_end_string,
        environment.variable_start_string,
        environment.variable_end_string,
        environment.comment_start_string,
        environment.comment_end_string,
        environment.line_statement_prefix,
        environment.line_comment_prefix,
        environment.trim_blocks,
        environment.lstrip_blocks,
        environment.newline_sequence,
        environment.keep_trailing_newline,
    )
    cached = _lexer_cache.get(cache_key)
    if cached is not None:
        return cached
    fresh = Lexer(environment)
    _lexer_cache[cache_key] = fresh
    return fresh
[ "def", "get_lexer", "(", "environment", ")", ":", "key", "=", "(", "environment", ".", "block_start_string", ",", "environment", ".", "block_end_string", ",", "environment", ".", "variable_start_string", ",", "environment", ".", "variable_end_string", ",", "environm...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/v8/third_party/jinja2/lexer.py#L391-L409
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/FortranCommon.py
python
DialectAddToEnv
(env, dialect, suffixes, ppsuffixes, support_module = 0)
Add dialect specific construction variables.
Add dialect specific construction variables.
[ "Add", "dialect", "specific", "construction", "variables", "." ]
def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0):
    """Add dialect specific construction variables.

    Registers scanners/builders for the given Fortran *dialect* (e.g.
    'F90') over *suffixes* (plain) and *ppsuffixes* (preprocessed), and
    fills in the dialect's FLAGS/COM/include-path construction variables
    if a tool has not already defined them. When support_module == 1 the
    command lines also carry $_FORTRANMODFLAG for module output.
    """
    # May mutate the suffix lists in place to honor case sensitivity settings.
    ComputeFortranSuffixes(suffixes, ppsuffixes)

    # One dependency scanner per dialect, keyed on its own search path.
    fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect)

    for suffix in suffixes + ppsuffixes:
        SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan)

    env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes)

    compaction, compppaction, shcompaction, shcompppaction = \
        CreateDialectActions(dialect)

    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Plain sources use the plain compile actions...
    for suffix in suffixes:
        static_obj.add_action(suffix, compaction)
        shared_obj.add_action(suffix, shcompaction)
        static_obj.add_emitter(suffix, FortranEmitter)
        shared_obj.add_emitter(suffix, ShFortranEmitter)

    # ...while preprocessed sources get the preprocessing variants.
    for suffix in ppsuffixes:
        static_obj.add_action(suffix, compppaction)
        shared_obj.add_action(suffix, shcompppaction)
        static_obj.add_emitter(suffix, FortranEmitter)
        shared_obj.add_emitter(suffix, ShFortranEmitter)

    # Only provide defaults a tool has not already set.
    if '%sFLAGS' % dialect not in env:
        env['%sFLAGS' % dialect] = SCons.Util.CLVar('')

    if 'SH%sFLAGS' % dialect not in env:
        env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)

    # If a tool does not define fortran prefix/suffix for include path, use C ones
    if 'INC%sPREFIX' % dialect not in env:
        env['INC%sPREFIX' % dialect] = '$INCPREFIX'

    if 'INC%sSUFFIX' % dialect not in env:
        env['INC%sSUFFIX' % dialect] = '$INCSUFFIX'

    env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect)

    if support_module == 1:
        env['%sCOM' % dialect]     = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
        env['%sPPCOM' % dialect]   = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
        env['SH%sCOM' % dialect]   = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
        env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
    else:
        env['%sCOM' % dialect]     = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
        env['%sPPCOM' % dialect]   = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
        env['SH%sCOM' % dialect]   = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
        env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
[ "def", "DialectAddToEnv", "(", "env", ",", "dialect", ",", "suffixes", ",", "ppsuffixes", ",", "support_module", "=", "0", ")", ":", "ComputeFortranSuffixes", "(", "suffixes", ",", "ppsuffixes", ")", "fscan", "=", "SCons", ".", "Scanner", ".", "Fortran", "."...
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/FortranCommon.py#L109-L161
svn2github/webrtc
0e4615a75ed555ec866cd5543bfea586f3385ceb
webrtc/tools/barcode_tools/barcode_decoder.py
python
_read_barcode_from_text_file
(barcode_file_name)
return barcode
Reads the decoded barcode for a .txt file. Args: barcode_file_name(string): The name of the .txt file. Return: (string): The decoded barcode.
Reads the decoded barcode for a .txt file.
[ "Reads", "the", "decoded", "barcode", "for", "a", ".", "txt", "file", "." ]
def _read_barcode_from_text_file(barcode_file_name): """Reads the decoded barcode for a .txt file. Args: barcode_file_name(string): The name of the .txt file. Return: (string): The decoded barcode. """ barcode_file = open(barcode_file_name, 'r') barcode = barcode_file.read() barcode_file.close() return barcode
[ "def", "_read_barcode_from_text_file", "(", "barcode_file_name", ")", ":", "barcode_file", "=", "open", "(", "barcode_file_name", ",", "'r'", ")", "barcode", "=", "barcode_file", ".", "read", "(", ")", "barcode_file", ".", "close", "(", ")", "return", "barcode" ...
https://github.com/svn2github/webrtc/blob/0e4615a75ed555ec866cd5543bfea586f3385ceb/webrtc/tools/barcode_tools/barcode_decoder.py#L155-L166
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/print-immutable-linked-list-in-reverse.py
python
Solution3.printLinkedListInReverse
(self, head)
:type head: ImmutableListNode :rtype: None
:type head: ImmutableListNode :rtype: None
[ ":", "type", "head", ":", "ImmutableListNode", ":", "rtype", ":", "None" ]
def printLinkedListInReverse(self, head): """ :type head: ImmutableListNode :rtype: None """ tail = None while head != tail: curr = head while curr.getNext() != tail: curr = curr.getNext() curr.printValue() tail = curr
[ "def", "printLinkedListInReverse", "(", "self", ",", "head", ")", ":", "tail", "=", "None", "while", "head", "!=", "tail", ":", "curr", "=", "head", "while", "curr", ".", "getNext", "(", ")", "!=", "tail", ":", "curr", "=", "curr", ".", "getNext", "(...
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/print-immutable-linked-list-in-reverse.py#L59-L70
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py3/pkg_resources/__init__.py
python
_initialize
(g=globals())
Set up global resource manager (deliberately not state-saved)
Set up global resource manager (deliberately not state-saved)
[ "Set", "up", "global", "resource", "manager", "(", "deliberately", "not", "state", "-", "saved", ")" ]
def _initialize(g=globals()):
    """Set up the global resource manager (deliberately not state-saved).

    Installs a ResourceManager instance as ``_manager`` in *g* (the
    module globals by default) and re-exports every public attribute of
    the manager there as well.
    """
    manager = ResourceManager()
    g['_manager'] = manager
    public_names = [attr for attr in dir(manager) if not attr.startswith('_')]
    g.update((attr, getattr(manager, attr)) for attr in public_names)
[ "def", "_initialize", "(", "g", "=", "globals", "(", ")", ")", ":", "manager", "=", "ResourceManager", "(", ")", "g", "[", "'_manager'", "]", "=", "manager", "g", ".", "update", "(", "(", "name", ",", "getattr", "(", "manager", ",", "name", ")", ")...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/pkg_resources/__init__.py#L3330-L3338
chromiumembedded/cef
80caf947f3fe2210e5344713c5281d8af9bdc295
tools/cef_parser.py
python
obj_function.get_capi_parts
(self, defined_structs=[], prefix=None)
return {'retval': retval, 'name': name, 'args': args}
Return the parts of the C API function definition.
Return the parts of the C API function definition.
[ "Return", "the", "parts", "of", "the", "C", "API", "function", "definition", "." ]
def get_capi_parts(self, defined_structs=[], prefix=None):
    """ Return the parts of the C API function definition.

    Returns a dict with keys 'retval' (the C return type, or '' when the
    return type does not expand to a single part), 'name' (the C API
    function name) and 'args' (list of C argument declarations, in order).

    NOTE(review): the mutable default ``defined_structs=[]`` is shared
    across calls; it appears to be only read here, but confirm before
    relying on that. Also note the locals ``dict``/``str``/``type``
    shadow builtins within this method.
    """
    retval = ''
    # Only a 'single'-format return type maps to a plain C return value.
    dict = self.retval.get_type().get_capi(defined_structs)
    if dict['format'] == 'single':
        retval = dict['value']

    name = self.get_capi_name(prefix)

    args = []

    if isinstance(self, obj_function_virtual):
        # virtual functions get themselves as the first argument
        str = 'struct _' + self.parent.get_capi_name() + '* self'
        if isinstance(self, obj_function_virtual) and self.is_const():
            # const virtual functions get const self pointers
            str = 'const ' + str
        args.append(str)

    if len(self.arguments) > 0:
        for cls in self.arguments:
            type = cls.get_type()
            dict = type.get_capi(defined_structs)
            if dict['format'] == 'single':
                args.append(dict['value'])
            elif dict['format'] == 'multi-arg':
                # add an additional argument for the size of the array
                type_name = type.get_name()
                if type.is_const():
                    # for const arrays pass the size argument by value
                    args.append('size_t ' + type_name + 'Count')
                else:
                    # for non-const arrays pass the size argument by address
                    args.append('size_t* ' + type_name + 'Count')
                args.append(dict['value'])

    return {'retval': retval, 'name': name, 'args': args}
[ "def", "get_capi_parts", "(", "self", ",", "defined_structs", "=", "[", "]", ",", "prefix", "=", "None", ")", ":", "retval", "=", "''", "dict", "=", "self", ".", "retval", ".", "get_type", "(", ")", ".", "get_capi", "(", "defined_structs", ")", "if", ...
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/cef_parser.py#L1208-L1243