code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def segment_text(text, seg_regex=SEG_REGEX):
    """Return an iterator of segments in the text.

    Args:
        text (unicode): string of IPA Unicode text.
        seg_regex (regex.Pattern): compiled regex defining a segment
            (base + modifiers).

    Yields:
        unicode: successive segments found in the input text.
    """
    for match in seg_regex.finditer(text):
        yield match.group(0)
Return an iterator of segments in the text. Args: text (unicode): string of IPA Unicode text seg_regex (_regex.Pattern): compiled regex defining a segment (base + modifiers) Return: generator: segments in the input text
codesearchnet
def send_peers(self, connection_id): with self._lock: peer_endpoints = list(self._peers.values()) if self._endpoint: peer_endpoints.append(self._endpoint) peers_response = GetPeersResponse(peer_endpoints=peer_endpoints) ...
Sends a message containing our peers to the connection identified by connection_id. Args: connection_id (str): A unique identifier which identifies an connection on the network server socket.
juraj-google-style
def _find_reader_dataset(self, dataset_key, **dfilter): too_many = False for (reader_name, reader_instance) in self.readers.items(): try: ds_id = reader_instance.get_dataset_key(dataset_key, **dfilter) except TooManyResults: LOG.trace('Too many datasets matching key {} in...
Attempt to find a `DatasetID` in the available readers. Args: dataset_key (str, float, DatasetID): Dataset name, wavelength, or a combination of `DatasetID` parameters to use in searching for the dataset from the available readers. **dfilter (list or str): `DatasetID` parameters besides `name` and `wavelength` to use ...
codesearchnet
def NetshStaticIp(interface, ip=u'127.0.0.9', subnet=u'255.255.255.255', gw=u'127.0.0.1'):
    """Change an interface to a statically set IP.

    Sets IP configs to local defaults if no parameters are passed.

    Args:
        interface: Name of the interface.
        ip: IP address.
        subnet: Subnet mask.
        gw: IP address of the default gateway.

    Returns:
        A tuple of stdout, stderr, exit_status.
    """
    netsh_args = [
        '/c', 'netsh', 'interface', 'ip', 'set', 'address',
        interface, 'static', ip, subnet, gw, '1',
    ]
    # time_limit=-1: no time limit, since reconfiguring an interface can be slow.
    return client_utils_common.Execute(
        'cmd', netsh_args, time_limit=(- 1), bypass_whitelist=True)
Changes interface to a statically set IP. Sets IP configs to local if no parameters passed. Args: interface: Name of the interface. ip: IP address. subnet: Subnet mask. gw: IP address of the default gateway. Returns: A tuple of stdout, stderr, exit_status.
codesearchnet
def _CalculateYLines(self, dists): tot_dist = sum(dists) if tot_dist > 0: pixel_dist = [float(d * (self._gheight-20))/tot_dist for d in dists] pixel_grid = [0]+[int(pd + sum(pixel_dist[0:i])) for i,pd in enumerate(pixel_dist)] else: pixel_grid = [] return ...
Builds a list with y-coordinates for the horizontal lines in the graph. Args: # One integer for each pair of stations # indicating the approximate distance dists: [0,33,140, ... ,X] Returns: # One integer y-coordinate for each station normalized between # 0 and X, where X is the height of the graph in pixels [0, 33, ...
juraj-google-style
def display_arr(screen, arr, video_size, transpose):
    """Display an image to the pygame screen.

    Args:
        screen (pygame.Surface): the pygame surface to write frames to.
        arr (np.ndarray): numpy array representing a single frame of gameplay.
        video_size (tuple): the size to render the frame as.
        transpose (bool): whether to transpose the frame before displaying.

    Returns:
        None
    """
    if transpose:
        # pygame surfaces use (width, height) ordering, so swap the first two axes.
        frame = pygame.surfarray.make_surface(arr.swapaxes(0, 1))
    else:
        frame = arr
    scaled = pygame.transform.scale(frame, video_size)
    screen.blit(scaled, (0, 0))
Display an image to the pygame screen. Args: screen (pygame.Surface): the pygame surface to write frames to arr (np.ndarray): numpy array representing a single frame of gameplay video_size (tuple): the size to render the frame as transpose (bool): whether to transpose the frame before displaying Returns: None
juraj-google-style
def _get_run_debug_urls(self): return ['file:
Get the debug_urls value for the current run() call. Returns: debug_urls: (list of str) Debug URLs for the current run() call. Currently, the list consists of only one URL that is a file:// URL.
github-repos
def _deduplicate_indexed_slices(values, indices):
    """Sums `values` associated with any non-unique `indices`.

    Args:
        values: A `Tensor` with rank >= 1.
        indices: A one-dimensional integer `Tensor`, indexing into the first
            dimension of `values` (as in an IndexedSlices object).

    Returns:
        A tuple of (`summed_values`, `unique_indices`) where `unique_indices`
        is a de-duplicated version of `indices` and `summed_values` contains
        the sum of `values` slices associated with each unique index.
    """
    unique_indices, new_index_positions = array_ops.unique(indices)
    num_unique = array_ops.shape(unique_indices)[0]
    summed_values = math_ops.unsorted_segment_sum(
        values, new_index_positions, num_unique)
    return (summed_values, unique_indices)
Sums `values` associated with any non-unique `indices`. Args: values: A `Tensor` with rank >= 1. indices: A one-dimensional integer `Tensor`, indexing into the first dimension of `values` (as in an IndexedSlices object). Returns: A tuple of (`summed_values`, `unique_indices`) where `unique_indices` is a de-duplicated ...
github-repos
def gmeta_pop(gmeta, info=False): if (type(gmeta) is GlobusHTTPResponse): gmeta = json.loads(gmeta.text) elif (type(gmeta) is str): gmeta = json.loads(gmeta) elif (type(gmeta) is not dict): raise TypeError('gmeta must be dict, GlobusHTTPResponse, or JSON string') results = [] ...
Remove GMeta wrapping from a Globus Search result. This function can be called on the raw GlobusHTTPResponse that Search returns, or a string or dictionary representation of it. Arguments: gmeta (dict, str, or GlobusHTTPResponse): The Globus Search result to unwrap. info (bool): If ``False``, will return a list of the...
codesearchnet
def has_chosen(state, correct, msgs): if (not issubclass(type(correct), int)): raise InstructorError('Inside `has_chosen()`, the argument `correct` should be an integer.') student_process = state.student_process if (not isDefinedInProcess(MC_VAR_NAME, student_process)): raise InstructorError...
Test multiple choice exercise. Test for a MultipleChoiceExercise. The correct answer (as an integer) and feedback messages are passed to this function. Args: correct (int): the index of the correct answer (should be an instruction). Starts at 1. msgs (list(str)): a list containing all feedback messages belonging to e...
codesearchnet
def read_data_event(self, whence, complete=False, can_flush=False):
    """Create a transition to a co-routine for retrieving data as bytes.

    Args:
        whence (Coroutine): The co-routine to return to after the data is
            satisfied.
        complete (Optional[bool]): True if STREAM_END should be emitted if no
            bytes are read or available; False if INCOMPLETE should be emitted
            in that case.
        can_flush (Optional[bool]): True if the handler may flush pending
            data — TODO confirm exact flush semantics against _read_data_handler.
    """
    handler = _read_data_handler(whence, self, complete, can_flush)
    return Transition(None, handler)
Creates a transition to a co-routine for retrieving data as bytes. Args: whence (Coroutine): The co-routine to return to after the data is satisfied. complete (Optional[bool]): True if STREAM_END should be emitted if no bytes are read or available; False if INCOMPLETE should be emitted in that case. can_flush (Optiona...
codesearchnet
def plot_residuals(self, plot=None): if plot is None: import matplotlib.pyplot as plot x = numpy.arange(1, len(self.residuals) + 1) y = _gvar.mean(self.residuals) yerr = _gvar.sdev(self.residuals) plot.errorbar(x=x, y=y, yerr=yerr, fmt='o', color='b') ...
Plot normalized fit residuals. The sum of the squares of the residuals equals ``self.chi2``. Individual residuals should be distributed about one, in a Gaussian distribution. Args: plot: :mod:`matplotlib` plotter. If ``None``, uses ``matplotlib.pyplot``. Returns: Plotter ``plot``.
juraj-google-style
def _ParseApplicationPasswordRecord(self, parser_mediator, record): key = record.get('_key_', None) if ((not key) or (not key.startswith(b'ssgp'))): raise errors.ParseError('Unsupported application password record key value does not start with: "ssgp".') event_data = KeychainApplicationRecordEventDa...
Extracts the information from an application password record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. record (dict[str, object]): database record. Raises: ParseError: if Internet password record cannot be parsed.
codesearchnet
def add_physical_qubit(self, physical_qubit): if (not isinstance(physical_qubit, int)): raise CouplingError('Physical qubits should be integers.') if (physical_qubit in self.physical_qubits): raise CouplingError(('The physical qubit %s is already in the coupling graph' % physical_qubit)) sel...
Add a physical qubit to the coupling graph as a node. physical_qubit (int): An integer representing a physical qubit. Raises: CouplingError: if trying to add duplicate qubit
codesearchnet
def get_last_next(self, date):
    """Provide the last and next leap-second events relative to a date.

    Args:
        date (float): Date in MJD.

    Returns:
        tuple: ((past_mjd, past_value), (future_mjd, future_value)); either
        pair is (None, None) when no event exists on that side of ``date``.
    """
    past = (None, None)
    future = (None, None)
    # self.data is scanned newest-first; the first entry at or before
    # ``date`` is the past event, and the entry just after it (seen last
    # before the break) is the future event.
    for mjd, value in reversed(self.data):
        if mjd <= date:
            past = (mjd, value)
            break
        future = (mjd, value)
    return (past, future)
Provide the last and next leap-second events relative to a date Args: date (float): Date in MJD Return: tuple:
codesearchnet
def get_csv(filename):
    """Return a CSV representation of a file's EXIF data.

    Takes a filename and returns a unicode string in CSV format.

    Arguments:
        filename {string} -- your filename

    Returns:
        [unicode] -- unicode string, or 0 when exiftool produced no output
    """
    check_if_this_file_exist(filename)
    absolute_path = os.path.abspath(filename)
    output = command_line(['exiftool', '-G', '-csv', '-sort', absolute_path])
    if not output:
        return 0
    return output.decode('utf-8')
Return a csv representation of the exif get a filename and returns a unicode string with a CSV format Arguments: filename {string} -- your filename Returns: [unicode] -- unicode string
codesearchnet
def _read_template(template):
    """Read an XSLT template.

    Args:
        template (str): Filename or XML string. Don't use ``\\n`` in case of
            filename.

    Returns:
        obj: Required XML parsed with ``lxml.etree``.
    """
    content = _read_content_or_path(template)
    # ET.parse() wants a file-like object, so wrap the string.
    return ET.parse(StringIO.StringIO(content))
Read XSLT template. Args: template (str): Filename or XML string. Don't use ``\\n`` in case of filename. Returns: obj: Required XML parsed with ``lxml.etree``.
juraj-google-style
def _calibrate_ir(radiance, coefs): logger.debug('Calibrating to brightness temperature') n = coefs['n'] bteff = ((C2 * n) / xu.log((1 + ((C1 * (n ** 3)) / radiance.where((radiance > 0)))))) bt = xr.DataArray(((bteff * coefs['b']) + coefs['a'])) return bt.where(xu.logical_and((bt >= coefs['btmin']),...
Convert IR radiance to brightness temperature Reference: [IR] Args: radiance: Radiance [mW m-2 cm-1 sr-1] coefs: Dictionary of calibration coefficients. Keys: n: The channel's central wavenumber [cm-1] a: Offset [K] b: Slope [1] btmin: Minimum brightness temperature threshold [K] btmax: Maximum brightness temperature...
codesearchnet
def seed(self, seed):
    """Set the random seed of the environment to the given value.

    Uses the current epoch time when ``seed`` is None. Naturally
    deterministic environments (e.g. ALE or some gym Envs) don't have to
    implement this method.

    Args:
        seed (int): The seed to use for initializing the pseudo-random
            number generator (default=epoch time in sec).

    Returns:
        The seed actually applied to the environment.
    """
    self.env.seed = round(time.time()) if seed is None else seed
    return self.env.seed
Sets the random seed of the environment to the given value (current time, if seed=None). Naturally deterministic Environments (e.g. ALE or some gym Envs) don't have to implement this method. Args: seed (int): The seed to use for initializing the pseudo-random number generator (default=epoch time in sec). Returns: The ...
juraj-google-style
def tscore(sample1, sample2):
    """Calculate a t-test score for the difference between two samples.

    Args:
        sample1: one sample.
        sample2: the other sample.

    Returns:
        The t-test score, as a float.

    Raises:
        ValueError: if the samples have different numbers of values.
    """
    if len(sample1) != len(sample2):
        raise ValueError("different number of values")
    mean_diff = statistics.mean(sample1) - statistics.mean(sample2)
    variance = pooled_sample_variance(sample1, sample2) / len(sample1)
    return mean_diff / math.sqrt(variance * 2)
Calculate a t-test score for the difference between two samples. Args: sample1: one sample. sample2: the other sample. Returns: The t-test score, as a float.
juraj-google-style
def unarchive_user(self, user_id):
    """Unarchive the user with the specified user ID.

    Args:
        user_id: `int`. The ID of the user to unarchive.

    Returns:
        `NoneType`: None.
    """
    # NOTE(review): verify=False disables TLS certificate verification —
    # confirm this is intentional for the target deployment.
    response = requests.patch(
        url=self.record_url + "/unarchive",
        json={"user_id": user_id},
        headers=HEADERS,
        verify=False,
    )
    self.write_response_html_to_file(response, "bob.html")
    response.raise_for_status()
Unarchives the user with the specified user ID. Args: user_id: `int`. The ID of the user to unarchive. Returns: `NoneType`: None.
juraj-google-style
def _preserve_bonds(self, sliced_cartesian, use_lookup=None): if (use_lookup is None): use_lookup = settings['defaults']['use_lookup'] included_atoms_set = set(sliced_cartesian.index) assert included_atoms_set.issubset(set(self.index)), 'The sliced Cartesian has to be a subset of the bigger frame' ...
Is called after cutting geometric shapes. If you want to change the rules how bonds are preserved, when applying e.g. :meth:`Cartesian.cut_sphere` this is the function you have to modify. It is recommended to inherit from the Cartesian class to tailor it for your project, instead of modifying the source code of ChemCo...
codesearchnet
def get_value(data, key): ref = data try: for subkey in key.split('.'): if isinstance(ref, dict): ref = ref[subkey] else: print(('CRITICAL: Cannot use subkey %s on non-dictionary element' % subkey)) return None return ref ...
Follow the dot notation to get the proper field, then perform the action Args: data: the data as a dictionary (required to be a dictionary) key: the key (as dot notation) into the data that gives the field (IP.src) Returns: the value of the field(subfield) if it exist, otherwise None
codesearchnet
def add_moving_summary(*args, **kwargs): decay = kwargs.pop('decay', 0.95) coll = kwargs.pop('collection', MOVING_SUMMARY_OPS_KEY) summ_coll = kwargs.pop('summary_collections', None) assert (len(kwargs) == 0), ('Unknown arguments: ' + str(kwargs)) ctx = get_current_tower_context() if ((ctx is no...
Summarize the moving average for scalar tensors. This function is a no-op if not calling from main training tower. Args: args: scalar tensors to summarize decay (float): the decay rate. Defaults to 0.95. collection (str or None): the name of the collection to add EMA-maintaining ops. The default will work together wit...
codesearchnet
async def export_image(self, name: str):
    """Get a tarball of an image by name or id.

    Args:
        name: name/id of the image to be exported.

    Returns:
        Streamreader of tarball image.
    """
    endpoint = 'images/{name}/get'.format(name=name)
    response = await self.docker._query(endpoint, 'GET')
    return response.content
Get a tarball of an image by name or id. Args: name: name/id of the image to be exported Returns: Streamreader of tarball image
codesearchnet
def print_args(output=sys.stdout): def decorator(func): 'The decorator function.\n ' @wraps(func) def _(*args, **kwargs): 'The decorated function.\n ' output.write('Args: {0}, KwArgs: {1}\n'.format(str(args), str(kwargs))) return func(*...
Decorate a function so that print arguments before calling it. Args: output: writable to print args. (Default: sys.stdout)
codesearchnet
def _rmsprop(self, grads, cache=None, decay_rate=0.95):
    """Use RMSProp to compute a step from gradients.

    Args:
        grads: numpy array of gradients.
        cache: numpy array of same shape as `grads` as RMSProp cache.
        decay_rate: How fast to decay cache.

    Returns:
        A tuple of (step, cache) where `step` is a numpy array of the same
        shape as `grads` giving the step (note: the learning rate is not yet
        applied), and `cache` is the updated RMSProp cache.
    """
    if cache is None:
        cache = np.zeros_like(grads)
    # Exponential moving average of squared gradients.
    cache = decay_rate * cache + (1 - decay_rate) * grads ** 2
    # K.epsilon() guards against division by zero.
    step = -grads / np.sqrt(cache + K.epsilon())
    return (step, cache)
Uses RMSProp to compute step from gradients. Args: grads: numpy array of gradients. cache: numpy array of same shape as `grads` as RMSProp cache decay_rate: How fast to decay cache Returns: A tuple of step: numpy array of the same shape as `grads` giving the step. Note that this does not yet take the learning rate in...
juraj-google-style
def open(self, host, port=23): self._telnet_client.open(host, port) config_str = self._telnet_client.cmd("MN?") if config_str.startswith("MN="): config_str = config_str[len("MN="):] self.properties = dict( zip(['model', 'max_freq', 'max_atten'], config_st...
Opens a telnet connection to the desired AttenuatorDevice and queries basic information. Args: host: A valid hostname (IP address or DNS-resolvable name) to an MC-DAT attenuator instrument. port: An optional port number (defaults to telnet default 23)
juraj-google-style
def jacobian_s(nodes, degree, dimension): r num_nodes = (degree * (degree + 1)) result = np.empty((dimension, num_nodes), order="F") index = 0 i = 0 for num_vals in six.moves.xrange(degree, 0, -1): for _ in six.moves.xrange(num_vals): result[:, index] = nodes[:, i + 1] - nod...
r"""Compute :math:`\frac{\partial B}{\partial s}`. .. note:: This is a helper for :func:`_jacobian_both`, which has an equivalent Fortran implementation. Args: nodes (numpy.ndarray): Array of nodes in a surface. degree (int): The degree of the surface. dimension (int): The dimension the surface lives in. Returns: n...
juraj-google-style
def set_label_list(self, label_lists): if isinstance(label_lists, annotations.LabelList): label_lists = [label_lists] for label_list in label_lists: if label_list.idx is None: label_list.idx = 'default' label_list.utterance = self ...
Set the given label-list for this utterance. If the label-list-idx is not set, ``default`` is used. If there is already a label-list with the given idx, it will be overridden. Args: label_list (LabelList, list): A single label-list or a list of label-lists to add.
juraj-google-style
def needle_statistics(infile): alignments = list(AlignIO.parse(infile, 'emboss')) alignment_properties = defaultdict(dict) with open(infile) as f: line = f.readline() for i in range(len(alignments)): while (line.rstrip() != ' line = f.readline() if...
Reads in a needle alignment file and spits out statistics of the alignment. Args: infile (str): Alignment file name Returns: dict: alignment_properties - a dictionary telling you the number of gaps, identity, etc.
codesearchnet
def __init__(self, outer_index, inner_index): if outer_index.batch_dims != inner_index.batch_dims: raise ValueError('outer_index.batch_dims and inner_index.batch_dims must be the same.') super().__init__(indices=inner_index.indices + outer_index.indices * inner_index.num_segments, num_segments=inner_ind...
Combines indices i and j into pairs (i, j). The result is an index where each segment (i, j) is the intersection of segments i and j. For example if the inputs represent table cells indexed by respectively rows and columns the output will be a table indexed by (row, column) pairs, i.e. by cell. The implementation combi...
github-repos
def seq_int_arr(seqs):
    """Convert a list of ACGT strings to a matrix of ints 1-4.

    Args:
        seqs (list of str): nucleotide sequences with only 'ACGT' characters.

    Returns:
        numpy.array of int: matrix of integers from 1 to 4 inclusive
        representing A, C, G, and T.
    """
    rows = []
    for seq in seqs:
        rows.append([NT_TO_INT[base] for base in seq.upper()])
    return np.array(rows)
Convert list of ACGT strings to matrix of 1-4 ints Args: seqs (list of str): nucleotide sequences with only 'ACGT' characters Returns: numpy.array of int: matrix of integers from 1 to 4 inclusive representing A, C, G, and T str: nucleotide sequence string
codesearchnet
def _handle_is_dag_stopped(self, request):
    """Handle the dag_stopped request.

    The dag_stopped request checks whether a dag is flagged to be terminated.

    Args:
        request (Request): Reference to a request object containing the
            incoming request. The payload has to contain the field
            'dag_name': the name of the dag that should be checked.

    Returns:
        Response: success response whose payload field 'is_stopped' says
        whether the dag is flagged for termination.
    """
    dag_name = request.payload['dag_name']
    return Response(
        success=True,
        uid=request.uid,
        payload={'is_stopped': dag_name in self._stop_dags},
    )
The handler for the dag_stopped request. The dag_stopped request checks whether a dag is flagged to be terminated. Args: request (Request): Reference to a request object containing the incoming request. The payload has to contain the following fields: 'dag_name': the name of the dag that should be checked Returns: R...
codesearchnet
def flags(cls): assert cls.__bases__ == (object,) d = dict(cls.__dict__) new_type = type(cls.__name__, (int,), d) new_type.__module__ = cls.__module__ map_ = {} for key, value in iteritems(d): if key.upper() == key and isinstance(value, integer_types): value_instance ...
A decorator for creating an int flags class. Makes the values a subclass of the type and implements repr/str. The new class will be a subclass of int. Args: cls (type): The class to convert to an flags Returns: type: A new class :: @flags class Foo(object): FOO = 1 BAR = 2
juraj-google-style
def add_graph( self, y, x_label=None, y_label="", title="", x_run=None, y_run=None, svg_size_px=None, key_position="bottom right", ): if x_run is None: x_run = self.default_x_run if y_run is None: ...
Add a new graph to the overlap report. Args: y (str): Value plotted on y-axis. x_label (str): Label on x-axis. y_label (str): Label on y-axis. title (str): Title of the plot. x_run ((float,float)): x-range. y_run ((int,int)): y-rang. svg_size_px ((int,int): Size of SVG image in pixels. key_position (str): GnuPlot posi...
juraj-google-style
def _maybe_partial_apply_variables(fn, args, kwargs): def is_distributed_var(x): flat = nest.flatten(x) return flat and isinstance(flat[0], values.DistributedVariable) var_kwargs = {} nonvar_kwargs = {} if kwargs: var_kwargs = {k: v for k, v in kwargs.items() if is_distributed_v...
Inspects arguments to partially apply any DistributedVariable. This avoids an automatic cast of the current variable value to tensor. Note that a variable may be captured implicitly with Python scope instead of passing it to run(), but supporting run() keeps behavior consistent with MirroredStrategy. Since positiona...
github-repos
def __init__(self, name, description=None):
    """Initialize an artifact definition.

    Args:
        name (str): name that uniquely identifies the artifact definition.
        description (Optional[str]): description of the artifact definition.
    """
    super(ArtifactDefinition, self).__init__()
    self.name = name
    self.description = description
    # Collection attributes start empty and are filled in by readers.
    self.conditions = []
    self.labels = []
    self.provides = []
    self.sources = []
    self.supported_os = []
    self.urls = []
Initializes an artifact definition. Args: name (str): name that uniquely identifies the artifact definition. description (Optional[str]): description of the artifact definition.
juraj-google-style
def intersect(self, other):
    """Return the intersection between this frequency band and another.

    Args:
        other (FrequencyBand): the instance to intersect with.
    """
    # The intersection spans from the larger start to the smaller stop.
    start = max(self.start_hz, other.start_hz)
    stop = min(self.stop_hz, other.stop_hz)
    return FrequencyBand(start, stop)
Return the intersection between this frequency band and another. Args: other (FrequencyBand): the instance to intersect with Examples:: >>> import zounds >>> b1 = zounds.FrequencyBand(500, 1000) >>> b2 = zounds.FrequencyBand(900, 2000) >>> intersection = b1.intersect(b2) >>> intersection.start_hz, intersection.stop_h...
codesearchnet
def plot_weight_posteriors(names, qm_vals, qs_vals, fname): fig = figure.Figure(figsize=(6, 3)) canvas = backend_agg.FigureCanvasAgg(fig) ax = fig.add_subplot(1, 2, 1) for (n, qm) in zip(names, qm_vals): sns.distplot(qm.flatten(), ax=ax, label=n) ax.set_title('weight means') ax.set_xlim(...
Save a PNG plot with histograms of weight means and stddevs. Args: names: A Python `iterable` of `str` variable names. qm_vals: A Python `iterable`, the same length as `names`, whose elements are Numpy `array`s, of any shape, containing posterior means of weight variables. qs_vals: A Python `iterable`, the same length ...
codesearchnet
def call(self, input_ids: tf.Tensor | None=None, attention_mask: tf.Tensor | None=None, decoder_input_ids: tf.Tensor | None=None, decoder_attention_mask: tf.Tensor | None=None, decoder_position_ids: tf.Tensor | None=None, head_mask: tf.Tensor | None=None, decoder_head_mask: tf.Tensor | None=None, cross_attn_head_mask: ...
labels (`tf.tensor` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is only computed for the toke...
github-repos
def set_callback(self, property_name, callback):
    """Set a set-callback for a given property.

    Args:
        property_name: Name of the property.
        callback: The callback as a `callable` of signature
            ``def cbk(config)`` where config is the config after it is set to
            the new value. The callback is invoked each time the set() method
            is called with the matching property_name.

    Raises:
        KeyError: If property_name does not exist.
        TypeError: If `callback` is not callable.
    """
    # Validate the property name before the callback so the error for an
    # unknown property takes precedence.
    if property_name not in self._config:
        raise KeyError('%s is not a valid property name.' % property_name)
    if not callable(callback):
        raise TypeError('The callback object provided is not callable.')
    self._set_callbacks[property_name] = callback
Set a set-callback for given property. Args: property_name: Name of the property. callback: The callback as a `callable` of signature: def cbk(config): where config is the config after it is set to the new value. The callback is invoked each time the set() method is called with the matching property_name. Raises: Key...
github-repos
def get_clinvar_id(self, submission_id):
    """Return the official ClinVar submission ID for a submission object.

    Args:
        submission_id (str): id of the submission.

    Returns:
        clinvar_subm_id (str): a string with format SUB[0-9]+, obtained from
        the ClinVar portal when starting a new submission.
    """
    query = {'_id': ObjectId(submission_id)}
    submission_obj = self.clinvar_submission_collection.find_one(query)
    return submission_obj.get('clinvar_subm_id')
Returns the official Clinvar submission ID for a submission object Args: submission_id(str): id of the submission Returns: clinvar_subm_id(str): a string with a format: SUB[0-9]. It is obtained from clinvar portal when starting a new submission
codesearchnet
def optimize(self, sess, batch_index): feed_dict = {self._batch_index: batch_index, self._per_device_batch_size: self._loaded_per_device_batch_size, self._max_seq_len: self._loaded_max_seq_len} for tower in self._towers: feed_dict.update(tower.loss_graph.extra_compute_grad_feed_dict()) fetches = {'t...
Run a single step of SGD. Runs a SGD step over a slice of the preloaded batch with size given by self._loaded_per_device_batch_size and offset given by the batch_index argument. Updates shared model weights based on the averaged per-device gradients. Args: sess: TensorFlow session. batch_index: Offset into the prelo...
codesearchnet
def preprocess_JPEG(self, image, **kwargs):
    """Receive a PIL Image instance of a JPEG and return a 2-tuple.

    Returns:
        * [0]: Image instance, converted to RGB
        * [1]: Dict with a quality key (mapped to the value of `QUAL` as
          defined by the `VERSATILEIMAGEFIELD_JPEG_RESIZE_QUALITY` setting)
    """
    # JPEG cannot store alpha or palette modes; normalize to RGB first.
    if image.mode != 'RGB':
        image = image.convert('RGB')
    save_kwargs = {
        'progressive': VERSATILEIMAGEFIELD_PROGRESSIVE_JPEG,
        'quality': QUAL,
    }
    return (image, save_kwargs)
Receive a PIL Image instance of a JPEG and returns 2-tuple. Args: * [0]: Image instance, converted to RGB * [1]: Dict with a quality key (mapped to the value of `QUAL` as defined by the `VERSATILEIMAGEFIELD_JPEG_RESIZE_QUALITY` setting)
juraj-google-style
def waitForEvent(self, event_name, predicate, timeout=DEFAULT_TIMEOUT): deadline = (time.time() + timeout) while (time.time() <= deadline): rpc_timeout = (deadline - time.time()) if (rpc_timeout < 0): break rpc_timeout = min(rpc_timeout, MAX_TIMEOUT) try: ...
Wait for an event of a specific name that satisfies the predicate. This call will block until the expected event has been received or time out. The predicate function defines the condition the event is expected to satisfy. It takes an event and returns True if the condition is satisfied, False otherwise. Note all ev...
codesearchnet
def total_stored(self, wanted, slots=None):
    """Calculate the total number of matching items in the current window
    or given slot range.

    Args:
        wanted: function(Slot) or Slot or itemID or (itemID, metadata).
        slots: optional iterable of slots to search; defaults to the
            window's slots.
    """
    if slots is None:
        slots = self.window.slots
    matches = make_slot_check(wanted)
    return sum(slot.amount for slot in slots if matches(slot))
Calculates the total number of items of that type in the current window or given slot range. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata)
juraj-google-style
def convert(self, value): if self._type is str: return str(value) elif self._type is int: try: return int(value) except (UnicodeError, ValueError): raise WorkflowArgumentError('Cannot convert {} to int'.format(value)) e...
Convert the specified value to the type of the option. Args: value: The value that should be converted. Returns: The value with the type given by the option.
juraj-google-style
def ReceiveMessages(self, client_id, messages): if data_store.RelationalDBEnabled(): return self.ReceiveMessagesRelationalFlows(client_id, messages) now = time.time() with queue_manager.QueueManager(token=self.token) as manager: for (session_id, msgs) in iteritems(collection.Group(messages, ...
Receives and processes the messages from the source. For each message we update the request object, and place the response in that request's queue. If the request is complete, we send a message to the worker. Args: client_id: The client which sent the messages. messages: A list of GrrMessage RDFValues.
codesearchnet
def generate_host_passthrough(self, vcpu_num):
    """Generate a host-passthrough XML cpu node.

    Args:
        vcpu_num (str): number of virtual CPUs.
            NOTE(review): documented as str but compared to an int below —
            confirm callers pass an int.

    Returns:
        lxml.etree.Element: CPU XML node.
    """
    cpu_node = ET.Element('cpu', mode='host-passthrough')
    cpu_node.append(self.generate_topology(vcpu_num))
    # A NUMA layout only makes sense with more than one virtual CPU.
    if vcpu_num > 1:
        cpu_node.append(self.generate_numa(vcpu_num))
    return cpu_node
Generate host-passthrough XML cpu node Args: vcpu_num(str): number of virtual CPUs Returns: lxml.etree.Element: CPU XML node
juraj-google-style
def _ReadParserPresetValues(self, preset_definition_values): if not preset_definition_values: raise errors.MalformedPresetError('Missing preset definition values.') name = preset_definition_values.get('name', None) if not name: raise errors.MalformedPresetError( 'Invalid preset d...
Reads a parser preset from a dictionary. Args: preset_definition_values (dict[str, object]): preset definition values. Returns: ParserPreset: a parser preset. Raises: MalformedPresetError: if the format of the preset definition is not set or incorrect, or the preset of a specific operating system has already been se...
juraj-google-style
def __init__(self, sess):
    """Constructor.

    Args:
        sess: A tensorflow Session object.
    """
    # Reject anything that is not a (monitored) TF session up front.
    _check_type(
        sess, (session.BaseSession, monitored_session.MonitoredSession))
    self.session = sess
Constructor. Args: sess: A tensorflow Session object.
github-repos
def on_raw_update( self=None, group: int = 0 ) -> callable: def decorator(func: callable) -> Tuple[Handler, int]: if isinstance(func, tuple): func = func[0].callback handler = pyrogram.RawUpdateHandler(func) if isinstance(self, ...
Use this decorator to automatically register a function for handling raw updates. This does the same thing as :meth:`add_handler` using the :class:`RawUpdateHandler`. Args: group (``int``, *optional*): The group identifier, defaults to 0.
juraj-google-style
def read_model(input_tflite_file): if not gfile.Exists(input_tflite_file): raise RuntimeError('Input file not found at %r\n' % input_tflite_file) with gfile.GFile(input_tflite_file, 'rb') as input_file_handle: model_bytearray = bytearray(input_file_handle.read()) return read_model_from_bytea...
Reads a tflite model as a python object. Args: input_tflite_file: Full path name to the input tflite file Raises: RuntimeError: If input_tflite_file path is invalid. IOError: If input_tflite_file cannot be opened. Returns: A python object corresponding to the input tflite file.
github-repos
def _GetTypeIndicators(cls, signature_scanner, specification_store, remainder_list, path_spec, resolver_context=None): type_indicator_list = [] file_object = resolver.Resolver.OpenFileObject(path_spec, resolver_context=resolver_context) scan_state = pysigscan.scan_state() try: signature_scanner....
Determines if a file contains a supported format types. Args: signature_scanner (pysigscan.scanner): signature scanner. specification_store (FormatSpecificationStore): specification store. remainder_list (list[AnalyzerHelper]): remaining analyzer helpers that do not have a format specification. path_spec (PathSpec): p...
codesearchnet
def delete_resource_group(access_token, subscription_id, rgname):
    """Delete the named resource group.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        rgname (str): Azure resource group name.

    Returns:
        HTTP response.
    """
    endpoint = ''.join([
        get_rm_endpoint(),
        '/subscriptions/', subscription_id,
        '/resourcegroups/', rgname,
        '?api-version=', RESOURCE_API,
    ])
    return do_delete(endpoint, access_token)
Delete the named resource group. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. rgname (str): Azure resource group name. Returns: HTTP response.
juraj-google-style
def parse_pyc_string(data):
    """Parse pyc data from a string.

    Args:
        data: pyc data

    Returns:
        An instance of pycnite.types.CodeTypeBase.
    """
    return pyc.loads(data)
Parse pyc data from a string. Args: data: pyc data Returns: An instance of pycnite.types.CodeTypeBase.
github-repos
def get_address_coords(self, address): url = ('https: r = requests.get(url) r.raise_for_status() results = r.json()['results'] lat = results[0]['geometry']['location']['lat'] lng = results[0]['geometry']['location']['lng'] return (lat, lng)
Use the google geocoder to get latitude and longitude for an address string Args: address: any address string Returns: A tuple of (lat,lng)
codesearchnet
def __init__(self, latent_size): super(ProbabilisticGrammarVariational, self).__init__() self.latent_size = latent_size self.encoder_net = tf.keras.Sequential([ tf.keras.layers.Conv1D(64, 3, padding="SAME"), tf.keras.layers.BatchNormalization(), tf.keras.layers.Activation(tf.nn....
Constructs a variational posterior for a probabilistic grammar. Args: latent_size: Number of dimensions in the latent code.
juraj-google-style
def resolve(node, source_info, graphs, resolver):
    """Perform type inference.

    Args:
        node: ast.AST
        source_info: transformer.SourceInfo
        graphs: Dict[ast.FunctionDef, cfg.Graph]
        resolver: Resolver

    Returns:
        ast.AST
    """
    visitor = FunctionVisitor(source_info, graphs, resolver)
    return visitor.visit(node)
Performs type inference. Args: node: ast.AST source_info: transformer.SourceInfo graphs: Dict[ast.FunctionDef, cfg.Graph] resolver: Resolver Returns: ast.AST
github-repos
def complain(distribution_name): try: pkg_resources.get_distribution(distribution_name) warnings.warn( "The {pkg} distribution is now obsolete. " "Please `pip uninstall {pkg}`. " "In the future, this warning will become an ImportError.".format( ...
Issue a warning if `distribution_name` is installed. In a future release, this method will be updated to raise ImportError rather than just send a warning. Args: distribution_name (str): The name of the obsolete distribution.
juraj-google-style
def create_migration_template(name): assert name, 'Name of the migration can not be empty.' from . import migrations package = migrations prefix = package.__name__ + '.' all_versions = [] for importer, modname, ispkg in pkgutil.iter_modules(package.__path__, prefix): vers...
Creates migration file. Returns created file name. Args: name (str): name of the migration. Returns: str: name of the migration file.
juraj-google-style
def ipv4_lstrip_zeros(address):
    """Strip leading zeros in each octet of an IPv4 address.

    Args:
        address (:obj:`str`): An IPv4 address.

    Returns:
        str: The modified IPv4 address.
    """
    # Any CIDR suffix on an octet is dropped; an all-zero octet collapses
    # to '0' rather than the empty string.
    return '.'.join(
        (octet.split('/')[0].lstrip('0') or '0')
        for octet in address.strip().split('.')
    )
The function to strip leading zeros in each octet of an IPv4 address. Args: address (:obj:`str`): An IPv4 address. Returns: str: The modified IPv4 address.
juraj-google-style
def std(x, axis=None, keepdims=False):
    """Standard deviation of a tensor, alongside the specified axis.

    It is an alias to `tf.math.reduce_std`.

    Args:
        x: A tensor or variable. It should have numerical dtypes. Boolean
            type inputs will be converted to float.
        axis: An integer, the axis to compute the standard deviation over.
            If `None` (the default), reduces all dimensions.
        keepdims: A boolean, whether to keep the reduced dimensions.

    Returns:
        A tensor with the standard deviation of elements of `x`.
    """
    if x.dtype.base_dtype == dtypes_module.bool:
        # reduce_std is undefined for booleans; cast to the default float type.
        x = math_ops.cast(x, floatx())
    return math_ops.reduce_std(x, axis=axis, keepdims=keepdims)
Standard deviation of a tensor, alongside the specified axis. It is an alias to `tf.math.reduce_std`. Args: x: A tensor or variable. It should have numerical dtypes. Boolean type inputs will be converted to float. axis: An integer, the axis to compute the standard deviation. If `None` (the default), reduces all dimen...
github-repos
def contains(self, time: datetime.datetime, inclusive: bool = True) -> bool:
    """Does the interval contain a momentary time?

    Args:
        time: the ``datetime.datetime`` to check.
        inclusive: use inclusive rather than exclusive range checks?
    """
    if inclusive:
        return self.start <= time <= self.end
    return self.start < time < self.end
Does the interval contain a momentary time? Args: time: the ``datetime.datetime`` to check inclusive: use inclusive rather than exclusive range checks?
juraj-google-style
def add(self, key, value): if isinstance(value, list): for val in value: self._add_arg_python(key, val) elif isinstance(value, dict): err = 'Dictionary types are not currently supported for field.' print('{}{}{}'.format(c.Style.BR...
Add CLI Arg to lists value. Args: key (string): The CLI Args key (e.g., --name). value (string): The CLI Args value (e.g., bob).
juraj-google-style
def gen_sl_transform_matricies(area_multiple): return [np.array(((i, j), (0, area_multiple / i))) for i in get_factors(area_multiple) for j in range(area_multiple
Generates the transformation matrices that convert a set of 2D vectors into a super lattice of integer area multiple as proven in Cassels: Cassels, John William Scott. An introduction to the geometry of numbers. Springer Science & Business Media, 2012. Args: area_multiple(int): integer multiple of unit cell area for...
juraj-google-style
def read(self, nodes=None, **kwargs):
    """Load datasets from the necessary reader.

    Args:
        nodes (iterable): DependencyTree Node objects. When ``None``,
            the leaves for every wishlist dataset not yet loaded are used.
        **kwargs: Keyword arguments to pass to the reader's `load` method.

    Returns:
        DatasetDict of loaded datasets
    """
    if nodes is None:
        # Only resolve datasets that were requested but not loaded yet.
        pending = self.wishlist - set(self.datasets.keys())
        nodes = self.dep_tree.leaves(nodes=pending)
    return self._read_datasets(nodes, **kwargs)
Load datasets from the necessary reader. Args: nodes (iterable): DependencyTree Node objects **kwargs: Keyword arguments to pass to the reader's `load` method. Returns: DatasetDict of loaded datasets
juraj-google-style
def cctop_submit(seq_str): url = 'http: r = requests.post(url) jobid = r.text.split('ID: ')[1] return jobid
Submit a protein sequence string to CCTOP and return the job ID. Args: seq_str (str): Protein sequence as a string Returns: str: Job ID on the CCTOP server
codesearchnet
def KernelVersion():
    """Gets the kernel version as a string, e.g. "5.1.2600".

    Returns:
        The kernel version, or "unknown" in the case of failure.
    """
    version_info = RtlOSVersionInfoExw()
    try:
        RtlGetVersion(version_info)
    except OSError:
        return 'unknown'
    return '%d.%d.%d' % (
        version_info.dwMajorVersion,
        version_info.dwMinorVersion,
        version_info.dwBuildNumber)
Gets the kernel version as string, eg. "5.1.2600". Returns: The kernel version, or "unknown" in the case of failure.
codesearchnet
def __init__(self, resolver_context, file_system, path_spec, is_root=False): location = getattr(path_spec, 'location', None) is_windows_device = False if platform.system() == 'Windows' and location: try: is_windows_device = pysmdev.check_device(location) except I...
Initializes a file entry. Args: resolver_context (Context): resolver context. file_system (FileSystem): file system. path_spec (PathSpec): path specification. is_root (Optional[bool]): True if the file entry is the root file entry of the corresponding file system. Raises: BackEndError: If an OSError comes up it is ca...
juraj-google-style
def IsNTFS(self):
    """Determines if the file system is NTFS.

    Returns:
        bool: True if the file system is NTFS.
    """
    fs_type = self.GetFsType()
    ntfs_types = (pytsk3.TSK_FS_TYPE_NTFS, pytsk3.TSK_FS_TYPE_NTFS_DETECT)
    return fs_type in ntfs_types
Determines if the file system is NTFS. Returns: bool: True if the file system is NTFS.
codesearchnet
def _package_path(package):
    """Returns the full path to the default package configuration file.

    Args:
        package (str): name of the python package to return a path for.

    Returns:
        str: path '<config_dir>/<package>.cfg'.
    """
    from os import path
    return path.join(config_dir(), '{}.cfg'.format(package))
Returns the full path to the default package configuration file. Args: package (str): name of the python package to return a path for.
codesearchnet
def prepend_block(self, node, reverse=False):
    """Prepend a statement to the current block.

    Args:
        node: The statement to prepend.
        reverse: When called multiple times, this flag determines whether
            the statement should be prepended or appended to the already
            inserted statements.

    Raises:
        ValueError: If the given node is not a statement.
    """
    if not isinstance(node, grammar.STATEMENTS):
        # Include the offending type so the failure is diagnosable;
        # the original raised a bare, message-less ValueError.
        raise ValueError(
            'Expected a statement node, got %s' % type(node).__name__)
    if reverse:
        self.to_prepend_block[-1].appendleft(node)
    else:
        self.to_prepend_block[-1].append(node)
Prepend a statement to the current block. Args: node: The statement to prepend. reverse: When called multiple times, this flag determines whether the statement should be prepended or appended to the already inserted statements. Raises: ValueError: If the given node is not a statement.
codesearchnet
def __init__(self, swap, expiry_date=None, dtype=None, name=None):
    """Initialize a batch of European swaptions.

    Args:
        swap: An instance of `InterestRateSwap` specifying the interest
            rate swaps underlying the swaptions. The batch size of the
            swaptions being created is the same as the batch size of
            the `swap`.
        expiry_date: An optional rank 1 `DateTensor` specifying the
            expiry dates of the swaptions; converted via
            `dates.convert_to_date_tensor`.
        dtype: Optional `tf.DType` stored for later computations.
        name: Optional name for the op scope; defaults to 'swaption'.
    """
    self._name = name or 'swaption'
    with tf.name_scope(self._name):
        self._swap = swap
        self._dtype = dtype
        self._expiry_date = dates.convert_to_date_tensor(expiry_date)
Initialize a batch of European swaptions. Args: swap: An instance of `InterestRateSwap` specifying the interest rate swaps underlying the swaptions. The batch size of the swaptions being created would be the same as the batch size of the `swap`. expiry_date: An optional rank 1 `DateTensor` specifying the expiry dates ...
github-repos
def _music_lib_search(self, search, start, max_items): response = self.contentDirectory.Browse([('ObjectID', search), ('BrowseFlag', 'BrowseDirectChildren'), ('Filter', '*'), ('StartingIndex', start), ('RequestedCount', max_items), ('SortCriteria', '')]) metadata = {} for tag in ['NumberReturned', 'TotalMat...
Perform a music library search and extract search numbers. You can get an overview of all the relevant search prefixes (like 'A:') and their meaning with the request: .. code :: response = device.contentDirectory.Browse([ ('ObjectID', '0'), ('BrowseFlag', 'BrowseDirectChildren'), ('Filter', '*'), ('StartingIndex', 0...
codesearchnet
def get_metric_values(self): group_names = self.properties.get('metric-groups', None) if (not group_names): group_names = self.manager.get_metric_values_group_names() ret = [] for group_name in group_names: try: mo_val = self.manager.get_metric_values(group_name) ...
Get the faked metrics, for all metric groups and all resources that have been prepared on the manager object of this context object. Returns: iterable of tuple (group_name, iterable of values): The faked metrics, in the order they had been added, where: group_name (string): Metric group name. values (:class:~zhmccl...
codesearchnet
def emit_code_from_ir(sql_query_tree, compiler_metadata): context = CompilationContext( query_path_to_selectable=dict(), query_path_to_location_info=sql_query_tree.query_path_to_location_info, query_path_to_output_fields=sql_query_tree.query_path_to_output_fields, query_path_to_...
Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query
juraj-google-style
def get_paginated_catalogs(self, querystring=None):
    """Return a paginated list of course catalogs, including name and ID.

    Args:
        querystring: Optional query parameters forwarded to the endpoint.

    Returns:
        dict: Paginated response containing catalogs available for the user.
    """
    endpoint = self.CATALOGS_ENDPOINT
    return self._load_data(
        endpoint,
        default=[],
        querystring=querystring,
        traverse_pagination=False,
        many=False,
    )
Return a paginated list of course catalogs, including name and ID. Returns: dict: Paginated response containing catalogs available for the user.
codesearchnet
def _find_mapreduce_yaml(start, checked):
    """Traverse the directory tree upward from `start` looking for mapreduce.yaml.

    Stops when a directory already present in `checked` is encountered, which
    both guarantees termination and avoids rechecking directories across calls.

    Args:
        start: the path to start in and work upward from.
        checked: set of directories already examined; mutated in place.

    Returns:
        The path of the mapreduce.yaml file, or None if it was not found.
    """
    # Renamed from 'dir', which shadowed the builtin of the same name.
    current = start
    while current not in checked:
        checked.add(current)
        for mr_yaml_name in MR_YAML_NAMES:
            yaml_path = os.path.join(current, mr_yaml_name)
            if os.path.exists(yaml_path):
                return yaml_path
        current = os.path.dirname(current)
    return None
Traverse the directory tree identified by start until a directory already in checked is encountered or the path of mapreduce.yaml is found. Checked is present both to make loop termination easy to reason about and so that the same directories do not get rechecked. Args: start: the path to start in and work upward fro...
juraj-google-style
def set_calibration(self, enabled, imus):
    """Set calibration state for attached IMUs.

    Args:
        enabled (bool): True to apply calibration to IMU data (if
            available), False to output uncalibrated data.
        imus (list): indices of the IMUs the calibration state should be
            set on. An empty list (or [0, 1, 2, 3, 4]) applies to all
            IMUs, [0, 1] only to the first two, etc.
    """
    if len(imus) == 0:
        imus = list(range(MAX_IMUS))
    for index in imus:
        if index < 0 or index >= MAX_IMUS:
            # logger.warn() is a deprecated alias of warning(); also use
            # lazy %-style args instead of eager str.format in log calls.
            logger.warning('Invalid IMU index %s in set_calibration', index)
            continue
        self.imus[index]._use_calibration = enabled
Set calibration state for attached IMUs. Args: enabled (bool): True to apply calibration to IMU data (if available). False to output uncalibrated data. imus (list): indicates which IMUs the calibration state should be set on. Empty list or [0, 1, 2, 3, 4] will apply to all IMUs, [0, 1] only to first 2 IMUs, etc.
codesearchnet
def write_byte(self, value):
    """Write a single byte to the stream.

    A ``str`` is UTF-8 encoded before writing; an ``int`` must be in
    range 0-255 and is written as one byte; ``bytes`` are written as-is.

    Args:
        value (bytes, str or int): value to write to the stream.

    Raises:
        TypeError: if value is not bytes, str or int. (The original
            implementation silently ignored unsupported types.)
    """
    if isinstance(value, bytes):
        self.stream.write(value)
    elif isinstance(value, str):
        self.stream.write(value.encode('utf-8'))
    elif isinstance(value, int):
        self.stream.write(bytes([value]))
    else:
        raise TypeError(
            'value must be bytes, str or int, got %s' % type(value).__name__)
Write a single byte to the stream. Args: value (bytes, str or int): value to write to the stream.
codesearchnet
def SetPreferredLanguageIdentifier(self, language_identifier): if not isinstance(language_identifier, py2to3.STRING_TYPES): raise ValueError('Language identifier is not a string.') values = language_ids.LANGUAGE_IDENTIFIERS.get( language_identifier.lower(), None) if not values: rai...
Sets the preferred language identifier. Args: language_identifier (str): language identifier string such as "en-US" for US English or "is-IS" for Icelandic. Raises: KeyError: if the language identifier is not defined. ValueError: if the language identifier is not a string type.
juraj-google-style
def combine_with_wd_noise(f_n, amp_n, f_n_wd, amp_n_wd):
    """Combine a noise curve with white dwarf background noise.

    At each frequency of the noise curve, the larger of the two
    amplitudes (instrument noise vs. wd background, resampled onto the
    noise-curve frequencies) is kept.

    Args:
        f_n (float array): Frequencies of noise curve.
        amp_n (float array): Amplitude values of noise curve.
        f_n_wd (float array): Frequencies of wd noise.
        amp_n_wd (float array): Amplitude values of wd noise.

    Returns:
        tuple: (f_n, combined amplitudes).
    """
    # Resample the wd background onto the noise-curve frequency grid;
    # out-of-range frequencies get a negligible 1e-30 amplitude.
    resample = interpolate.interp1d(
        f_n_wd, amp_n_wd, bounds_error=False, fill_value=1e-30)
    wd_amp = resample(f_n)
    keep = amp_n >= wd_amp
    replace = amp_n < wd_amp
    return f_n, amp_n * keep + wd_amp * replace
Combine noise with wd noise. Combines noise and white dwarf background noise based on greater amplitude value at each noise curve step. Args: f_n (float array): Frequencies of noise curve. amp_n (float array): Amplitude values of noise curve. f_n_wd (float array): Frequencies of wd noise. amp_n_wd (float array): Ampl...
juraj-google-style
def open(self, mode=None):
    """Open the container file.

    Args:
        mode (str): Either 'r' for read-only, 'w' for truncate and write
            or 'a' for append. If ``None``, uses ``self.mode``.

    Raises:
        ValueError: if an explicit mode outside 'r'/'w'/'a' is given.
    """
    if mode is None:
        mode = self.mode
    elif mode not in ('r', 'w', 'a'):
        raise ValueError("Invalid mode! Modes: ['a', 'r', 'w']")
    # Already-open files are left untouched; the mode is only applied
    # when the handle is first created.
    if self._file is None:
        self._file = h5py.File(self.path, mode=mode)
Open the container file. Args: mode (str): Either 'r' for read-only, 'w' for truncate and write or 'a' for append. (default: 'a'). If ``None``, uses ``self.mode``.
juraj-google-style
def write_gff_file(self, outfile, force_rerun=False):
    """Write a GFF file for the protein features.

    ``features`` will now load directly from this file, since
    ``feature_path`` is pointed at it.

    Args:
        outfile (str): Path to the GFF file to be written.
        force_rerun (bool): If an existing file should be overwritten.
    """
    if ssbio.utils.force_rerun(outfile=outfile, flag=force_rerun):
        with open(outfile, 'w') as handle:
            GFF.write([self], handle)
    self.feature_path = outfile
Write a GFF file for the protein features, ``features`` will now load directly from this file. Args: outfile (str): Path to new FASTA file to be written to force_rerun (bool): If an existing file should be overwritten
juraj-google-style
def get_sari_score(source_ids, prediction_ids, list_of_targets, max_gram_size=4, beta_for_deletion=0): addition_scores = [] keep_scores = [] deletion_scores = [] for n in range(1, (max_gram_size + 1)): source_counts = _get_ngram_counter(source_ids, n) prediction_counts = _get_ngram_count...
Compute the SARI score for a single prediction and one or more targets. Args: source_ids: a list / np.array of SentencePiece IDs prediction_ids: a list / np.array of SentencePiece IDs list_of_targets: a list of target ID lists / np.arrays max_gram_size: int. largest n-gram size we care about (e.g. 3 for unigrams, bigr...
codesearchnet
def get_text_features(self, input_ids: Optional[torch.Tensor]=None, attention_mask: Optional[torch.Tensor]=None, token_type_ids: Optional[torch.Tensor]=None, position_ids: Optional[torch.Tensor]=None, output_attentions: Optional[bool]=None, output_hidden_states: Optional[bool]=None, return_dict: Optional[bool]=None) ->...
Returns: text_features (`torch.FloatTensor` of shape `(batch_size, output_dim`): The text embeddings obtained by applying the projection layer to the final [CLS] hidden state of Text-Transformer. Examples: ```python >>> from transformers import AutoTokenizer, ChineseCLIPModel >>> model = ChineseCLIPModel.from_pretra...
github-repos
def add_update_users(self, users, capacity=None):
    """Add new or update existing users in the organization.

    Capacity (e.g. member, admin) must be supplied either within each
    user entry or via the ``capacity`` argument.

    Args:
        users (list): user ids or user metadata entries to add/update.
        capacity (str): capacity applied to each user
            (takes precedence when given).

    Raises:
        HDXError: if ``users`` is not a list.
    """
    if not isinstance(users, list):
        raise HDXError('Users should be a list!')
    for member in users:
        self.add_update_user(member, capacity)
Add new or update existing users in organization with new metadata. Capacity eg. member, admin must be supplied either within the User object or dictionary or using the capacity argument (which takes precedence). Args: users (List[Union[User,Dict,str]]): A list of either user ids or users metadata from User objects or...
juraj-google-style
def replace_drive_enclosure(self, information):
    """Initiate replacement of a physically swapped drive enclosure.

    Enables the new drive enclosure to take over as a replacement for
    the prior one; the request must specify the serial numbers of both
    the original enclosure and its replacement.

    Args:
        information: request payload (including both serial numbers).

    Returns:
        The response from the replacement request.
    """
    endpoint = '{}/replaceDriveEnclosure'.format(self.data['uri'])
    response = self._helper.create(information, endpoint)
    # Re-sync local state with the server after the operation.
    self.refresh()
    return response
When a drive enclosure has been physically replaced, initiate the replacement operation that enables the new drive enclosure to take over as a replacement for the prior drive enclosure. The request requires specification of both the serial numbers of the original drive enclosure and its replacement to be provided. Arg...
juraj-google-style
def load(nifti_filename): nifti_filename = os.path.expanduser(nifti_filename) try: data = nib.load(nifti_filename) img = data.get_data() except Exception as e: raise ValueError("Could not load file {0} for conversion." .format(nifti_filename)) ...
Import a nifti file into a numpy array. TODO: Currently only transfers raw data for compatibility with annotation and ND formats Arguments: nifti_filename (str): A string filename of a nifti datafile Returns: A numpy array with data from the nifti file
juraj-google-style
def confirm(question): if FORCE_YES: return True while True: answer = input((question + ' <Yes|No>')).lower() if ((answer == 'yes') or (answer == 'y')): confirmed = True break if ((answer == 'no') or (answer == 'n')): confirmed = False ...
Ask the user whether they really want something to happen. Args: question(str): What can happen Returns: (boolean): Confirmed or not
codesearchnet
def set_size(self, height=220, width=350, height_threshold=120, width_threshold=160):
    """Set the size of the chart.

    Args:
        height (int): height in pixels.
        width (int): width in pixels.
        height_threshold (int): height threshold in pixels.
        width_threshold (int): width threshold in pixels.
    """
    settings = (
        ('height', height),
        ('width', width),
        ('small_height_threshold', height_threshold),
        ('small_width_threshold', width_threshold),
    )
    for key, value in settings:
        self.set_integer(key, value)
Set the size of the chart. Args: height (int): height in pixels. width (int): width in pixels. height_threshold (int): height threshold in pixels width_threshold (int): width threshold in pixels
codesearchnet
def _lookup_tensor_name(self, tensor):
    """Look up the original name of a graph tensor.

    Maps a debugger-generated tensor's name back to the name of the
    instrumented tensor it aliases, via ``self._tensor_aliases``.
    Names with no alias entry are returned unchanged.

    Args:
        tensor: The graph tensor to look up the name for.

    Returns:
        Name of the original instrumented tensor as known to the
        debugger, or the tensor's own name if it has no alias.
    """
    name = tensor.name
    return self._tensor_aliases.get(name, name)
Look up the name of a graph tensor. This method maps the name of a debugger-generated Identity or DebugIdentityV2 tensor to the name of the original instrumented tensor, if `tensor` is such a debugger-created tensor. Otherwise, it returns the name of `tensor` as is. Args: tensor: The graph tensor to look up the name ...
github-repos
def get_variantid(variant_obj, family_id): new_id = parse_document_id( chrom=variant_obj['chromosome'], pos=str(variant_obj['position']), ref=variant_obj['reference'], alt=variant_obj['alternative'], variant_type=variant_obj['variant_type'], case_id=family_id, ...
Create a new variant id. Args: variant_obj(dict) family_id(str) Returns: new_id(str): The new variant id
juraj-google-style
def __is_json_error(self, status, headers):
    """Determine if a response is an error with a JSON content type.

    Args:
        status: HTTP status string; only statuses starting with '400'
            are treated as errors here.
        headers: Dictionary of (lowercase) header name to value.

    Returns:
        True if the response was a 400 error with a JSON content type,
        else False.
    """
    content_header = headers.get('content-type', '')
    # Keep only the media type, dropping parameters such as charset.
    # (Replaces cgi.parse_header, which was removed from the standard
    # library in Python 3.13 per PEP 594.)
    content_type = content_header.split(';', 1)[0].strip()
    return (status.startswith('400') and
            content_type.lower() in _ALL_JSON_CONTENT_TYPES)
Determine if response is an error. Args: status: HTTP status code. headers: Dictionary of (lowercase) header name to value. Returns: True if the response was an error, else False.
codesearchnet
def get_size(self, value=None): if value is None: return sum(cls_val.get_size(obj_val) for obj_val, cls_val in self._get_attributes()) elif isinstance(value, type(self)): return value.get_size() else: msg = "{} is not an instanc...
Calculate the total struct size in bytes. For each struct attribute, sum the result of each one's ``get_size()`` method. Args: value: In structs, the user can assign other value instead of a class' instance. Returns: int: Total number of bytes used by the struct. Raises: Exception: If the struct is not valid.
juraj-google-style
def _GetNetworkInfo(self, signatures_key): network_info = {} for category in signatures_key.GetSubkeys(): for signature in category.GetSubkeys(): profile_guid_value = signature.GetValueByName('ProfileGuid') if profile_guid_value: profile_guid = profile_guid_value.GetDataAsOb...
Retrieves the network info within the signatures subkey. Args: signatures_key (dfwinreg.WinRegistryKey): a Windows Registry key. Returns: dict[str, tuple]: a tuple of default_gateway_mac and dns_suffix per profile identifier (GUID).
juraj-google-style
def setup(self, check_all=None, exclude_private=None, exclude_uppercase=None, exclude_capitalized=None, exclude_unsupported=None, excluded_names=None, minmax=None, dataframe_format=None): assert self.shellwidget is not None self.check_all = che...
Setup the namespace browser with provided settings. Args: dataframe_format (string): default floating-point format for DataFrame editor
juraj-google-style
def _verify_static_batch_size_equality(tensors, columns): expected_batch_size = None for i in range(0, len(tensors)): if tensors[i].shape.dims[0].value is not None: if expected_batch_size is None: bath_size_column_index = i expected_batch_size = tensors[i].sha...
Validates that the first dim (batch size) of all tensors are equal or None. Args: tensors: list of tensors to check. columns: list of feature columns matching tensors. Will be used for error messaging. Raises: ValueError: if one of the tensors has a variant batch size
github-repos