code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _viz_prototype(self, vis_fn): def _viz_logger(*args, **kwargs): self.win = vis_fn(*args, win=self.win, env=self.env, opts=self.opts, **kwargs) return _viz_logg...
Outputs a function which will log the arguments to Visdom in an appropriate way. Args: vis_fn: A function, such as self.vis.image
juraj-google-style
def get_golden_chunk_records():
    """Return up to ``FLAGS.window_size`` golden-chunk paths to train on.

    Returns:
        A list of golden chunk paths, sorted descending (newest first),
        truncated to at most ``FLAGS.window_size`` entries.
    """
    glob_pattern = os.path.join(fsdb.golden_chunk_dir(), '*.zz')
    chunk_paths = sorted(tf.gfile.Glob(glob_pattern), reverse=True)
    return chunk_paths[:FLAGS.window_size]
Return up to num_records of golden chunks to train on. Returns: A list of golden chunks up to num_records in length, sorted by path.
codesearchnet
def print_debug(*args, **kwargs):
    """Print if and only if the debug flag is set in the config.yaml file.

    Args:
        *args: Positional arguments forwarded to ``print``.
        **kwargs: Keyword arguments forwarded to ``print``.
    """
    # Truthiness check instead of `== True`: accepts any truthy config value
    # (e.g. 1, "yes") while still defaulting to off when the key is missing.
    if WTF_CONFIG_READER.get("debug", False):
        print(*args, **kwargs)
Print if and only if the debug flag is set true in the config.yaml file. Args: args : var args of print arguments.
juraj-google-style
def delete_idx_status(self, rdf_class): sparql_template = rdf_types = [rdf_class.uri] + [item.uri for item in rdf_class.subclasses] sparql = sparql_template.format("\n\t\t".join(rdf_types)) log.warn("Deleting index status for %s", rdf_cla...
Removes all of the index status triples from the datastore Args: ----- rdf_class: The class of items to remove the status from
juraj-google-style
def __init__(self, volume, layers=None): if isinstance(volume, string_types): volume = nb.load(volume) self.volume = volume data = self.volume.get_data() self.dims = data.shape self.vox_dims = self.get_header().get_zooms() self.full = np.float64(data....
Initialize a new Masker. Args: volume: A volume indicating the global space within which all subsequent layers must reside. Any voxel in the mask with a non-zero valid is considered valid for analyses. Can be either an image filename or a NiBabel image. layers: Optional masking layers to add; see docstring for add().
juraj-google-style
def lu_slogdet(LU): r LU = (asarray(LU[0], float), asarray(LU[1], float)) adet = _sum(log(_abs(LU[0].diagonal()))) s = prod(sign(LU[0].diagonal())) nrows_exchange = LU[1].size - _sum(LU[1] == arange(LU[1].size, dtype="int32")) odd = nrows_exchange % 2 == 1 if odd: s *= -1.0 r...
r"""Natural logarithm of a LU decomposition. Args: LU (tuple): LU decomposition. Returns: tuple: sign and log-determinant.
juraj-google-style
def reindex(self):
    """Squeeze the indices of this blackboxing to ``0..n``.

    Returns:
        Blackbox: a new, reindexed |Blackbox|.

    Example:
        >>> partition = ((3,), (2, 4))
        >>> output_indices = (2, 3)
        >>> blackbox = Blackbox(partition, output_indices)
        >>> blackbox.reindex()
        Blackbox(partition=((1,), (0, 2)), output_indices=(0, 1))
    """
    # Map each micro index to its squeezed counterpart.
    index_map = dict(zip(self.micro_indices, reindex(self.micro_indices)))
    new_partition = tuple(
        tuple(index_map[micro] for micro in group)
        for group in self.partition
    )
    new_outputs = tuple(index_map[i] for i in self.output_indices)
    return Blackbox(new_partition, new_outputs)
Squeeze the indices of this blackboxing to ``0..n``. Returns: Blackbox: a new, reindexed |Blackbox|. Example: >>> partition = ((3,), (2, 4)) >>> output_indices = (2, 3) >>> blackbox = Blackbox(partition, output_indices) >>> blackbox.reindex() Blackbox(partition=((1,), (0, 2)), output_indices=(0, 1))
codesearchnet
def create_volume(self, volume_name: str, driver_spec: str=None): if driver_spec: driver = driver_spec else: driver = 'local' if (not self._manager): raise RuntimeError('Services can only be deleted on swarm manager nodes') self._client.volumes.create(name=volume_name, driver=dri...
Create new docker volumes. Only the manager nodes can create a volume Args: volume_name (string): Name for the new docker volume driver_spec (string): Driver for the docker volume
codesearchnet
def write_double(self, value, little_endian=True):
    """Pack the value as a double and write 8 bytes to the stream.

    Args:
        value (number): the value to write to the stream.
        little_endian (bool): specify the endianness; defaults to little
            endian.

    Returns:
        int: the number of bytes written.
    """
    prefix = "<" if little_endian else ">"
    return self.pack('%sd' % prefix, value)
Pack the value as a double and write 8 bytes to the stream. Args: value (number): the value to write to the stream. little_endian (bool): specify the endianness. (Default) Little endian. Returns: int: the number of bytes written.
juraj-google-style
def get_signature_def_map(saved_model_dir, tag_set):
    """Gets the SignatureDef map from a MetaGraphDef in a SavedModel.

    Args:
        saved_model_dir: Directory containing the SavedModel to inspect or
            execute.
        tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef
            map, in string format.

    Returns:
        The SignatureDef map of the selected MetaGraphDef.
    """
    meta_graph_def = saved_model_utils.get_meta_graph_def(
        saved_model_dir, tag_set)
    return meta_graph_def.signature_def
Gets SignatureDef map from a MetaGraphDef in a SavedModel. Returns the SignatureDef map for the given tag-set in the SavedModel directory. Args: saved_model_dir: Directory containing the SavedModel to inspect or execute. tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in string format, separat...
github-repos
def _geodetic_to_cartesian(cls, lat, lon, alt): C = Earth.r / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2) S = Earth.r * (1 - Earth.e ** 2) / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2) r_d = (C + alt) * np.cos(lat) r_k = (S + alt) * np.sin(lat) norm = np.sqrt(r_d ** 2 + r_k...
Conversion from latitude, longitude and altitude coordinates to cartesian with respect to an ellipsoid Args: lat (float): Latitude in radians lon (float): Longitude in radians alt (float): Altitude to sea level in meters Return: numpy.array: 3D element (in meters)
juraj-google-style
def _eager_metrics_fn(model, outputs, targets, sample_weights=None, masks=None): outputs = nest.flatten(outputs) targets = nest.flatten(targets) metric_results = [] if targets: if len(model._targets) != len(targets): new_targets = [None if t is None else targets.pop(0) for t in model...
Calculates the metrics for each output of the given model. Args: model: The model on which metrics are being calculated. outputs: The outputs of the given model. targets: The predictions or targets of the given model. sample_weights: Optional list of sample weights for each output. masks: Optional list of masks for ea...
github-repos
def Write(self, string): try: encoded_string = codecs.encode(string, self._encoding, self._errors) except UnicodeEncodeError: if self._errors == 'strict': logger.error( 'Unable to properly write output due to encoding error. ' 'Switching to error to...
Writes a string to the output. Args: string (str): output.
juraj-google-style
def get_room_messages(self, room_id, token, direction, limit=10, to=None): query = { "roomId": room_id, "from": token, "dir": direction, "limit": limit, } if to: query["to"] = to return self._send("GET", "/rooms/{}/me...
Perform GET /rooms/{roomId}/messages. Args: room_id (str): The room's id. token (str): The token to start returning events from. direction (str): The direction to return events from. One of: ["b", "f"]. limit (int): The maximum number of events to return. to (str): The token to stop returning events at.
juraj-google-style
def signbit(x):
    """Return the sign bit of the elements of `x`.

    The output boolean tensor contains `True` where the sign of `x` is
    negative, and `False` otherwise.

    Args:
        x: Input tensor.

    Returns:
        Output boolean tensor of same shape as `x`.
    """
    # Eager path first; symbolic tensors go through the op object.
    if not any_symbolic_tensors((x,)):
        return backend.numpy.signbit(x)
    return Signbit().symbolic_call(x)
Return the sign bit of the elements of `x`. The output boolean tensor contains `True` where the sign of `x` is negative, and `False` otherwise. Args: x: Input tensor. Returns: Output boolean tensor of same shape as `x`.
github-repos
def __init__(self, model):
    """Create a new base event.

    Args:
        model (Model): a Bokeh model to register event callbacks on, or
            None for an event not associated with any model.
    """
    # Record only the model's id; None when no model is given.
    self._model_id = model.id if model is not None else None
Create a new base event. Args: model (Model) : a Bokeh model to register event callbacks on
juraj-google-style
def add_exit_callback_to_default_func_graph(fn) -> None: default_graph = get_default_graph() if not default_graph._building_function: raise RuntimeError('Cannot add scope exit callbacks when not building a function. Default graph: {}'.format(default_graph)) default_graph._add_scope_exit_callback(fn...
Add a callback to run when the default function graph goes out of scope. Usage: ```python @tf.function def fn(x, v): expensive = expensive_object(v) add_exit_callback_to_default_func_graph(lambda: expensive.release()) return g(x, expensive) fn(x=tf.constant(...), v=...) # `expensive` has been released. ``` Args: fn...
github-repos
def minimize(self, session=None, feed_dict=None, fetches=None, step_callback=None, loss_callback=None, **run_kwargs): session = (session or ops.get_default_session()) feed_dict = (feed_dict or {}) fetches = (fetches or []) loss_callback = (loss_callback or (lambda *fetches: None)) step_callback = (s...
Minimize a scalar `Tensor`. Variables subject to optimization are updated in-place at the end of optimization. Note that this method does *not* just return a minimization `Op`, unlike `Optimizer.minimize()`; instead it actually performs minimization by executing commands to control a `Session`. Args: session: A `Ses...
codesearchnet
def _extractPayload(response, slaveaddress, mode, functioncode): BYTEPOSITION_FOR_ASCII_HEADER = 0 BYTEPOSITION_FOR_SLAVEADDRESS = 0 BYTEPOSITION_FOR_FUNCTIONCODE = 1 NUMBER_OF_RESPONSE_STARTBYTES = 2 NUMBER_OF_CRC_BYTES = 2 NUMBER_OF_LRC_BYTES = 1 BITNUMBER_FUNCTIONCODE_ERRORINDICATION = 7 ...
Extract the payload data part from the slave's response. Args: * response (str): The raw response byte string from the slave. * slaveaddress (int): The address of the slave. Used here for error checking only. * mode (str): The modbus protocol mode (MODE_RTU or MODE_ASCII) * functioncode (int): Used here for error checki...
codesearchnet
def list(self, pattern='*'):
    """Returns a list of groups that match the filters.

    Args:
        pattern: An optional Unix shell-style wildcard pattern matched
            against each group's display name, e.g. ``"Production*"``.

    Returns:
        A list of Group objects that match the filters.
    """
    if self._group_dict is None:
        # Lazily fetch and cache all groups, keyed by group id.
        self._group_dict = collections.OrderedDict(
            (group.id, group) for group in self._client.list_groups())
    matches = []
    for group in self._group_dict.values():
        if fnmatch.fnmatch(group.display_name, pattern):
            matches.append(group)
    return matches
Returns a list of groups that match the filters. Args: pattern: An optional pattern to filter the groups based on their display name. This can include Unix shell-style wildcards. E.g. ``"Production*"``. Returns: A list of Group objects that match the filters.
juraj-google-style
def _get_rest_doc(self, request, start_response): api = request.body_json['api'] version = request.body_json['version'] generator = discovery_generator.DiscoveryGenerator(request=request) services = [s for s in self._backend.api_services if ((s.api_info.name == api) and (s.api_info.api_version == versio...
Sends back HTTP response with API directory. This calls start_response and returns the response body. It will return the discovery doc for the requested api/version. Args: request: An ApiRequest, the transformed request sent to the Discovery API. start_response: A function with semantics defined in PEP-333. Returns...
codesearchnet
def _data_from_df(df): _df = df.copy() if isinstance(df.columns, pd.MultiIndex): try: _df.columns = ['_'.join(col) for col in _df.columns.values] except TypeError: raise TypeError('Could not flatten MultiIndex columns. ' ...
Create a ``dict`` of columns from a Pandas ``DataFrame``, suitable for creating a ColumnDataSource. Args: df (DataFrame) : data to convert Returns: dict[str, np.array]
juraj-google-style
def cdot(L, out=None): L = asarray(L, float) layout_error = 'Wrong matrix layout.' if (L.ndim != 2): raise ValueError(layout_error) if (L.shape[0] != L.shape[1]): raise ValueError(layout_error) if (out is None): out = empty((L.shape[0], L.shape[1]), float) return einsum('...
r"""Product of a Cholesky matrix with itself transposed. Args: L (array_like): Cholesky matrix. out (:class:`numpy.ndarray`, optional): copy result to. Returns: :class:`numpy.ndarray`: :math:`\mathrm L\mathrm L^\intercal`.
codesearchnet
def _sendMouseEvent(ev, x, y, dwData=0): assert x != None and y != None, 'x and y cannot be set to None' width, height = _size() convertedX = 65536 * x convertedY = 65536 * y ctypes.windll.user32.mouse_event(ev, ctypes.c_long(convertedX)...
The helper function that actually makes the call to the mouse_event() win32 function. Args: ev (int): The win32 code for the mouse event. Use one of the MOUSEEVENTF_* constants for this argument. x (int): The x position of the mouse event. y (int): The y position of the mouse event. dwData (int): The argument for mous...
juraj-google-style
def get_type(name, env, non_generic):
    """Get the type of identifier ``name`` from the type environment ``env``.

    Args:
        name: The identifier name.
        env: The type environment mapping from identifier names to types.
        non_generic: A set of non-generic TypeVariables.

    Returns:
        The resolved type, or a fresh TypeVariable when the symbol is
        undefined (a warning is printed in that case).
    """
    if name not in env:
        # Undefined symbol: warn and fall back to a fresh type variable.
        print('W: Undefined symbol {0}'.format(name))
        return TypeVariable()
    bound = env[name]
    if isinstance(bound, MultiType):
        return clone(bound)
    return fresh(bound, non_generic)
Get the type of identifier name from the type environment env. Args: name: The identifier name env: The type environment mapping from identifier names to types non_generic: A set of non-generic TypeVariables Raises: ParseError: Raised if name is an undefined symbol in the type environment.
codesearchnet
def _stride(stride_spec): if (stride_spec is None): return [1, 1, 1, 1] elif isinstance(stride_spec, tf.compat.integral_types): return [1, stride_spec, stride_spec, 1] elif (len(stride_spec) == 1): return [1, stride_spec[0], stride_spec[0], 1] elif (len(stride_spec) == 2): ...
Expands the stride spec into a length 4 list. Args: stride_spec: If length 0, 1 or 2 then assign the inner dimensions, otherwise return stride_spec if it is length 4. Returns: A length 4 list.
codesearchnet
def parse_sv_frequencies(variant): frequency_keys = ['clingen_cgh_benignAF', 'clingen_cgh_benign', 'clingen_cgh_pathogenicAF', 'clingen_cgh_pathogenic', 'clingen_ngi', 'clingen_ngiAF', 'swegen', 'swegenAF', 'decipherAF', 'decipher'] sv_frequencies = {} for key in frequency_keys: value = variant.INFO...
Parsing of some custom sv frequencies These are very specific at the moment, this will hopefully get better over time when the field of structural variants is more developed. Args: variant(cyvcf2.Variant) Returns: sv_frequencies(dict)
codesearchnet
def prompt(self, message, text_input=False, timeout_s=None, cli_color=''):
    """Display a prompt and wait for a response.

    Args:
        message: A string to be presented to the user.
        text_input: A boolean indicating whether the user must respond
            with text.
        timeout_s: Seconds to wait before raising a PromptUnansweredError.
        cli_color: An ANSI color code, or the empty string.

    Returns:
        The user's response.
    """
    self.start_prompt(message, text_input, cli_color)
    response = self.wait_for_prompt(timeout_s)
    return response
Display a prompt and wait for a response. Args: message: A string to be presented to the user. text_input: A boolean indicating whether the user must respond with text. timeout_s: Seconds to wait before raising a PromptUnansweredError. cli_color: An ANSI color code, or the empty string. Returns: A string response, or...
codesearchnet
def __init__(self, pos_filename, interval=2): if not pos_filename: pos_filename = os.path.join(os.getcwd(), 'mysqlbinlog2blinker.binlog.pos') self.pos_storage_filename = pos_filename assert self.pos_storage_filename self.interv...
Create instance of FileBasedBinlogPosMemory Args: pos_filename (str|None): position storage file. None creates *mysqlbinlog2blinker.binlog.pos* in the current working dir interval (float): the interval in seconds
juraj-google-style
def shapes_match(a, b): if isinstance(a, (tuple, list)) and isinstance(b, (tuple, list)): if len(a) != len(b): return False return all([shapes_match(ia, ib) for ia, ib in zip(a, b)]) elif isinstance(a, dict) and isinstance(b, dict): if len(a) != len(b): return False match = True f...
Recursively check if shapes of object `a` and `b` match. Will walk lists, tuples and dicts. Args: a: object of type (numpy.ndarray,tf.Tensor,list,tuple,dict) to check for matching shapes against `b`. b: object to check for matching shape against `a`. Returns: A boolean indicating whether the shapes of `a` and `b` ma...
juraj-google-style
def bootstrap_results(self, init_state): with tf.compat.v1.name_scope( name=mcmc_util.make_name(self.name, 'remc', 'bootstrap_results'), values=[init_state]): replica_results = [ self.replica_kernels[i].bootstrap_results(init_state) for i in range(self.num_replica) ...
Returns an object with the same type as returned by `one_step`. Args: init_state: `Tensor` or Python `list` of `Tensor`s representing the initial state(s) of the Markov chain(s). Returns: kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within th...
juraj-google-style
def dec(self, byts): envl = s_msgpack.un(byts) iv = envl.get('iv', b'') asscd = envl.get('asscd', b'') data = envl.get('data', b'') decryptor = AESGCM(self.ekey) try: data = decryptor.decrypt(iv, data, asscd) except Exception: lo...
Decode an envelope dict and decrypt the given bytes. Args: byts (bytes): Bytes to decrypt. Returns: bytes: Decrypted message.
juraj-google-style
def scan_storage(self, area_name, callable, start=0, stop=None): if (area_name == u'storage'): data = self.storage_data elif (area_name == u'streaming'): data = self.streaming_data else: raise ArgumentError(('Unknown area name in scan_storage (%s) should be storage or streaming' % ar...
Iterate over streaming or storage areas, calling callable. Args: area_name (str): Either 'storage' or 'streaming' to indicate which storage area to scan. callable (callable): A function that will be called as (offset, reading) for each reading between start_offset and end_offset (inclusive). If the scan function want...
codesearchnet
def contains(self, x: int, y: int) -> bool:
    """Returns True if this node contains these coordinates.

    Args:
        x (int): X position to check.
        y (int): Y position to check.

    Returns:
        bool: True if this node contains these coordinates,
            otherwise False.
    """
    within_x = self.x <= x < self.x + self.width
    within_y = self.y <= y < self.y + self.height
    return within_x and within_y
Returns True if this node contains these coordinates. Args: x (int): X position to check. y (int): Y position to check. Returns: bool: True if this node contains these coordinates. Otherwise False.
juraj-google-style
def set_generation_type(self, num_processors=(- 1), num_splits=1000, verbose=(- 1)):
    """Choose whether data generation runs in parallel or single-process.

    Args:
        num_processors (int, optional): Number of parallel processors to
            use. If ``num_processors == -1``, the multiprocessing module is
            used with all available cpus.
        num_splits (int, optional): Number of splits for parallel input.
        verbose (int, optional): Verbosity setting for generation.
    """
    parallel_cfg = self.parallel_input
    parallel_cfg.num_processors = num_processors
    parallel_cfg.num_splits = num_splits
    parallel_cfg.verbose = verbose
Change generation type. Choose whether to generate the data in parallel or on a single processor. Args: num_processors (int or None, optional): Number of parallel processors to use. If ``num_processors==-1``, this will use multiprocessing module and use available cpus. If single generation is desired, num_processors ...
codesearchnet
def __init__(self, name: Text, num_replicas: int, pivot: ops.Operation): super(TPUReplicateContext, self).__init__() self._num_replicas = num_replicas self._outer_device_function_stack = None self._oc_dev_fn_stack = None self._outside_compilation_cluster = None self._is_map_outside_compilation =...
Builds a new TPUReplicateContext. Args: name: a unique name for the context, used to populate the `_tpu_replicate` attribute. num_replicas: an integer that gives the number of replicas for the computation. pivot: a pivot node. Nodes in the TPUReplicateContext that do not have any inputs will have a control dependency ...
github-repos
def categorical_accuracy(y_true, y_pred):
    """Calculates how often predictions match one-hot labels.

    Args:
        y_true: One-hot ground-truth labels.
        y_pred: Predicted values.

    Returns:
        Float tensor of per-example 0./1. match indicators.
    """
    true_labels = math_ops.argmax(y_true, axis=-1)
    pred_labels = math_ops.argmax(y_pred, axis=-1)
    return math_ops.cast(
        math_ops.equal(true_labels, pred_labels), backend.floatx())
Calculates how often predictions match one-hot labels. Standalone usage: >>> y_true = [[0, 0, 1], [0, 1, 0]] >>> y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]] >>> m = tf.keras.metrics.categorical_accuracy(y_true, y_pred) >>> assert m.shape == (2,) >>> m.numpy() array([0., 1.], dtype=float32) You can provide logits of c...
github-repos
def build_album_art_full_uri(self, url): if (not url.startswith(('http:', 'https:'))): url = ((('http: return url
Ensure an Album Art URI is an absolute URI. Args: url (str): the album art URI. Returns: str: An absolute URI.
codesearchnet
def verify(self, message, signature):
    """Verifies a message against a signature.

    Args:
        message: string or bytes, the message to verify. If string, will
            be encoded to bytes as utf-8.
        signature: the signature on the message.

    Returns:
        True if the message was signed by the private key associated with
        this public key, otherwise False.
    """
    encoded = _helpers._to_bytes(message, encoding='utf-8')
    try:
        return rsa.pkcs1.verify(encoded, signature, self._pubkey)
    except (ValueError, rsa.pkcs1.VerificationError):
        # Malformed or non-matching signature: report failure, don't raise.
        return False
Verifies a message against a signature. Args: message: string or bytes, The message to verify. If string, will be encoded to bytes as utf-8. signature: string or bytes, The signature on the message. If string, will be encoded to bytes as utf-8. Returns: True if message was signed by the private key associated with th...
juraj-google-style
def load_supported_categories(categories_path: str): global _load_supported_categories if _load_supported_categories: return with open(categories_path, encoding='utf-8') as supported_categories: yaml_object = yaml.load(supported_categories.read(), Loader=yaml.SafeLoader) Tag.Config.suppo...
Load the list of supported categories from categories_path file into Tag model config Args: categories_path: path to the file with categories.
github-repos
def detect_format(program, attributes) -> str:
    """Detect format for vertex attributes.

    The format returned does not contain padding.

    Args:
        program (Program): The program.
        attributes (list): A list of attribute names.

    Returns:
        str: The detected format string.
    """
    def fmt(attr):
        """For internal use only."""
        # Total component count followed by the attribute's shape code.
        return (attr.array_length * attr.dimension, attr.shape)

    parts = ['%d%s' % fmt(program[name]) for name in attributes]
    return ' '.join(parts)
Detect format for vertex attributes. The format returned does not contain padding. Args: program (Program): The program. attributes (list): A list of attribute names. Returns: str
codesearchnet
def clear_cached_modules(modules: py_utils.StrOrStrList, *, recursive: bool=True, verbose: bool=False, invalidate: bool=True) -> None: modules_to_clear = get_module_names(modules, recursive=recursive) if not modules_to_clear: return modules = set(py_utils.normalize_str_to_list(modules)) for modu...
Clear the `sys.modules` cache. Helpful for interactive development to reload from Jupyter notebook the code we're currently editing (without having to restart the notebook kernel). Usage: ```python ecolab.clear_cached_modules(['visu3d', 'other_module.submodule']) import visu3d import other_module.submodule ``` Arg...
github-repos
def BuildFindSpecs(self, environment_variables=None): path_attributes = {} if environment_variables: for environment_variable in environment_variables: attribute_name = environment_variable.name.lower() attribute_value = environment_variable.value if (not isinstance(a...
Build find specification from a filter file. Args: environment_variables (Optional[list[EnvironmentVariableArtifact]]): environment variables. Returns: list[dfvfs.FindSpec]: find specification.
codesearchnet
def __init__(self, key_dtype, value_dtype):
    """Construct a lookup table interface.

    Args:
        key_dtype: The table key type; normalized via ``dtypes.as_dtype``.
        value_dtype: The table value type; normalized via ``dtypes.as_dtype``.
    """
    self._key_dtype = dtypes.as_dtype(key_dtype)
    self._value_dtype = dtypes.as_dtype(value_dtype)
    super(LookupInterface, self).__init__()
Construct a lookup table interface. Args: key_dtype: The table key type. value_dtype: The table value type.
github-repos
def market(self, accountID, **kwargs):
    """Shortcut to create a Market Order in an Account.

    Args:
        accountID: The ID of the Account.
        **kwargs: The arguments to create a MarketOrderRequest.

    Returns:
        v20.response.Response containing the results from submitting
        the request.
    """
    order_request = MarketOrderRequest(**kwargs)
    return self.create(accountID, order=order_request)
Shortcut to create a Market Order in an Account Args: accountID : The ID of the Account kwargs : The arguments to create a MarketOrderRequest Returns: v20.response.Response containing the results from submitting the request
juraj-google-style
def sget_timestamp(self, cycle, step, dataset_number=None): dataset_number = self._validate_dataset_number(dataset_number) if (dataset_number is None): self._report_empty_dataset() return cycle_index_header = self.headers_normal.cycle_index_txt timestamp_header = self.headers_normal.test...
Returns timestamp for cycle, step. Convenience function; same as issuing dfdata[(dfdata[cycle_index_header] == cycle) & (dfdata[step_index_header] == step)][timestamp_header] Args: cycle: cycle number step: step number dataset_number: the dataset number (automatic selection if None) Returns: pandas.Series
codesearchnet
def assert_no_text(self, *args, **kwargs): query = TextQuery(*args, **kwargs) @self.synchronize(wait=query.wait) def assert_no_text(): count = query.resolve_for(self) if (matches_count(count, query.options) and ((count > 0) or expects_none(query.options))): raise ExpectationNotM...
Asserts that the page or current node doesn't have the given text content, ignoring any HTML tags. Args: *args: Variable length argument list for :class:`TextQuery`. **kwargs: Arbitrary keyword arguments for :class:`TextQuery`. Returns: True Raises: ExpectationNotMet: If the assertion hasn't succeeded during the wai...
codesearchnet
def create_ondemand_streaming_locator(access_token, encoded_asset_id, pid, starttime=None): path = '/Locators' endpoint = ''.join([ams_rest_endpoint, path]) if starttime is None: body = '{ \ "AccessPolicyId":"' + pid + '", \ "AssetId":"' + encoded_asset_id + '", \ "Type": "2" \ }' ...
Create Media Service OnDemand Streaming Locator. Args: access_token (str): A valid Azure authentication token. encoded_asset_id (str): A Media Service Encoded Asset ID. pid (str): A Media Service Encoded PID. starttime (str): A Media Service Starttime. Returns: HTTP response. JSON body.
juraj-google-style
def load_all_yamls(cls, directories): yaml_files = [] loaded_yamls = {} for d in directories: if d.startswith('/home') and not os.path.exists(d): os.makedirs(d) for dirname, subdirs, files in os.walk(d): yaml_files.extend(map(lamb...
Loads yaml files from all given directories. Args: directories: list of directories to search Returns: dict of {fullpath: loaded_yaml_structure}
juraj-google-style
async def client_event_handler(self, client_id, event_tuple, user_data):
    """Send an event to a client.

    This default implementation only logs and ignores the event; subclasses
    should override it to actually forward device events to their clients.

    Args:
        client_id: The client the event was forwarded for.
        event_tuple: A 3-tuple of (conn_string, event_name, event).
        user_data: Opaque object associated with the client.

    Returns:
        None.
    """
    conn_string, event_name, _ = event_tuple
    self._logger.debug(
        'Ignoring event %s from device %s forwarded for client %s',
        event_name, conn_string, client_id)
    return None
Method called to actually send an event to a client. Users of this class should override this method to actually forward device events to their clients. It is called with the client_id passed to (or returned from) :meth:`setup_client` as well as the user_data object that was included there. The event tuple is a 3-tu...
codesearchnet
class FlaxSampleOutput(ModelOutput):
    """Flax base class for outputs of decoder-only generation models using sampling.

    Args:
        sequences (`jnp.ndarray` of shape `(batch_size, max_length)`):
            The generated sequences.
    """

    # Generated token id sequences; None until populated.
    sequences: Optional[jnp.ndarray] = None
Flax Base class for outputs of decoder-only generation models using sampling. Args: sequences (`jnp.ndarray` of shape `(batch_size, max_length)`): The generated sequences.
github-repos
def riak_multi_get(self, key_list_tuple):
    """Fetch riak objects for the given (bucket_type, bucket, key) tuples.

    For each call a separate pool is used and stopped after execution.

    Args:
        key_list_tuple (list of tuples): [('bucket_type', 'bucket', 'riak_key')]
            Example: [('models', 'personel', 'McAPchPZzB6RVJ8QI2XSVQk4mUR')]

    Returns:
        The fetched riak objects.
    """
    pool = PyokoMG()
    fetched = self._client.multiget(key_list_tuple, pool=pool)
    pool.stop()
    return fetched
Sends given tuples of list to multiget method and took riak objs' keys and data. For each multiget call, separate pools are used and after execution, pools are stopped. Args: key_list_tuple(list of tuples): [('bucket_type','bucket','riak_key')] Example: [('models','personel','McAPchPZzB6RVJ8QI2XSVQk4mUR')] Returns: o...
juraj-google-style
def heightmap_add_hill(
    hm: np.ndarray, x: float, y: float, radius: float, height: float
) -> None:
    """Add a hill (a half spheroid) at the given position.

    If height == radius or -radius, the hill is a half-sphere.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
        x (float): The x position at the center of the new hill.
        y (float): The y position at the center of the new hill.
        radius (float): The radius of the new hill.
        height (float): The height or depth of the new hill.
    """
    cdata = _heightmap_cdata(hm)
    lib.TCOD_heightmap_add_hill(cdata, x, y, radius, height)
Add a hill (a half spheroid) at given position. If height == radius or -radius, the hill is a half-sphere. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. x (float): The x position at the center of the new hill. y (float): The y position at the center of the new hill. radius (float): The ...
juraj-google-style
def __init__(self, name, freevars, extra_locals):
    """Creates a new factory for a Python function.

    Args:
        name: The function name.
        freevars: The list of non-global free variables for the function.
        extra_locals: Dict[Text, Any], names and values for custom variables
            that are accessible to the generated code as local variables.
    """
    self._name = name
    self._freevars = freevars
    self._extra_locals = extra_locals
    # Populated later, once the factory is actually built/used.
    self._unbound_factory = None
    self.module = None
    self.source_map = None
Creates a new factory for a Python function. Args: name: The function name. freevars: The list of non-global free variables for the function. extra_locals: Dict[Text, Any], names and values for custom variables that are accessible to the generated code as local variables.
github-repos
def get_cached_or_new(url, new=False): garbage_collection() old_req = DATABASE.get(url) if old_req and not new: return old_req if not (url.startswith("http: raise ValueError("Invalid URL `%s`!" % url) req = RequestInfo(url=url) DATABASE[url] = req return req
Look into the database and return :class:`RequestInfo` if the `url` was already analyzed, or create and return new instance, if not. If the `new` is set to True, always create new instance. Args: url (str): URL of the analyzed resource. new (bool, default False): Force new instance? Returns: obj: :class:`RequestInfo...
juraj-google-style
def fill_rect(self, rect):
    """Fill a rectangle on the current rendering target with the drawing color.

    Args:
        rect (Rect): The destination rectangle, or None to fill the entire
            rendering target.

    Raises:
        SDLError: If an error is encountered.
    """
    status = lib.SDL_RenderFillRect(self._ptr, rect._ptr)
    check_int_err(status)
Fill a rectangle on the current rendering target with the drawing color. Args: rect (Rect): The destination rectangle, or None to fill the entire rendering target. Raises: SDLError: If an error is encountered.
codesearchnet
def EnsureAstName(ast, module_name, fix=False): raw_ast = ast.ast if fix and module_name != raw_ast.name: ast = ast.Replace(class_type_nodes=None) ast = ast.Replace(ast=raw_ast.Visit(visitors.RenameModuleVisitor(raw_ast.name, module_name))) else: assert module_name == raw_ast.name ...
Verify that serializable_ast has the name module_name, or repair it. Args: ast: An instance of SerializableAst. module_name: The name under which ast.ast should be loaded. fix: If this function should repair the wrong name. Returns: The updated SerializableAst.
github-repos
def _show_tag_sets(saved_model_dir):
    """Prints all the tag-sets for MetaGraphs stored in a SavedModel directory.

    Args:
        saved_model_dir: Directory containing the SavedModel to inspect.
    """
    tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)
    print('The given SavedModel contains the following tag-sets:')
    for tag_set in sorted(tag_sets):
        formatted = ', '.join(sorted(tag_set))
        print('%r' % formatted)
Prints the tag-sets stored in SavedModel directory. Prints all the tag-sets for MetaGraphs stored in SavedModel directory. Args: saved_model_dir: Directory containing the SavedModel to inspect.
github-repos
def transmute_sites(self, old_site_label, new_site_label, n_sites_to_change):
    """Relabel a random subset of sites carrying a specific label.

    Args:
        old_site_label (String or List(String)): Site label(s) of the sites
            to be modified.
        new_site_label (String): Site label to be applied to the modified
            sites.
        n_sites_to_change (Int): Number of sites to relabel.
    """
    candidates = self.select_sites(old_site_label)
    for chosen in random.sample(candidates, n_sites_to_change):
        chosen.label = new_site_label
    # Rebuild the set of labels currently present across all sites.
    self.site_labels = {site.label for site in self.sites}
Selects a random subset of sites with a specific label and gives them a different label. Args: old_site_label (String or List(String)): Site label(s) of the sites to be modified.. new_site_label (String): Site label to be applied to the modified sites. n_sites_to_change (Int): Number of...
codesearchnet
def output_types(self):
    """Returns the type of each component of an element of this iterator.

    Returns:
        A (nested) structure of `tf.DType` objects corresponding to each
        component of an element of this dataset.
    """
    def to_legacy(component_spec):
        # Convert each component spec to its legacy output type.
        return component_spec._to_legacy_output_types()

    return nest.map_structure(to_legacy, self._element_spec)
Returns the type of each component of an element of this iterator. Returns: A (nested) structure of `tf.DType` objects corresponding to each component of an element of this dataset.
github-repos
def _CheckFormatTokenSubtypes(self, llines, list_of_expected): actual = [] for lline in llines: filtered_values = [(ft.value, ft.subtypes) for ft in lline.tokens if ft.name not in pytree_utils.NONSEMANTIC_TOKENS] if filtered_values: actual.append(filtered_values) self.assertEqual...
Check that the tokens in the LogicalLines have the expected subtypes. Args: llines: list of LogicalLine. list_of_expected: list of (name, subtype) pairs. Non-semantic tokens are filtered out from the expected values.
github-repos
def _dynamic_range_quantize(src_saved_model_path: str, dst_saved_model_path: str, quantization_options: _QuantizationOptions) -> autotrackable.AutoTrackable: mode_str = 'dynamic-range quantization' if _is_qat_saved_model(src_saved_model_path): raise ValueError('The models trained with quantization-aware...
Quantizes the given SavedModel via post-training dynamic range quantization. Args: src_saved_model_path: Path to the saved model. dst_saved_model_path: The path to save the output SavedModel. The directory will be overwritten if not empty. quantization_options: QuantizationOptions proto describing quantization related...
github-repos
def _attempt_shard_retry(self, shard_state, tstate): shard_attempts = shard_state.retries + 1 if shard_attempts >= parameters.config.SHARD_MAX_ATTEMPTS: logging.warning( "Shard attempt %s exceeded %s max attempts.", shard_attempts, parameters.config.SHARD_MAX_ATTEMPTS) retu...
Whether to retry shard. This method may modify shard_state and tstate to prepare for retry or fail. Args: shard_state: model.ShardState for current shard. tstate: model.TransientShardState for current shard. Returns: A _TASK_DIRECTIVE enum. RETRY_SHARD if shard should be retried. FAIL_TASK otherwise.
juraj-google-style
def on_predict_batch_begin(self, batch, logs=None):
    """Called at the beginning of a batch in `predict` methods.

    Subclasses should override for any actions to run. Note that if the
    `steps_per_execution` argument to `compile` in `tf.keras.Model` is set
    to `N`, this method will only be called every `N` batches.

    Args:
        batch: Integer, index of batch within the current epoch.
        logs: Dict of logs for the batch, if any.
    """
Called at the beginning of a batch in `predict` methods. Subclasses should override for any actions to run. Note that if the `steps_per_execution` argument to `compile` in `tf.keras.Model` is set to `N`, this method will only be called every `N` batches. Args: batch: Integer, index of batch within the current epoch....
github-repos
def __init__(self, path_elements: List[Union[str, int]], parent: Optional['Key']=None, project: Optional[str]=None, namespace: Optional[str]=None):
    """Represents a Datastore key.

    The partition ID is represented by its components: namespace and
    project. If the key has a parent, project and namespace should either
    be unset or match the parent's.

    Args:
        path_elements: (list of str and int) Key path: an alternating
            sequence of kind and identifier.
        parent: (Key) Optional parent key.
        project: (str) Optional project id.
        namespace: (str) Optional namespace.
    """
    # Store the path immutably; the remaining components are kept as given.
    self.path_elements = tuple(path_elements)
    self.project = project
    self.namespace = namespace
    self.parent = parent
Represents a Datastore key. The partition ID is represented by its components: namespace and project. If key has a parent, project and namespace should either be unset or match the parent's. Args: path_elements: (list of str and int) Key path: an alternating sequence of kind and identifier. The kind must be of type `...
github-repos
def vec_size(nodes, s_val):
    r"""Compute :math:`\|B(s)\|_2` at a single parameter value.

    Intended to be used with ``functools.partial`` to fix ``nodes`` and
    obtain a callable accepting only ``s_val``.

    Args:
        nodes (numpy.ndarray): The nodes defining a curve.
        s_val (float): Parameter at which to evaluate the curve.

    Returns:
        float: The Euclidean norm of ``B(s)``.
    """
    params = np.asfortranarray([s_val])
    evaluated = evaluate_multi(nodes, params)
    # Single evaluation point, so take column 0 and measure its 2-norm.
    return np.linalg.norm(evaluated[:, 0], ord=2)
r"""Compute :math:`\|B(s)\|_2`. .. note:: This is a helper for :func:`_compute_length` and does not have a Fortran speedup. Intended to be used with ``functools.partial`` to fill in the value of ``nodes`` and create a callable that only accepts ``s_val``. Args: nodes (numpy.ndarray): The nodes defining a curve. s_v...
juraj-google-style
def pre_ref_resolution_callback(self, other_model):
    """Store a freshly parsed model in the repository (internal).

    Args:
        other_model: the parsed model to register.

    Returns:
        nothing
    """
    model_filename = other_model._tx_filename
    assert model_filename
    # Attach a repository that shares this repository's global cache.
    other_model._tx_model_repository = GlobalModelRepository(self.all_models)
    self.all_models.filename_to_model[model_filename] = other_model
(internal: used to store a model after parsing into the repository) Args: other_model: the parsed model Returns: nothing
juraj-google-style
def get_indices(self, axis=0, index_func=None, old_blocks=None): ErrorMessage.catch_bugs_and_request_email((not callable(index_func))) func = self.preprocess_func(index_func) if (axis == 0): new_indices = ([idx.apply(func).get() for idx in self._partitions_cache.T[0]] if len(self._partitions_cache.T...
This gets the internal indices stored in the partitions. Note: These are the global indices of the object. This is mostly useful when you have deleted rows/columns internally, but do not know which ones were deleted. Args: axis: This axis to extract the labels. (0 - index, 1 - columns). index_func: The function to be...
codesearchnet
def InnermostClass(self):
    """Get class info on the top of the stack.

    Returns:
        A _ClassInfo object if we are inside a class, or None otherwise.
    """
    # Walk the stack from the innermost scope outwards.
    for entry in reversed(self.stack):
        if isinstance(entry, _ClassInfo):
            return entry
    return None
Get class info on the top of the stack. Returns: A _ClassInfo object if we are inside a class, or None otherwise.
codesearchnet
def get(self, uri: str) -> Optional[_T]:
    """Retrieve a protocol buffer for the resource with the given uri.

    Parses the raw resource on first access and memoizes the parsed
    protocol buffer for subsequent lookups.

    Args:
        uri: URI of the resource to retrieve.

    Returns:
        A protocol buffer for the resource, or ``None`` when ``uri`` is
        not present in the collection.
    """
    raw = self.resources_by_uri.get(uri)
    if raw is None:
        return None
    # Already parsed on a previous call; return the cached proto as-is.
    if isinstance(raw, self.proto_cls):
        return raw
    proto = self._parse_resource(uri, raw)
    # Cache the parsed form so later lookups skip re-parsing.
    self.resources_by_uri[uri] = proto
    return proto
Retrieves a protocol buffer for the resource with the given uri. Args: uri: URI of the resource to retrieve. Returns: A protocol buffer for the resource or `None` if the `uri` is not present in the ResourceCollection. Raises: RuntimeError: The resource could not be found or the retrieved resource did not have the ex...
github-repos
def _read_arg(arg): if (arg is None): arg_out = arg else: if ((len(arg) == 1) and os.path.exists(arg[0])): arg_out = grp.read(arg[0]) else: arg_out = arg assert isinstance(arg_out, list), 'arg_out must be a list.' assert (type(arg_out[0]) == str), ...
If arg is a list with 1 element that corresponds to a valid file path, use set_io.grp to read the grp file. Otherwise, check that arg is a list of strings. Args: arg (list or None) Returns: arg_out (list or None)
codesearchnet
def folderExist(self, name, folders): if name is not None and name != '': folderID = None for folder in folders: if folder['title'].lower() == name.lower(): return True del folders return folderID else: ...
Determines if a folder exists, case insensitively. Args: name (str): The name of the folder to check. folders (list): A list of folder dicts to check against. The dicts must contain the key:value pair ``title``. Returns: bool: ``True`` if the folder exists in the list, ``False`` otherwise.
juraj-google-style
def save(self, config=None): if (config is not None): clist = [config] else: clist = [self._system_config, self._global_config, self._repo_config, self._local_config] for conf in clist: if (conf.filename is None): continue try: logger.debug("Writing '{...
Saves config to config files. Args: config (configobj.ConfigObj): optional config object to save. Raises: dvc.config.ConfigError: thrown if failed to write config file.
codesearchnet
def lineno(self):
    """Return the line number of this logical line.

    Returns:
        The line number of the first token in this logical line.
    """
    first_token = self.first
    return first_token.lineno
Return the line number of this logical line. Returns: The line number of the first token in this logical line.
github-repos
def convert_md_to_rst(md_path, rst_temp_path): command = "pandoc --write=rst --output=%s %s" % (rst_temp_path, md_path) print("converting with pandoc: %s to %s\n-->%s" % (md_path, rst_temp_path, command)) if os.path.exists(rst_temp_path): ...
Convert the contents of a file from Markdown to reStructuredText. Returns the converted text as a Unicode string. Arguments: md_path: a path to a UTF-8 encoded Markdown file to convert. rst_temp_path: a temporary path to which to write the converted contents.
juraj-google-style
def close(self): if self.reuse: logger.debug('Ipcontroller not shutting down: reuse enabled') return if (self.mode == 'manual'): logger.debug('Ipcontroller not shutting down: Manual mode') return try: pgid = os.getpgid(self.proc.pid) os.killpg(pgid, signal.SIG...
Terminate the controller process and its child processes. Args: - None
codesearchnet
def no_company_with_insufficient_companies_house_data(value):
    """Validate that Companies House holds data for this company number.

    Args:
        value (string): The company number to check.

    Raises:
        django.forms.ValidationError: If the number's prefix belongs to a
            company type Companies House holds no information on.
    """
    for prefix, name in company_types_with_insufficient_companies_house_data:
        if value.upper().startswith(prefix):
            raise ValidationError(
                MESSAGE_INSUFFICIENT_DATA,
                params={'name': name},
            )
Confirms that the company number is not for a company that Companies House does not hold information on. Args: value (string): The company number to check. Raises: django.forms.ValidationError
juraj-google-style
def _poll_once(self, timeout_ms, max_records): self._coordinator.poll() if (not self._subscription.has_all_fetch_positions()): self._update_fetch_positions(self._subscription.missing_fetch_positions()) (records, partial) = self._fetcher.fetched_records(max_records) if records: if (not pa...
Do one round of polling. In addition to checking for new data, this does any needed heart-beating, auto-commits, and offset updates. Arguments: timeout_ms (int): The maximum time in milliseconds to block. Returns: dict: Map of topic to list of records (may be empty).
codesearchnet
def listen_tcp(cls, host='', port=0, echo=False):
    """Create a Flow listening on a TCP server socket.

    Args:
        host (str): The hostname or IP address to bind to.
        port (int): The port number to listen on.
        echo (bool): Whether to echo read/written data to stdout by default.

    Returns:
        :class:`Flow`: A Flow instance wrapping the listening TCP channel.
    """
    channel = TCPServerSocketChannel(host, port)
    return cls(channel, echo=echo)
Set up a :class:`TCPServerSocketChannel` and create a :class:`Flow` instance for it. Args: host(str): The hostname or IP address to bind to. port(int): The port number to listen on. echo(bool): Whether to echo read/written data to stdout by default. Returns: :class:`Flow`: A Flow instance initialised with the TCP soc...
juraj-google-style
def getModelSummaryAsGeoJson(self, session, withStreamNetwork=True, withNodes=False): watershedMaskCard = self.getCard('WATERSHED_MASK') maskFilename = watershedMaskCard.value maskExtension = maskFilename.strip('"').split('.')[1] maskMap = session.query(RasterMapFile)....
Retrieve a GeoJSON representation of the model. Includes vectorized mask map and stream network. Args: session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database withStreamNetwork (bool, optional): Include stream network. Defaults to True. withNodes (bool, optional): I...
juraj-google-style
def _CheckKeyPath(self, registry_key, search_depth): if (self._key_path_segments is None): return False if ((search_depth < 0) or (search_depth > self._number_of_key_path_segments)): return False if (search_depth == 0): segment_name = '' else: segment_name = self._key_pat...
Checks the key path find specification. Args: registry_key (WinRegistryKey): Windows Registry key. search_depth (int): number of key path segments to compare. Returns: bool: True if the Windows Registry key matches the find specification, False if not.
codesearchnet
def ensure_files(self, filenames):
    """Check the student submission for specific files.

    Args:
        filenames (tuple): The list of file names to be checked for.

    Returns:
        bool: True if every file is found in the working directory.
    """
    logger.debug('Testing {0} for the following files: {1}'.format(self.working_dir, filenames))
    entries = os.listdir(self.working_dir)
    # Every requested file must appear in the directory listing.
    return all(name in entries for name in filenames)
Checks the student submission for specific files. Args: filenames (tuple): The list of file names to be checked for. Returns: bool: Indicator if all files are found in the student archive.
codesearchnet
def __init__(self, base: ModelHandler[ExampleT, PredictionT, ModelT], postprocess_fn: Callable[[PredictionT], PostProcessT]):
    """A ModelHandler with a postprocessing function attached.

    Args:
        base: An implementation of the underlying model handler.
        postprocess_fn: the postprocessing function to apply to predictions.
    """
    self._postprocess_fn = postprocess_fn
    self._base = base
    # Propagate env vars from the wrapped handler; default to none.
    self._env_vars = getattr(base, '_env_vars', {})
A ModelHandler that has a preprocessing function associated with it. Args: base: An implementation of the underlying model handler. postprocess_fn: the preprocessing function to use.
github-repos
def import_file_object(filename): try: handle = open(filename, 'r') file_obj = handle.read() dict_obj = json.loads(file_obj) except IOError as e: logger.critical( 'import_file_object: %s error opening %s' % (str(e), str(filename)) ) raise e e...
Summary: Imports block filesystem object Args: :filename (str): block filesystem object Returns: dictionary obj (valid json file), file data object
juraj-google-style
def __init__(self, config, input_size=None): super().__init__() dim = config.hidden_size num_heads = config.num_attention_heads self.num_heads = num_heads head_dim = dim self.scale = head_dim ** (-0.5) self.qkv = nn.Linear(dim, dim * 3, bias=config.qkv_bias) self.proj = nn.Linear(dim, d...
Args: config (`VitDetConfig`): Model configuration. input_size (`Tuple[int]`, *optional*): Input resolution, only required in case relative position embeddings are added.
github-repos
def from_json(cls, json_data): if not isinstance(json_data, dict): json_data = json.loads(_helpers._from_bytes(json_data)) private_key_pkcs8_pem = None pkcs12_val = json_data.get(_PKCS12_KEY) password = None if pkcs12_val is None: private_key_pkc...
Deserialize a JSON-serialized instance. Inverse to :meth:`to_json`. Args: json_data: dict or string, Serialized JSON (as a string or an already parsed dictionary) representing a credential. Returns: ServiceAccountCredentials from the serialized data.
juraj-google-style
def wind44(msg):
    """Decode wind speed and direction (BDS 4,4).

    Args:
        msg (String): 28 bytes hexadecimal message string.

    Returns:
        (int, float): speed (kt) and direction (degree), or None when the
        status bit marks the fields invalid.
    """
    bits = hex2bin(data(msg))
    # Bit index 4 is the status flag; zero means no valid wind data.
    if not int(bits[4]):
        return None
    speed_kt = bin2int(bits[5:14])
    direction_deg = (bin2int(bits[14:23]) * 180.0) / 256.0
    return (round(speed_kt, 0), round(direction_deg, 1))
Wind speed and direction. Args: msg (String): 28 bytes hexadecimal message string Returns: (int, float): speed (kt), direction (degree)
codesearchnet
def es_mapping(cls, base_class=None, role='rdf_class', **kwargs): def _prop_filter(prop, value, **kwargs): try: use_prop = len(set(value.owl_inverseOf) - parent_props) > 0 except AttributeError: use_prop = True ...
Returns the es mapping for the class args: ----- base_class: The root class being indexed role: the role states how the class should be mapped depending upon whether it is used as a subject of an object. options are es_Nested or rdf_class
juraj-google-style
def VerifyMaps(self, conf): retval = 0 for map_name in conf.maps: self.log.info('Verifying map: %s.', map_name) if map_name == config.MAP_NETGROUP: self.log.info('The netgroup map does not support enumeration, skipping.') continue if map_name == config.MAP_AUTOMOU...
Compare each configured map against data retrieved from NSS. For each configured map, build a Map object from NSS and compare it against a Map object retrieved directly from the cache. We expect the cache Map to be a subset of the nss Map due to possible inclusion of other NSS map types (e.g. files, nis, ldap, etc). ...
github-repos
def to_string(cls, error_code):
    """Return the string message for the given ``error_code``.

    Args:
        cls (JLinkEraseErrors): the ``JLinkEraseErrors`` class.
        error_code (int): error code to convert.

    Returns:
        An error string corresponding to the error code.

    Raises:
        ValueError: if the error code is invalid.
    """
    if error_code != cls.ILLEGAL_COMMAND:
        # Defer anything this class does not recognise to the parent.
        return super(JLinkEraseErrors, cls).to_string(error_code)
    return 'Failed to erase sector.'
Returns the string message for the given ``error_code``. Args: cls (JLinkEraseErrors): the ``JLinkEraseErrors`` class error_code (int): error code to convert Returns: An error string corresponding to the error code. Raises: ValueError: if the error code is invalid.
juraj-google-style
def __init__(self, session_creator, hooks, should_recover, stop_grace_period_secs=120): self._graph_was_finalized = ops.get_default_graph().finalized self._hooks = hooks or [] for h in self._hooks: h.begin() worker_context = distribute_coordinator_context.get_current_worker_context() if not ...
Sets up a Monitored or Hooked Session. Args: session_creator: A factory object to create session. Typically a `ChiefSessionCreator` or a `WorkerSessionCreator`. hooks: An iterable of `SessionRunHook' objects. should_recover: A bool. Indicates whether to recover from `AbortedError` and `UnavailableError` or not. stop_g...
github-repos
def get(self, key, mem_map=True):
    """Read and return the data stored for the given key.

    Args:
        key (str): The key to read the data from.
        mem_map (bool): If ``True`` returns the data as memory-mapped
            array, otherwise a copy is returned.

    Note:
        The container has to be opened in advance.

    Returns:
        numpy.ndarray: The stored data, or ``None`` when the key is absent.
    """
    self.raise_error_if_not_open()
    if key not in self._file:
        return None
    data = self._file[key]
    # A full-slice read (``[()]``) forces a copy out of the mapped dataset.
    return data if mem_map else data[()]
Read and return the data stored for the given key. Args: key (str): The key to read the data from. mem_map (bool): If ``True`` returns the data as memory-mapped array, otherwise a copy is returned. Note: The container has to be opened in advance. Returns: numpy.ndarray: The stored data.
juraj-google-style
def setFilter(self, search): if (not isinstance(search, DataSearch)): raise TypeError('The given parameter must an `qtpandas.DataSearch` object') self._search = search self.layoutAboutToBeChanged.emit() if (self._dataFrameOriginal is not None): self._dataFrame = self._dataFrameOriginal ...
Apply a filter and hide rows. The filter must be a `DataSearch` object, which evaluates a python expression. If there was an error while parsing the expression, the data will remain unfiltered. Args: search(qtpandas.DataSearch): data search object to use. Raises: TypeError: An error is raised, if the given parameter...
codesearchnet
def check_dihedral(self, construction_table): c_table = construction_table angles = self.get_angle_degrees(c_table.iloc[3:, :].values) problem_index = np.nonzero((175 < angles) | (angles < 5))[0] rename = dict(enumerate(c_table.index[3:])) problem_index = [rename[i] for ...
Checks, if the dihedral defining atom is colinear. Checks for each index starting from the third row of the ``construction_table``, if the reference atoms are colinear. Args: construction_table (pd.DataFrame): Returns: list: A list of problematic indices.
juraj-google-style
def _AddTokenOnNewline(self, dry_run, must_split): current = self.next_token previous = current.previous_token self.column = self._GetNewlineColumn() if not dry_run: indent_level = self.line.depth spaces = self.column if spaces: spaces -= indent_level * style.Get('IND...
Adds a line break and necessary indentation. Appends the next token to the state and updates information necessary for indentation. Arguments: dry_run: (bool) Don't commit whitespace changes to the FormatToken if True. must_split: (bool) A newline was required before this token. Returns: The split penalty for splitt...
github-repos
def from_string(key, password=b'notasecret'): key = _helpers._to_bytes(key) parsed_pem_key = _helpers._parse_pem_key(key) if parsed_pem_key: pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key) else: password = _helpers._to_bytes(password, e...
Construct a Signer instance from a string. Args: key: string, private key in PKCS12 or PEM format. password: string, password for the private key file. Returns: Signer instance. Raises: OpenSSL.crypto.Error if the key can't be parsed.
juraj-google-style
def UploadSignedConfigBlob(content, aff4_path, client_context=None, limit=None, token=None): if limit is None: limit = config.CONFIG["Datastore.maximum_blob_size"] if client_context is None: ...
Upload a signed blob into the datastore. Args: content: File content to upload. aff4_path: aff4 path to upload to. client_context: The configuration contexts to use. limit: The maximum size of the chunk to use. token: A security token. Raises: IOError: On failure to write.
juraj-google-style
def helper_list(access_token, oid, path):
    """Helper function to list an AMS REST URL path.

    Args:
        access_token (str): A valid Azure authentication token.
        oid (str): An OID; appended to the path when non-empty.
        path (str): A URL path.

    Returns:
        HTTP response. JSON body.
    """
    if oid != "":
        # Address a single entity: the path becomes path('<oid>').
        path = "{0}('{1}')".format(path, oid)
    endpoint = ams_rest_endpoint + path
    return do_ams_get(endpoint, path, access_token)
Helper Function to list a URL path. Args: access_token (str): A valid Azure authentication token. oid (str): An OID. path (str): A URL Path. Returns: HTTP response. JSON body.
juraj-google-style
def run(self, fn, args=None, kwargs=None): _check_initialization() multi_process_lib.Process() if self._runner is None: self._start() fn = dill.dumps(fn, dill.HIGHEST_PROTOCOL) for conn in self._conn.values(): conn.send((fn, args or [], kwargs or {})) process_statuses = [] fo...
Runs `fn` with `args` and `kwargs` on all jobs. Args: fn: The function to be run. args: Optional positional arguments to be supplied in `fn`. kwargs: Optional keyword arguments to be supplied in `fn`. Returns: A list of return values.
github-repos
def __init__(self, num_experts, gates): self._gates = gates self._num_experts = num_experts where = tf.to_int32(tf.where(tf.transpose(gates) > 0)) self._expert_index, self._batch_index = tf.unstack(where, num=2, axis=1) self._part_sizes_tensor = tf.reduce_sum(tf.to_int32(gates > 0), [0]) s...
Create a SparseDispatcher. Args: num_experts: an integer. gates: a `Tensor` of shape `[batch_size, num_experts]`. Returns: a SparseDispatcher
juraj-google-style