code stringlengths 20 4.93k | docstring stringlengths 33 1.27k | source stringclasses 3
values |
|---|---|---|
def write_makeconfig(_path):
http_proxy = str(CFG["gentoo"]["http_proxy"])
ftp_proxy = str(CFG["gentoo"]["ftp_proxy"])
rsync_proxy = str(CFG["gentoo"]["rsync_proxy"])
path.mkfile_uchroot(local.path('/') / _path)
with open(_path, 'w') as makeconf:
lines =
makeconf.write(lines)... | Write a valid gentoo make.conf file to :path:.
Args:
path - The output path of the make.conf | juraj-google-style |
def _get_lr_tensor(self):
    """Compute the learning rate that minimizes the surrogate.

    Returns:
        Scalar tensor holding the learning rate ``lr_t``,
        computed as (1 - sqrt(mu))^2 / h_min.
    """
    return tf.squared_difference(1.0, tf.sqrt(self._mu)) / self._h_min
def _print_download_progress_msg(self, msg, flush=False):
if self._interactive_mode():
self._max_prog_str = max(self._max_prog_str, len(msg))
sys.stdout.write("\r%-{}s".format(self._max_prog_str) % msg)
sys.stdout.flush()
if flush:
print("\n")
else:
... | Prints a message about download progress either to the console or TF log.
Args:
msg: Message to print.
flush: Indicates whether to flush the output (only used in interactive
mode). | juraj-google-style |
def get_images_by_tail_number(self, tail_number, page=1, limit=100):
    """Fetch the images of a particular aircraft by its tail number.

    The images come in three sizes; use whichever suits your need.

    Args:
        tail_number (str): The tail number, e.g. VT-ANL.
        page (int): Optional page number of the result set.
        limit (int): Optional maximum number of entries per page.

    Returns:
        The aircraft image data returned by the flightradar24 backend.
    """
    request_url = REG_BASE.format(
        tail_number, str(self.AUTH_TOKEN), page, limit)
    return self._fr24.get_aircraft_image_data(request_url)
def generate(cls, country_code, bank_code, account_code):
spec = _get_iban_spec(country_code)
bank_code_length = code_length(spec, 'bank_code')
branch_code_length = code_length(spec, 'branch_code')
bank_and_branch_code_length = (bank_code_length + branch_code_length)
account_code_length = code_lengt... | Generate an IBAN from it's components.
If the bank-code and/or account-number have less digits than required by their
country specific representation, the respective component is padded with zeros.
Examples:
To generate an IBAN do the following::
>>> bank_code = '37040044'
>>> account_code = '532013000'
>>> iban = ... | codesearchnet |
def process_files(self, path, recursive=False):
    """Apply normalizations over all files in the given directory.

    Iterates over all files in ``path``. Normalizations are applied to
    each file, storing the result in a new file whose extension is
    ``BATCH_EXTENSION``. Files already carrying ``BATCH_EXTENSION`` are
    skipped so previous results are never re-processed.

    Args:
        path: Path to the directory.
        recursive: Whether to process subdirectories recursively.
    """
    self._logger.info('Processing files in "%s"', path)
    # Fix: the loop variable no longer shadows the `path` parameter;
    # `dir_path` is the directory each file was found in.
    for dir_path, file_name in files_generator(path, recursive):
        if not file_name.endswith(BATCH_EXTENSION):
            self.process_file(os.path.join(dir_path, file_name))
def _FindFileContainingSymbolInDb(self, symbol):
try:
file_proto = self._internal_db.FindFileContainingSymbol(symbol)
except KeyError as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
else:
raise error
if not file_proto... | Finds the file in descriptor DB containing the specified symbol.
Args:
symbol: The name of the symbol to search for.
Returns:
A FileDescriptor that contains the specified symbol.
Raises:
KeyError: if the file cannot be found in the descriptor database. | juraj-google-style |
def __init__(self, agent_interface_format=None, map_size=None):
if not agent_interface_format:
raise ValueError("Please specify agent_interface_format")
self._agent_interface_format = agent_interface_format
aif = self._agent_interface_format
if (aif.use_feature_units
or aif.use_came... | Initialize a Features instance matching the specified interface format.
Args:
agent_interface_format: See the documentation for `AgentInterfaceFormat`.
map_size: The size of the map in world units, needed for feature_units.
Raises:
ValueError: if agent_interface_format isn't specified.
ValueError: if map_size isn't s... | juraj-google-style |
def _create_or_restore_slot_variable(self, slot_variable_position, slot_name, variable):
named_slots = self._slot_dict(slot_name)
variable_key = _var_key(variable)
slot_variable = named_slots.get(variable_key, None)
if slot_variable is None and context.executing_eagerly() and slot_variable_position.is_s... | Restore a slot variable's value, possibly creating it.
Called when a variable which has an associated slot variable is created or
restored. When executing eagerly, we create the slot variable with a
restoring initializer.
No new variables are created when graph building. Instead,
_restore_slot_variable catches these ... | github-repos |
def _ReadSelectedVolumes(self, volume_system, prefix='v'):
volume_identifiers_string = self._input_reader.Read()
volume_identifiers_string = volume_identifiers_string.strip()
if (not volume_identifiers_string):
return []
selected_volumes = self._ParseVolumeIdentifiersString(volume_identifiers_st... | Reads the selected volumes provided by the user.
Args:
volume_system (APFSVolumeSystem): volume system.
prefix (Optional[str]): volume identifier prefix.
Returns:
list[str]: selected volume identifiers including prefix.
Raises:
KeyboardInterrupt: if the user requested to abort.
ValueError: if the volume identifiers ... | codesearchnet |
def image_from_console(console: tcod.console.Console) -> tcod.image.Image:
    """Return an Image holding a snapshot of a Console's pixel data.

    This effectively takes a screen-shot of the Console.

    Args:
        console (Console): Any Console instance.
    """
    # Attach a garbage-collection hook so the C image is freed with the
    # Python object.
    image_cdata = ffi.gc(
        lib.TCOD_image_from_console(_console(console)),
        lib.TCOD_image_delete,
    )
    return tcod.image.Image._from_cdata(image_cdata)
def __exit__(self, exc_type, exc_val, exc_tb) -> bool:
    """Context manager cleanup.

    Closes the worker pool if it exists and marks the context manager
    inactive.

    Args:
        exc_type: The type of the raised exception, if any.
        exc_val: The raised exception, if any.
        exc_tb: The traceback of the raised exception, if any.

    Returns:
        `True` if an exception should be suppressed, `False` otherwise.
        Only `ChildProcessError` is suppressed (after echoing it to
        stderr).
    """
    pool = self._worker_pool
    if pool is not None:
        pool.close()
        self._worker_pool = None
    self._context_manager_active = False
    if exc_type is not ChildProcessError:
        return False
    sys.stderr.write(str(exc_val))
    return True
def __mul__(self, rhs):
if isinstance(rhs, scipy.sparse.spmatrix):
def qIter(qs):
for j in range(qs.shape[1]):
qi = qs.getcol(j).toarray().ravel()
yield qi
return
else:
def qIter(qs):
... | Carries out the action of solving for wavefields.
Args:
rhs (sparse matrix): Right-hand side vector(s)
Returns:
np.ndarray: Wavefields | juraj-google-style |
def _FormatAttrToken(self, token_data):
    """Formats an attribute token as a dictionary of values.

    Args:
        token_data (bsm_token_data_attr32|bsm_token_data_attr64):
            AUT_ATTR32 or AUT_ATTR64 token data.

    Returns:
        dict[str, str]: token values.
    """
    token_values = {}
    token_values['mode'] = token_data.file_mode
    token_values['uid'] = token_data.user_identifier
    token_values['gid'] = token_data.group_identifier
    token_values['system_id'] = token_data.file_system_identifier
    token_values['node_id'] = token_data.file_identifier
    token_values['device'] = token_data.device
    return token_values
def conv_block(name, x, mid_channels, dilations=None, activation="relu",
dropout=0.0):
with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
x_shape = common_layers.shape_list(x)
is_2d = len(x_shape) == 4
num_steps = x_shape[1]
if is_2d:
first_filter = [3, 3]
second_filter ... | 2 layer conv block used in the affine coupling layer.
Args:
name: variable scope.
x: 4-D or 5-D Tensor.
mid_channels: Output channels of the second layer.
dilations: Optional, list of integers.
activation: relu or gatu.
If relu, the second layer is relu(W*x)
If gatu, the second layer is tanh(W1*x) * sigmoid(W2*x)
drop... | juraj-google-style |
def assign_nested_vars(variables, tensors, indices=None):
if isinstance(variables, (tuple, list)):
return tf.group(*[
assign_nested_vars(variable, tensor)
for variable, tensor in zip(variables, tensors)])
if indices is None:
return variables.assign(tensors)
else:
return tf.scatter_u... | Assign tensors to matching nested tuple of variables.
Args:
variables: Nested tuple or list of variables to update.
tensors: Nested tuple or list of tensors to assign.
indices: Batch indices to assign to; default to all.
Returns:
Operation. | juraj-google-style |
def set(self, key, value):
changed = super().set(key=key, value=value)
if not changed:
return False
self._log.info('Saving configuration to "%s"...', self._filename)
with open(self._filename, 'w') as stream:
stream.write(self.content)
self... | Updates the value of the given key in the file.
Args:
key (str): Key of the property to update.
value (str): New value of the property.
Return:
bool: Indicates whether or not a change was made. | juraj-google-style |
def delete(self, file_path):
now = datetime.datetime.now().isoformat()
url = nurls['put'] + upload_path + file_name
headers = {'userid': self.user_id,
'useridx': self.useridx,
'Content-Type': "application/x-www-form-urlencoded; charset=UTF-8",
... | DELETE
Args:
file_path: Full path for a file you want to delete
upload_path: Ndrive path where you want to delete file
ex) /Picture/
Returns:
True: Delete success
False: Delete failed | juraj-google-style |
def sections_list(self, cmd=None):
sections = list(self.common.sections)
if not cmd:
if self.bare is not None:
sections.extend(self.bare.sections)
return sections
return []
sections.extend(self.subcmds[cmd].sections)
if cmd... | List of config sections used by a command.
Args:
cmd (str): command name, set to ``None`` or ``''`` for the bare
command.
Returns:
list of str: list of configuration sections used by that command. | juraj-google-style |
def handle_message_registered(self, msg_data, host):
response = None
if msg_data["method"] == "EVENT":
logger.debug("<%s> <euuid:%s> Event message "
"received" % (msg_data["cuuid"], msg_data["euuid"]))
response = self.event(msg_data["cuuid"],
... | Processes messages that have been delivered by a registered client.
Args:
msg (string): The raw packet data delivered from the listener. This
data will be unserialized and then processed based on the packet's
method.
host (tuple): The (address, host) tuple of the source message.
Returns:
A response that will be sent ... | juraj-google-style |
def _map_condition(self, wire_map, condition):
    """Use the wire_map dict to change the condition tuple's creg name.

    Args:
        wire_map (dict): a map from wires to wires
        condition (tuple): (ClassicalRegister, int)

    Returns:
        tuple(ClassicalRegister, int): new condition, or None when the
        input condition was None.
    """
    if condition is None:
        return None
    creg, value = condition
    # Look up bit 0 of the register; fall back to the original wire when
    # the register is not in the map.
    mapped_wire = wire_map.get((creg, 0), (creg, 0))
    return (mapped_wire[0], value)
def compose_q(self, r: Rotation, normalize_quats: bool = True) -> Rotation:
    """Compose the quaternion of this Rotation with that of another.

    Depending on whether either Rotation was initialized with
    quaternions, this function may call torch.linalg.eigh.

    Args:
        r:
            An update rotation object.

    Returns:
        An updated rotation object.
    """
    composed = quat_multiply(self.get_quats(), r.get_quats())
    return Rotation(
        rot_mats=None, quats=composed, normalize_quats=normalize_quats)
def ssa(scatterer, h_pol=True):
    """Single-scattering albedo for the current setup, with polarization.

    Args:
        scatterer: a Scatterer instance.
        h_pol: If True (default), use horizontal polarization.
            If False, use vertical polarization.

    Returns:
        The single-scattering albedo (0.0 when extinction vanishes).
    """
    extinction = ext_xsect(scatterer, h_pol=h_pol)
    if extinction <= 0.0:
        return 0.0
    return sca_xsect(scatterer, h_pol=h_pol) / extinction
def __init__(
self,
optimizer,
ls_max_iterations=10,
ls_accept_ratio=0.9,
ls_mode='exponential',
ls_parameter=0.5,
ls_unroll_loop=False,
scope='optimized-step',
summary_labels=()
):
self.solver = LineSearch(
max_ite... | Creates a new optimized step meta optimizer instance.
Args:
optimizer: The optimizer which is modified by this meta optimizer.
ls_max_iterations: Maximum number of line search iterations.
ls_accept_ratio: Line search acceptance ratio.
ls_mode: Line search mode, see LineSearch solver.
ls_parameter: Line search paramete... | juraj-google-style |
def __init__(self, unit_def):
if isinstance(unit_def, str):
unit = collections.defaultdict(int)
import re
for m in re.finditer(r"([A-Za-z]+)\s*\^*\s*([\-0-9]*)", unit_def):
p = m.group(2)
p = 1 if not p else int(p)
k =... | Constructs a unit.
Args:
unit_def: A definition for the unit. Either a mapping of unit to
powers, e.g., {"m": 2, "s": -1} represents "m^2 s^-1",
or simply as a string "kg m^2 s^-1". Note that the supported
format uses "^" as the power operator and all units must be
space-separated. | juraj-google-style |
def state_scope(self, state_fluents: Sequence[tf.Tensor]) -> Dict[(str, TensorFluent)]:
    """Return a partial scope with current state-fluents.

    Args:
        state_fluents (Sequence[tf.Tensor]): The current state fluents.

    Returns:
        A mapping from state fluent names to
        :obj:`rddl2tf.fluent.TensorFluent`.
    """
    names = self.rddl.domain.state_fluent_ordering
    return {name: fluent for name, fluent in zip(names, state_fluents)}
def _GetDecodedStreamSize(self):
self._file_object.seek(0, os.SEEK_SET)
self._decoder = self._GetDecoder()
self._decoded_data = b''
encoded_data_offset = 0
encoded_data_size = self._file_object.get_size()
decoded_stream_size = 0
while (encoded_data_offset < encoded_data_size):
read_c... | Retrieves the decoded stream size.
Returns:
int: decoded stream size. | codesearchnet |
def make_sample_her_transitions(replay_strategy, replay_k, reward_fun):
if (replay_strategy == 'future'):
future_p = (1 - (1.0 / (1 + replay_k)))
else:
future_p = 0
def _sample_her_transitions(episode_batch, batch_size_in_transitions):
'episode_batch is {key: array(buffer_size x T x... | Creates a sample function that can be used for HER experience replay.
Args:
replay_strategy (in ['future', 'none']): the HER replay strategy; if set to 'none',
regular DDPG experience replay is used
replay_k (int): the ratio between HER replays and regular replays (e.g. k = 4 -> 4 times
as many HER replays as regular ... | codesearchnet |
async def find(self, seq_set: SequenceSet, selected: SelectedMailbox, requirement: FetchRequirement=FetchRequirement.METADATA) -> AsyncIterable[Tuple[(int, MessageT)]]:
for (seq, cached_msg) in selected.messages.get_all(seq_set):
msg = (await self.get(cached_msg.uid, cached_msg, requirement))
if (ms... | Find the active message UID and message pairs in the mailbox that
are contained in the given sequences set. Message sequence numbers
are resolved by the selected mailbox session.
Args:
seq_set: The sequence set of the desired messages.
selected: The selected mailbox session.
requirement: The data required from each me... | codesearchnet |
def impulse_noise(x, severity=1):
    """Impulse (salt & pepper) noise corruption of images.

    Args:
        x: numpy array, uncorrupted image, assumed to have uint8 pixels
            in [0, 255].
        severity: integer in [1, 5], severity of corruption.

    Returns:
        numpy array, image with uint8 pixels in [0, 255] and added
        impulse noise.
    """
    amounts = (0.03, 0.06, 0.09, 0.17, 0.27)
    amount = amounts[severity - 1]
    noisy = tfds.core.lazy_imports.skimage.util.random_noise(
        np.array(x) / 255.0, mode='s&p', amount=amount)
    clipped = np.clip(noisy, 0, 1) * 255
    return around_and_astype(clipped)
def loader(self, file_name, bad_steps=None, **kwargs):
new_tests = []
if (not os.path.isfile(file_name)):
self.logger.info(('Missing file_\n %s' % file_name))
return None
filesize = os.path.getsize(file_name)
hfilesize = humanize_bytes(filesize)
txt = ('Filesize: %i (%s)' % (filesi... | Loads data from biologics .mpr files.
Args:
file_name (str): path to .res file.
bad_steps (list of tuples): (c, s) tuples of steps s
(in cycle c) to skip loading.
Returns:
new_tests (list of data objects) | codesearchnet |
def _table_viewer(table, rows_per_page=25, fields=None):
if not table.exists():
raise Exception('Table %s does not exist' % table.full_name)
if not table.is_listable():
return "Done"
_HTML_TEMPLATE = u
if fields is None:
fields = google.datalab.utils.commands.get_field_list(fields, table.... | Return a table viewer.
This includes a static rendering of the first page of the table, that gets replaced
by the charting code in environments where Javascript is executable and BQ is available.
Args:
table: the table to view.
rows_per_page: how many rows to display at one time.
fields: an array of field names to di... | juraj-google-style |
def update_paths_and_config(self, config, pkg_dir_name,
pkg_cache_dir=None):
if pkg_cache_dir is None:
pkg_cache_dir = self.package_cache_dir
cached_dir_path = os.path.join(pkg_cache_dir, pkg_dir_name)
if config.get('paths'):
... | Handle remote source defined sys.paths & configs.
Args:
config (dict): git config dictionary
pkg_dir_name (string): directory name of the stacker archive
pkg_cache_dir (string): fully qualified path to stacker cache
cache directory | juraj-google-style |
def __init__(self, left, right):
if isinstance(left, Dist) and len(left) > 1:
if (not isinstance(left, J) or
evaluation.get_dependencies(*list(left.inverse_map))):
raise StochasticallyDependentError(
"Joint distribution with dependenci... | Constructor.
Args:
left (Dist, numpy.ndarray) : Left hand side.
right (Dist, numpy.ndarray) : Right hand side. | juraj-google-style |
def add_update_user(self, user, capacity=None):
if isinstance(user, str):
user = hdx.data.user.User.read_from_hdx(user, configuration=self.configuration)
elif isinstance(user, dict):
user = hdx.data.user.User(user, configuration=self.configuration)
if is... | Add new or update existing user in organization with new metadata. Capacity eg. member, admin
must be supplied either within the User object or dictionary or using the capacity argument (which takes
precedence).
Args:
user (Union[User,Dict,str]): Either a user id or user metadata either from a User object or a diction... | juraj-google-style |
def __extract_directory(self, path, files, destination):
destination_path = os.path.join(destination, path)
if not os.path.exists(destination_path):
os.makedirs(destination_path)
for name, contents in files.items():
item_path = os.path.join(path, name)... | Extracts a single directory to the specified directory on disk.
Args:
path (str):
Relative (to the root of the archive) path of the directory
to extract.
files (dict):
A dictionary of files from a *.asar file header.
destination (str):
The path to extract the files to. | juraj-google-style |
def calculate(self, token_list_x, token_list_y):
    """Calculate similarity with the Jaccard coefficient.

    Concrete method.

    Args:
        token_list_x: [token, token, token, ...]
        token_list_y: [token, token, token, ...]

    Returns:
        Similarity (0.0 when the union of both token sets is empty).
    """
    x, y = self.unique(token_list_x, token_list_y)
    union_size = len(x | y)
    if union_size == 0:
        return 0.0
    return len(x & y) / union_size
def plot_seebeck_temp(self, doping='all', output='average'):
import matplotlib.pyplot as plt
if output == 'average':
sbk = self._bz.get_seebeck(output='average')
elif output == 'eigs':
sbk = self._bz.get_seebeck(output='eigs')
plt.figure(figsize=(22, 14... | Plot the Seebeck coefficient in function of temperature for different
doping levels.
Args:
dopings: the default 'all' plots all the doping levels in the analyzer.
Specify a list of doping levels if you want to plot only some.
output: with 'average' you get an average of the three directions
with 'eigs' you get all the... | juraj-google-style |
def random_array(shape, mean=128., std=20.):
    """Create a random array rescaled to the given `mean` and `std`.

    Args:
        shape: The desired shape.
        mean: The desired mean (Default value = 128).
        std: The desired std (Default value = 20).

    Returns: Random numpy array of given `shape`, shifted and scaled to
        the desired `mean` and `std`.
    """
    samples = np.random.random(shape)
    # Standardize first; K.epsilon() guards against division by zero for
    # (nearly) constant arrays.
    standardized = (samples - np.mean(samples)) / (np.std(samples) + K.epsilon())
    return standardized * std + mean
def start(self):
    """Starts this server.

    >>> dispatcher = tf.data.experimental.service.DispatchServer(start=False)
    >>> dispatcher.start()

    Raises:
      tf.errors.OpError: Or one of its subclasses if an error occurs while
        starting the server.
    """
    self._server.start()
def create_dir(path):
    """Creates a directory if it does not exist already.

    Args:
        path: The path of the directory to create.
    """
    full_path = abs_path(path)
    if not os.path.exists(full_path):
        try:
            os.makedirs(full_path)
        except FileExistsError:
            # Fix: the original compared against `os.errno.EEXIST`, but
            # `os.errno` does not exist in Python 3, so any OSError here
            # raised AttributeError instead. FileExistsError is the
            # EEXIST-specific OSError subclass; it covers the race where
            # another process creates the directory between the existence
            # check and makedirs. All other OSErrors still propagate.
            pass
def response(self, in_thread: Optional[bool] = None) -> "Message":
data = {"channel": self["channel"]}
if in_thread:
if "message" in self:
data["thread_ts"] = (
self["message"].get("thread_ts") or self["message"]["ts"]
)
... | Create a response message.
Depending on the incoming message the response can be in a thread. By default the response follow where the
incoming message was posted.
Args:
in_thread (boolean): Overwrite the `threading` behaviour
Returns:
a new :class:`slack.event.Message` | juraj-google-style |
def __init__(self, optimizer_path: str, optimizer_args: ListOrTuple[str], worker_count: Optional[int]=None, expand_to_input: str=_DEFAULT_INPUT_FILE_EXPANSION_TOKEN):
if worker_count is not None and worker_count < 1:
raise ValueError(f'The `worker_count` argument must be either `None` or a positive integer;... | TestCheckWriter constructor.
Args:
optimizer_path: The program to use for optimizing the HLO.
optimizer_args: The arguments to pass into the optimizer tool.
worker_count: The number of worker threads to use for parallel test-case
transformations. If `None`, the worker count will be inferred. If 1, or
if the instance i... | github-repos |
def prod(x, axis=None, keepdims=False):
    """Reduction along axes with product operation.

    Args:
        x (Variable): An input variable.
        axis (None, int or tuple of ints): Axis or axes along which the
            product is calculated. Passing the default value `None` will
            reduce all dimensions.
        keepdims (bool): Flag whether the reduced axes are kept as a
            dimension with 1 element.
    """
    from .function_bases import prod as prod_base
    if axis is None:
        axes = range(x.ndim)
    elif hasattr(axis, '__iter__'):
        axes = axis
    else:
        axes = [axis]
    return prod_base(x, axes, keepdims)
def get_special_tokens_mask(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None, already_has_special_tokens: bool=False) -> List[int]:
if already_has_special_tokens:
return super().get_special_tokens_mask(token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True)
... | Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
special tokens using the tokenizer `prepare_for_model` method.
Args:
token_ids_0 (`List[int]`):
List of IDs.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
already_has_spe... | github-repos |
def _load_from_file_object(self, f):
subtoken_strings = []
for line in f:
s = line.strip()
if ((s.startswith("'") and s.endswith("'")) or (s.startswith('"') and s.endswith('"'))):
s = s[1:(- 1)]
subtoken_strings.append(native_to_unicode(s))
self._init_subtokens_from_list(... | Load from a file object.
Args:
f: File object to load vocabulary from | codesearchnet |
def get_rms_dist(self, struct1, struct2):
struct1, struct2 = self._process_species([struct1, struct2])
struct1, struct2, fu, s1_supercell = self._preprocess(struct1, struct2)
match = self._match(struct1, struct2, fu, s1_supercell, use_rms=True,
break_on_match... | Calculate RMS displacement between two structures
Args:
struct1 (Structure): 1st structure
struct2 (Structure): 2nd structure
Returns:
rms displacement normalized by (Vol / nsites) ** (1/3)
and maximum distance between paired sites. If no matching
lattice is found None is returned. | juraj-google-style |
def gene_to_panels(self, case_obj):
LOG.info("Building gene to panels")
gene_dict = {}
for panel_info in case_obj.get('panels', []):
panel_name = panel_info['panel_name']
panel_version = panel_info['version']
panel_obj = self.gene_panel(panel_name, v... | Fetch all gene panels and group them by gene
Args:
case_obj(scout.models.Case)
Returns:
gene_dict(dict): A dictionary with gene as keys and a set of
panel names as value | juraj-google-style |
def run_pass_pipeline(mlir_txt, pass_pipeline, show_debug_info=False):
    """Runs a pass pipeline over the input module.

    Args:
      mlir_txt: Textual representation of the MLIR module.
      pass_pipeline: Pass pipeline to run on module.
      show_debug_info: Whether to include locations in the emitted textual form.

    Returns:
      A textual representation of the MLIR module corresponding to the
      transformed module.
    """
    return pywrap_mlir.experimental_run_pass_pipeline(mlir_txt, pass_pipeline, show_debug_info)
def make_merged_spec(self, dev):
    """Returns a new DeviceSpec which incorporates `dev`.

    When combining specs, `dev` takes precedence over the current spec.
    So for instance:

    ```
    first_spec = tf.DeviceSpec(job=0, device_type="CPU")
    second_spec = tf.DeviceSpec(device_type="GPU")
    combined_spec = first_spec.make_merged_spec(second_spec)
    ```

    produces a spec where `second_spec`'s fields win any conflicts.

    Args:
        dev: The spec to merge into this one.

    Returns:
        A new instance of this class combining both specs' properties.
    """
    combined = self._get_combined_properties(dev)
    return type(self)(*combined)
def append(self, data):
    """Appends an item or a list to the Lattice.

    Args:
        data (item, list): The item or list to be added to the Lattice.
            A non-empty list is stored as-is; anything else (including an
            empty list) is wrapped in a single-element list first.
    """
    is_nonempty_list = isinstance(data, list) and len(data) > 0
    self.nodes.append(data if is_nonempty_list else [data])
def tersoff_input(self, structure, periodic=False, uc=True, *keywords):
    """Gets a GULP input with Tersoff potential for an oxide structure.

    Args:
        structure: pymatgen.core.structure.Structure
        periodic (Default=False): Flag denoting whether periodic
            boundary conditions are used.
        uc (Default=True): Unit Cell Flag.
        keywords: GULP first-line keywords.
    """
    # The input is the keyword line, then the structure lines, then the
    # Tersoff potential specification.
    parts = [
        self.keyword_line(*keywords),
        self.structure_lines(
            structure,
            cell_flg=periodic,
            frac_flg=periodic,
            anion_shell_flg=False,
            cation_shell_flg=False,
            symm_flg=not uc,
        ),
        self.tersoff_potential(structure),
    ]
    return ''.join(parts)
def reply(self, text):
data = {'text': text, 'vchannel_id': self['vchannel_id']}
if self.is_p2p():
data['type'] = RTMMessageType.P2PMessage
data['to_uid'] = self['uid']
else:
data['type'] = RTMMessageType.ChannelMessage
data['channel_id'] = self['channel_id']
return RTMMe... | Replys a text message
Args:
text(str): message content
Returns:
RTMMessage | codesearchnet |
def check_list_type(objects, allowed_type, name, allow_none=True):
if (objects is None):
if (not allow_none):
raise TypeError(('%s is None, which is not allowed.' % name))
return objects
if (not isinstance(objects, (tuple, list))):
raise TypeError(('%s is not a list.' % name)... | Verify that objects in list are of the allowed type or raise TypeError.
Args:
objects: The list of objects to check.
allowed_type: The allowed type of items in 'settings'.
name: Name of the list of objects, added to the exception.
allow_none: If set, None is also allowed.
Raises:
TypeError: if object is not of the al... | codesearchnet |
def sanitize_spec_name(name: str) -> str:
    """Sanitizes Spec names. Matches Graph Node and Python naming conventions.

    Without sanitization, names that are not legal Python parameter names
    can be set, which makes it challenging to represent callables
    supporting the named calling capability.

    Args:
        name: The name to sanitize.

    Returns:
        A string that meets Python parameter-name conventions.
    """
    if not name:
        return 'unknown'
    sanitized = ''.join(ch if ch.isalnum() else '_' for ch in name.lower())
    # Identifiers cannot start with a non-letter, so prefix those.
    return sanitized if sanitized[0].isalpha() else 'tensor_' + sanitized
def __init__(self, channel):
self.Login = channel.unary_unary(
'/api.Dgraph/Login',
request_serializer=api__pb2.LoginRequest.SerializeToString,
response_deserializer=api__pb2.Response.FromString,
)
self.Query = channel.unary_unary(
'/api.Dgraph/Query',
reques... | Constructor.
Args:
channel: A grpc.Channel. | juraj-google-style |
def save(nifti_filename, numpy_data):
nifti_filename = os.path.expanduser(nifti_filename)
try:
nifti_img = nib.Nifti1Image(numpy_data, numpy.eye(4))
nib.save(nifti_img, nifti_filename)
except Exception as e:
raise ValueError("Could not save file {0}.".format(nifti_filenam... | Export a numpy array to a nifti file. TODO: currently using dummy
headers and identity matrix affine transform. This can be expanded.
Arguments:
nifti_filename (str): A filename to which to save the nifti data
numpy_data (numpy.ndarray): The numpy array to save to nifti
Returns:
String. The expanded filename that no... | juraj-google-style |
def get_authority(config, metrics, rrset_channel, **kwargs):
    """Get a GCEAuthority client.

    A factory function that validates configuration and creates a proper
    GCEAuthority.

    Args:
        config (dict): GCEAuthority related configuration.
        metrics (obj): :interface:`IMetricRelay` implementation.
        rrset_channel (asyncio.Queue): Queue used for sending messages
            to the reconciler plugin.
        kwargs (dict): Additional keyword arguments forwarded to the
            builder.

    Returns:
        The GCEAuthority built by :class:`GCEAuthorityBuilder`.
    """
    return authority.GCEAuthorityBuilder(
        config, metrics, rrset_channel, **kwargs).build_authority()
def has_no_unchecked_field(self, locator, **kwargs):
    """Checks if the page or current node has no radio button or checkbox
    with the given label, value, or id, that is currently unchecked.

    Args:
        locator (str): The label, name, or id of an unchecked field.
        **kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.

    Returns:
        bool: Whether it doesn't exist.
    """
    # Force checked=False, overriding any caller-supplied value.
    options = dict(kwargs, checked=False)
    return self.has_no_selector('field', locator, **options)
def _real_set(self, obj, old, value, hint=None, setter=None):
if (self.property.matches(value, old) and (hint is None)):
return
was_set = (self.name in obj._property_values)
if was_set:
old_attr_value = obj._property_values[self.name]
else:
old_attr_value = old
if (old_attr_v... | Internal implementation helper to set property values.
This function handles bookkeeping around noting whether values have
been explicitly set, etc.
Args:
obj (HasProps)
The object the property is being set on.
old (obj) :
The previous value of the property to compare
hint (event hint or None, optional)
An optional... | codesearchnet |
def slice(self, begin, end):
if begin < 0 or end < 0:
raise ValueError('Encountered negative index.')
lines = self.lines[begin:end]
font_attr_segs = {}
for key in self.font_attr_segs:
if key >= begin and key < end:
font_attr_segs[key - begin] = self.font_attr_segs[key]
an... | Slice a RichTextLines object.
The object itself is not changed. A sliced instance is returned.
Args:
begin: (int) Beginning line index (inclusive). Must be >= 0.
end: (int) Ending line index (exclusive). Must be >= 0.
Returns:
(RichTextLines) Sliced output instance of RichTextLines.
Raises:
ValueError: If begin or ... | github-repos |
def _read_from_seg(self, n):
result = self._seg.read(size=n)
if (result == ''):
return result
offset = self._seg.tell()
if (offset > self._seg_valid_length):
extra = (offset - self._seg_valid_length)
result = result[:((- 1) * extra)]
self._offset += len(result)
return res... | Read from current seg.
Args:
n: max number of bytes to read.
Returns:
valid bytes from the current seg. "" if no more is left. | codesearchnet |
def GetUnicodeString(value):
if isinstance(value, list):
value = [GetUnicodeString(item) for item in value]
return ''.join(value)
if isinstance(value, py2to3.INTEGER_TYPES):
value = '{0:d}'.format(value)
if (not isinstance(value, py2to3.UNICODE_TYPE)):
return codecs.decode(va... | Attempts to convert the argument to a Unicode string.
Args:
value (list|int|bytes|str): value to convert.
Returns:
str: string representation of the argument. | codesearchnet |
def prune_non_existent_outputs(compound_match_query):
if (len(compound_match_query.match_queries) == 1):
return compound_match_query
elif (len(compound_match_query.match_queries) == 0):
raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQuery objects.')
else:
... | Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks,
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound... | codesearchnet |
def getServerSSLContext(self, hostname=None):
sslctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if (hostname is None):
hostname = socket.gethostname()
certfile = self.getHostCertPath(hostname)
if (certfile is None):
raise s_exc.NoCertKey(('Missing .crt for %s' % hostname))
... | Returns an ssl.SSLContext appropriate to listen on a socket
Args:
hostname: if None, the value from socket.gethostname is used to find the key in the servers directory.
This name should match the not-suffixed part of two files ending in .key and .crt in the hosts subdirectory | codesearchnet |
def __setattr__(cls, name, value):
    """Block attribute mutation once class initialization has finished.

    While ``__init__`` runs, the enumeration value class variables and the
    name/number maps must still be assignable; once the ``__initialized``
    flag is set, only whitelisted post-init attributes may change.

    Raises:
        AttributeError: if a non-whitelisted attribute is set after init.
    """
    mutable = (not cls.__initialized) or (name in _POST_INIT_ATTRIBUTE_NAMES)
    if mutable:
        type.__setattr__(cls, name, value)
    else:
        raise AttributeError('May not change values: %s' % name)
Setting attributes on a class must work during the period of
initialization to set the enumeration value class variables and
build the name/number maps. Once __init__ has set the
__initialized flag to True prohibits setting any more values
on the class. The class is in e... | juraj-google-style |
def fit(self, X, truncated=3):
self.n_sample, self.n_var = X.shape
self.columns = X.columns
self.tau_mat = X.corr(method='kendall').values
self.u_matrix = np.empty([self.n_sample, self.n_var])
self.truncated = truncated
self.depth = self.n_var - 1
self.t... | Fit a vine model to the data.
Args:
X(numpy.ndarray): data to be fitted.
truncated(int): max level to build the vine. | juraj-google-style |
def create_identity_with_grad_check_fn(expected_gradient, expected_dtype=None):
@custom_gradient.custom_gradient
def _identity_with_grad_check(x):
x = array_ops.identity(x)
def grad(dx):
if expected_dtype:
assert dx.dtype == expected_dtype, 'dx... | Returns a function that asserts it's gradient has a certain value.
This serves as a hook to assert intermediate gradients have a certain value.
This returns an identity function. The identity's gradient function is also
the identity function, except it asserts that the gradient equals
`expected_gradient` and has dtype... | github-repos |
def _prefix_exists_in_gcs(gcs_prefix, credentials=None):
gcs_service = _get_storage_service(credentials)
bucket_name, prefix = gcs_prefix[len('gs:
request = gcs_service.objects().list(
bucket=bucket_name, prefix=prefix, maxResults=1)
response = request.execute()
return response.get('items', No... | Check whether there is a GCS object whose name starts with the prefix.
Since GCS doesn't actually have folders, this is how we check instead.
Args:
gcs_prefix: The path; should start with 'gs://'.
credentials: Optional credential to be used to load the file from gcs.
Returns:
True if the prefix matches at least one ... | juraj-google-style |
def print_dict(d, show_missing=True):
for (k, v) in sorted(d.items()):
if ((not v) and show_missing):
print('{} -'.format(k))
elif isinstance(v, list):
print(k)
for item in v:
print(' {}'.format(item))
elif isinstance(v, dict):
... | Prints a shallow dict to console.
Args:
d: Dict to print.
show_missing: Whether to show keys with empty values. | codesearchnet |
def encode(self, vecs):
assert vecs.dtype == np.float32
assert vecs.ndim == 2
N, D = vecs.shape
assert D == self.Ds * self.M, "input dimension must be Ds * M"
codes = np.empty((N, self.M), dtype=self.code_dtype)
for m in range(self.M):
if se... | Encode input vectors into PQ-codes.
Args:
vecs (np.ndarray): Input vectors with shape=(N, D) and dtype=np.float32.
Returns:
np.ndarray: PQ codes with shape=(N, M) and dtype=self.code_dtype | juraj-google-style |
def AddBackpropIndexedSlicesAccumulator(self, op: ops.Operation, grad):
values = grad.values
indices = grad.indices
dense_shape = grad.dense_shape
self.Exit()
if self.outer_context:
self.outer_context.Enter()
if values.get_shape().is_fully_defined():
values_shape = tensor_shape.T... | This is used for accumulating gradients that are IndexedSlices.
This is essentially the equivalent of AddBackpropAccumulator but optimized
for things like updating embeddings from within a while loop.
Args:
op: The Enter op for a loop invariant.
grad: The partial gradients represented as an IndexedSlices.
Returns:
T... | github-repos |
def diff_toDelta(self, diffs):
text = []
for (op, data) in diffs:
if (op == self.DIFF_INSERT):
data = data.encode('utf-8')
text.append(('+' + urllib.quote(data, "!~*'();/?:@&=+$,
elif (op == self.DIFF_DELETE):
text.append(('-%d' % len(data)))
elif (op ... | Crush the diff into an encoded string which describes the operations
required to transform text1 into text2.
E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'.
Operations are tab-separated. Inserted text is escaped using %xx notation.
Args:
diffs: Array of diff tuples.
Returns:
Delta text. | codesearchnet |
def register(config_class, processor_class, exist_ok=False):
    """Register a new processor class for a configuration class.

    Args:
        config_class: The configuration class the processor corresponds to.
        processor_class: The processor class to register for ``config_class``.
        exist_ok (bool): Forwarded to the mapping's ``register``; presumably
            allows overwriting an existing registration when True — TODO
            confirm against ``PROCESSOR_MAPPING.register``.
    """
    PROCESSOR_MAPPING.register(config_class, processor_class, exist_ok=exist_ok)
Args:
config_class ([`PretrainedConfig`]):
The configuration corresponding to the model to register.
processor_class ([`ProcessorMixin`]): The processor to register. | github-repos |
async def storm(self, text, opts=None, num=None, cmdr=False):
    """Execute a storm command and return the list of storm messages.

    Args:
        text (str): Storm command to execute.
        opts (dict): Opts to pass to the cortex during execution.
        num (int): Expected number of 'node' messages in the output;
            asserted when given.
        cmdr (bool): If True, execute the line via the commander.

    Returns:
        list: The storm messages produced by the run.

    Raises:
        AssertionError: if ``num`` is given and the node count differs.
    """
    mesgs = await self._runStorm(text, opts, cmdr)
    if num is None:
        return mesgs
    nodes = [mesg for mesg in mesgs if mesg[0] == 'node']
    if len(nodes) != num:
        raise AssertionError(f'Expected {num} nodes, got {len(nodes)}')
    return mesgs
Args:
text (str): Storm command to execute.
opts (dict): Opt to pass to the cortex during execution.
num (int): Number of nodes to expect in the output query. Checks that with an assert statement.
cmdr (bool): If True, executes the line via t... | codesearchnet |
def __init__(self, input_energy: energy.BitstringEnergy, num_expectation_samples: int, num_burnin_samples: int, name: Union[None, str]=None):
super().__init__(input_energy, num_expectation_samples, name=name)
self._kernel = GibbsWithGradientsKernel(input_energy)
self._chain_state = tf.Variable(tfp.distribut... | Initializes a GibbsWithGradientsInference.
Args:
input_energy: The parameterized energy function which defines this
distribution via the equations of an energy based model. This class
assumes that all parameters of `energy` are `tf.Variable`s and that
they are all returned by `energy.variables`.
num_expectation_sampl... | github-repos |
def build_and_pickle_dump(self, abivalidate=False):
self.build()
if not abivalidate: return self.pickle_dump()
isok, errors = self.abivalidate_inputs()
if isok: return self.pickle_dump()
errlines = []
for i, e in enumerate(errors):
errlines.... | Build dirs and file of the `Flow` and save the object in pickle format.
Returns 0 if success
Args:
abivalidate: If True, all the input files are validate by calling
the abinit parser. If the validation fails, ValueError is raise. | juraj-google-style |
def __init__(self, vlan_pcp=None):
    """Create an ActionVlanPCP with the optional parameters below.

    Args:
        vlan_pcp (int): VLAN priority.

    .. note:: The vlan_pcp field is 8 bits long, but only the lower
        3 bits have meaning.
    """
    super().__init__(action_type=ActionType.OFPAT_SET_VLAN_PCP, length=8)
    self.vlan_pcp = vlan_pcp
Args:
vlan_pcp (int): VLAN Priority.
.. note:: The vlan_pcp field is 8 bits long,
but only the lower 3 bits have meaning. | juraj-google-style |
def truncate_to_field_length(self, field, value):
    """Truncate a string value to its column's maximum length.

    Intended for use inside an SQLAlchemy validator so values are clipped
    before they reach the database.

    Args:
        field (str): name of the mapped column attribute on this model.
        value: the candidate value; returned unchanged when falsy or
            already within the column's length limit.

    Returns:
        The value, truncated to the column's declared max length if needed.
    """
    column = getattr(self.__class__, field).prop.columns[0]
    limit = column.type.length
    if not value or len(value) <= limit:
        return value
    return value[:limit]
Use this in a validator to check/truncate values before inserting them into the database.
Copy the below example code after ``@validates`` to your model class and replace ``field1`` and ``field2`` with
your field name(s).
:Example:
from sqlalchemy.orm i... | codesearchnet |
def json_to_data(fn=None, return_json=True):
def json_to_data_decorator(fn):
@handle_type_error
@wraps(fn)
def get_data_wrapper(*args, **kwargs):
kwargs['data'] = decode_json_body()
if (not return_json):
return fn(*args, **kwargs)
return ... | Decode JSON from the request and add it as ``data`` parameter for wrapped
function.
Args:
return_json (bool, default True): Should the decorator automatically
convert returned value to JSON? | codesearchnet |
def _escaped_token_to_subtoken_strings(self, escaped_token):
ret = []
start = 0
token_len = len(escaped_token)
while start < token_len:
for end in xrange(min(token_len, start + self._max_subtoken_len), start, -1):
subtoken = escaped_... | Converts an escaped token string to a list of subtoken strings.
Args:
escaped_token: An escaped token as a unicode string.
Returns:
A list of subtokens as unicode strings. | juraj-google-style |
def _FormatField(self, field):
    """Format a field value for delimited output.

    Replaces embedded field-delimiter characters with spaces so a string
    field cannot break the delimited output format.

    Args:
        field (str): field value.

    Returns:
        str: formatted field value.
    """
    if isinstance(field, py2to3.STRING_TYPES) and self._FIELD_DELIMITER:
        return field.replace(self._FIELD_DELIMITER, ' ')
    return field
Args:
field (str): field value.
Returns:
str: formatted field value. | codesearchnet |
def match(self, path):
match = self._re.search(path)
if match is None:
return None
kwargs_indexes = match.re.groupindex.values()
args_indexes = [i for i in range(1, match.re.groups + 1)
if i not in kwargs_indexes]
args = [match.group... | Return route handler with arguments if path matches this route.
Arguments:
path (str): Request path
Returns:
tuple or None: A tuple of three items:
1. Route handler (callable)
2. Positional arguments (list)
3. Keyword arguments (dict)
``None`` if the route does not match the path. | juraj-google-style |
def strace_configure(self, port_width):
if (port_width not in [1, 2, 4]):
raise ValueError(('Invalid port width: %s' % str(port_width)))
config_string = ('PortWidth=%d' % port_width)
res = self._dll.JLINK_STRACE_Config(config_string.encode())
if (res < 0):
raise errors.JLinkException('Fa... | Configures the trace port width for tracing.
Note that configuration cannot occur while STRACE is running.
Args:
self (JLink): the ``JLink`` instance
port_width (int): the trace port width to use.
Returns:
``None``
Raises:
ValueError: if ``port_width`` is not ``1``, ``2``, or ``4``.
JLinkException: on error. | codesearchnet |
def maybe_download(directory, filename, uri):
tf.gfile.MakeDirs(directory)
filepath = os.path.join(directory, filename)
if tf.gfile.Exists(filepath):
tf.logging.info(('Not downloading, file already found: %s' % filepath))
return filepath
tf.logging.info(('Downloading %s to %s' % (uri, fi... | Download filename from uri unless it's already in directory.
Copies a remote file to local if that local file does not already exist. If
the local file pre-exists this function call, it does not check that the local
file is a copy of the remote.
Remote filenames can be filepaths, any URI readable by tensorflow.gfile... | codesearchnet |
def has_shell_command(self, command):
    """Check whether a given shell command exists on the device.

    Args:
        command: A string that is the name of the command to check.

    Returns:
        A boolean that is True if the command exists and False otherwise.
    """
    try:
        raw = self.shell(['command', '-v', command])
    except AdbError:
        # `command -v` failing (or adb itself failing) means it's absent.
        return False
    return command in raw.decode('utf-8').strip()
Args:
command: A string that is the name of the command to check.
Returns:
A boolean that is True if the command exists and False otherwise. | juraj-google-style |
def decode_response(status: int, headers: MutableMapping, body: bytes) -> dict:
    """Decode an incoming response and validate it.

    Args:
        status: Response status code.
        headers: Response headers.
        body: Raw response body.

    Returns:
        dict: The decoded response data.
    """
    payload = decode_body(headers, body)
    # Check transport-level failures first, then API-level error payloads.
    raise_for_status(status, headers, payload)
    raise_for_api_error(headers, payload)
    return payload
Args:
status: Response status
headers: Response headers
body: Response body
Returns:
Response data | codesearchnet |
def tangent(f):
    """Strip `with insert_grad_of` statements so *f* can be called normally.

    Args:
        f: A function.

    Returns:
        A function with any `with insert_grad_of` context managers removed;
        the original function is kept on its ``tangent`` attribute.
    """
    annotated = annotate.resolve_calls(f)
    RemoveWith().visit(annotated)
    stripped = compile_.compile_function(annotated)
    wrapper = functools.wraps(f)(stripped)
    wrapper.tangent = f
    return wrapper
This allows the function to be called as usual.
Args:
f: A function
Returns:
A function with any `with insert_grad_of` context managers removed. | juraj-google-style |
def get_build_info():
    """Get a dictionary describing TensorFlow's build environment.

    Values are generated when TensorFlow is compiled and are static for each
    package, e.g. keys such as cuda_version and is_cuda_build.

    Returns:
        dict: string-keyed build metadata baked in at compile time.
    """
    return build_info.build_info
Values are generated when TensorFlow is compiled, and are static for each
TensorFlow package. The return value is a dictionary with string keys such as:
- cuda_version
- cudnn_version
- is_cuda_build
- is_rocm_build
- msvcp_dll_names
- nvcuda_dll_name
- cuda... | github-repos |
def get_layer_index_bound_by_layer_name(layers, layer_range=None):
if layer_range is not None:
if len(layer_range) != 2:
raise ValueError(f'layer_range must be a list or tuple of length 2. Received: layer_range = {layer_range} of length {len(layer_range)}')
if not isinstance(layer_range[... | Get the layer indexes from the model based on layer names.
The layer indexes can be used to slice the model into sub models for
display.
Args:
model: `Model` instance.
layer_names: a list or tuple of 2 strings, the starting layer name and
ending layer name (both inclusive) for the result. All layers will
be included ... | github-repos |
def local_variables_initializer():
    """Returns an Op that initializes all local variables.

    Shortcut for ``variables_initializer(local_variables())``. Under eager
    execution variables are initialized on creation, so a no-op is returned.

    Returns:
        An Op that runs the initializers of all local variables.
    """
    if context.executing_eagerly():
        return control_flow_ops.no_op(name='local_variables_initializer')
    return variables_initializer(local_variables())
This is just a shortcut for `variables_initializer(local_variables())`
@compatibility(TF2)
In TF2, variables are initialized immediately when they are created. There is
no longer a need to run variable initializers before using them.
@end_compatibility
Returns:
An ... | github-repos |
def get_template(template):
from cloud_inquisitor.database import db
tmpl = db.Template.find_one(template_name=template)
if not tmpl:
raise InquisitorError('No such template found: {}'.format(template))
tmplenv = Environment(loader=BaseLoader, autoescape=True)
tmplenv.filters['json_lo... | Return a Jinja2 template by filename
Args:
template (str): Name of the template to return
Returns:
A Jinja2 Template object | juraj-google-style |
def pretty_emit(self, record, is_header=False, task_level=None):
task = record.task or self.cur_task
if task_level is None:
task_level = self.cur_depth_level
if is_header:
extra_prefix = (
self.get_task_indicator(task_level - 1) + ' ' +
... | Wrapper around the :class:`logging.StreamHandler` emit method to add
some decoration stuff to the message
Args:
record (logging.LogRecord): log record to emit
is_header (bool): if this record is a header, usually, a start or
end task message
task_level (int): If passed, will take that as the current nested
task level ... | juraj-google-style |
def group_alleles_by_start_end_Xbp(arr, bp=28):
starts = arr[(:, 0:bp)]
ends = arr[(:, (- bp):)]
starts_ends_idxs = defaultdict(list)
(l, seq_len) = arr.shape
for i in range(l):
start_i = starts[i]
end_i = ends[i]
start_i_str = ''.join([str(x) for x in start_i])
end_i... | Group alleles by matching ends
Args:
arr (numpy.array): 2D int matrix of alleles
bp (int): length of ends to group by
Returns:
dict of lists: key of start + end strings to list of indices of alleles with matching ends | codesearchnet |
def _parse_session_run_index(self, event):
metadata_string = event.log_message.message
try:
metadata = json.loads(metadata_string)
except ValueError as e:
logger.error(
"Could not decode metadata string '%s' for step value: %s",
metadata_string, e)
return constants... | Parses the session_run_index value from the event proto.
Args:
event: The event with metadata that contains the session_run_index.
Returns:
The int session_run_index value. Or
constants.SENTINEL_FOR_UNDETERMINED_STEP if it could not be determined. | juraj-google-style |
def prune(self, limit=None, n=None, percentile=None, keep_ends=False):
strip = self.copy()
if (not (limit or n or percentile)):
m = 'You must provide a limit or n or percentile for pruning.'
raise StriplogError(m)
if limit:
prune = [i for (i, iv) in enumerate(strip) if (iv.thickness ... | Remove intervals below a certain limit thickness. In place.
Args:
limit (float): Anything thinner than this will be pruned.
n (int): The n thinnest beds will be pruned.
percentile (float): The thinnest specified percentile will be
pruned.
keep_ends (bool): Whether to keep the first and last, regardless
of whether they... | codesearchnet |
def start_task(self, method, *args, **kwargs):
    """Run *method* in a separate thread and track it.

    Args:
        method: the callable to run in a new thread.
        *args: positional arguments forwarded to ``method``.
        **kwargs: keyword arguments forwarded to ``method``.
    """
    worker = threading.Thread(target=method, args=args, kwargs=kwargs)
    # NOTE(review): sets a plain ``is_daemon`` attribute, not the Thread
    # ``daemon`` flag — preserved as-is from the original.
    worker.is_daemon = False
    worker.start()
    self.threads.append(worker)
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments | codesearchnet |
def filepaths_in_dir(path):
    """Find all files under ``path`` and return their paths relative to it.

    Args:
        path (str): the directory path to walk.

    Returns:
        list: relative paths to all files inside ``path`` or its
            subdirectories.
    """
    filepaths = []
    for root, _directories, filenames in os.walk(path):
        for filename in filenames:
            absolute = os.path.join(root, filename)
            # os.path.relpath strips exactly the leading ``path`` prefix.
            # The previous ``filepath.replace(path, '')`` replaced EVERY
            # occurrence of the path string, corrupting results whenever it
            # reappeared later in a file's path.
            filepaths.append(os.path.relpath(absolute, path))
    return filepaths
Args:
path (str): the directory path to walk
Returns:
list: the list of relative paths to all files inside of ``path`` or its
subdirectories. | codesearchnet |
def save_config(self, lookup_key, config):
    """Save a configuration to the cache of configs.

    Args:
        lookup_key: A string containing the cache lookup key.
        config: The dict containing the configuration to save to the cache.
    """
    self._config_lock.acquire()
    try:
        self._configs[lookup_key] = config
    finally:
        self._config_lock.release()
Args:
lookup_key: A string containing the cache lookup key.
config: The dict containing the configuration to save to the cache. | codesearchnet |
def delete_keys(d: Dict[Any, Any], keys_to_delete: List[Any], keys_to_keep: List[Any]) -> None:
    """Delete keys from a dictionary, in place.

    Args:
        d: dictionary to modify.
        keys_to_delete: keys to remove from ``d`` if present...
        keys_to_keep: ...unless they are also present in this list.
    """
    # Hoist into a set once: O(1) membership instead of a list scan per key.
    protected = set(keys_to_keep)
    for k in keys_to_delete:
        if k in d and k not in protected:
            del d[k]
Args:
d:
dictionary to modify
keys_to_delete:
if any keys are present in this list, they are deleted...
keys_to_keep:
... unless they are present in this list. | codesearchnet |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.