code stringlengths 20 4.93k | docstring stringlengths 33 1.27k | source stringclasses 3
values |
|---|---|---|
def read_chunk_header(self):
try:
chunk_size_hex = (yield from self._connection.readline())
except ValueError as error:
raise ProtocolError('Invalid chunk size: {0}'.format(error)) from error
if (not chunk_size_hex.endswith(b'\n')):
raise NetworkError('Connection closed.')
try:
... | Read a single chunk's header.
Returns:
tuple: 2-item tuple with the size of the content in the chunk and
the raw header byte string.
Coroutine. | codesearchnet |
def _call_with_structured_signature(self, args, kwargs):
    """Execute the wrapped function with the structured signature.

    Args:
        args: Positional arguments to the concrete function.
        kwargs: Keyword arguments to the concrete function.

    Returns:
        The result of applying the function on the Tensors/Variables
        contained in `args` and `kwargs`.

    Raises:
        TypeError: if `args` and `kwargs` do not match the function type.
    """
    # Bind and validate the inputs against this function's type signature.
    bound = function_type_utils.canonicalize_function_inputs(
        args, kwargs, self.function_type)
    # Flatten the bound arguments into the order the flat call expects.
    flat_args = self.function_type.unpack_inputs(bound)
    return self._call_flat(flat_args, captured_inputs=self.captured_inputs)
def run(self, args):
    """Erase the device connected to the J-Link.

    Args:
        self (EraseCommand): the ``EraseCommand`` instance
        args (Namespace): the arguments passed on the command-line

    Returns:
        ``None``
    """
    link = self.create_jlink(args)
    erased_bytes = link.erase()
    print('Bytes Erased: %d' % erased_bytes)
def get_ssm_parameter(parameter_name):
    """Get the decrypted value of an SSM parameter.

    Args:
        parameter_name: the name of the stored parameter of interest

    Returns:
        str: the decrypted value, or '' when the parameter is missing or
        access is denied.
    """
    try:
        response = boto3.client('ssm').get_parameters(
            Names=[parameter_name],
            WithDecryption=True,
        )
        # get_parameters returns a (possibly empty) 'Parameters' list; a
        # missing name lands in 'InvalidParameters' instead of raising.
        # The original `response.get('Parameters', None)[0]` raised
        # TypeError/IndexError in those cases and relied on the broad
        # except below to mask it.
        parameters = response.get('Parameters') or []
        if parameters:
            return parameters[0].get('Value', '')
    except Exception:
        # Best-effort lookup: any client or permission error falls through
        # to the empty-string default. NOTE(review): the docstring promised
        # None on failure, but the code has always returned ''.
        pass
    return ''
def get_min_instability(self, min_voltage=None, max_voltage=None):
data = []
for pair in self._select_in_voltage_range(min_voltage, max_voltage):
if (pair.decomp_e_charge is not None):
data.append(pair.decomp_e_charge)
if (pair.decomp_e_discharge is not None):
data.append... | The minimum instability along a path for a specific voltage range.
Args:
min_voltage: The minimum allowable voltage.
max_voltage: The maximum allowable voltage.
Returns:
Minimum decomposition energy of all compounds along the insertion
path (a subset of the path can be chosen by the optional arguments) | codesearchnet |
def isloaded(self, name):
    """Check whether the given hook module has been loaded.

    Args:
        name (str): the name of the module to check; ``None`` always
            matches, and an iterable of names checks them all.

    Returns:
        bool: True if loaded (or ``name`` is None), False otherwise.
    """
    if name is None:
        return True
    if isinstance(name, str):
        return name in (hook.__module__ for hook in self)
    if isinstance(name, Iterable):
        return set(name).issubset(hook.__module__ for hook in self)
    return False
def parse(filename, encoding=None):
    """Yield whitespace-separated words from a text file, one at a time.

    Args:
        filename: absolute or relative path to the file on disk
        encoding: optional encoding name passed through to ``open``
    """
    with open(filename, encoding=encoding) as handle:
        for line in handle:
            yield from line.split()
def number_of_shards(self):
    """Get the number of shards to use for the InfeedQueue.

    Returns:
        Number of shards, or None if the number of shards has not been set.
    """
    # All sharding policies share the shard count; the first is authoritative.
    first_policy = self._sharding_policies[0]
    return first_policy.number_of_shards
def getAsGeoJson(self, session):
    """Retrieve the geometry in GeoJSON format.

    This method is a veneer for an SQL query that calls the
    ``ST_AsGeoJSON()`` function on the geometry column.

    Args:
        session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session
            bound to a PostGIS enabled database.

    Returns:
        str: GeoJSON string representation of the geometry, or None if the
        query returned no rows.
    """
    # NOTE(review): the original string literal was lost during extraction
    # (`statement = .format(...)`); reconstructed from the documented
    # ST_AsGeoJSON contract. Identifiers cannot be bound as SQL
    # parameters, hence the format() interpolation of trusted attributes.
    statement = 'SELECT ST_AsGeoJSON({0}) AS json FROM {1} WHERE id={2};'.format(
        self.geometryColumnName,
        self.tableName,
        self.id)
    result = session.execute(statement)
    for row in result:
        return row.json
def _Open(self, path_spec, mode='rb'):
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
self._file_object = fil... | Opens the file system object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode. The default is 'rb' which
represents read-only binary.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be ope... | juraj-google-style |
def _get_resized_embeddings(self, old_embeddings, new_num_tokens=None) -> tf.Variable:
old_embedding_dim = shape_list(old_embeddings)[1]
init_range = getattr(self.config, 'initializer_range', 0.02)
embeddings_mask, current_embeddings = init_copy_embeddings(old_embeddings, new_num_tokens)
new_embeddings ... | Build a resized Embedding weights from a provided token Embedding weights. Increasing the size will add newly
initialized vectors at the end. Reducing the size will remove vectors from the end
Args:
old_embeddings (`tf.Variable`):
Old embeddings to be resized.
new_num_tokens (`int`, *optional*):
New number of tokens i... | github-repos |
def _CheckFileEntryType(self, file_entry):
    """Check the file entry against the type find specifications.

    Args:
        file_entry (FileEntry): file entry.

    Returns:
        bool: True if the file entry matches the find specification, False
        if not, or None if no file entry type specification is defined.
    """
    if not self._file_entry_types:
        return None
    type_checks = (
        self._CheckIsDevice,
        self._CheckIsDirectory,
        self._CheckIsFile,
        self._CheckIsLink,
        self._CheckIsPipe,
        self._CheckIsSocket)
    # Mirrors the original or-chain: first truthy result wins, otherwise
    # the value of the last check is returned.
    result = None
    for check in type_checks:
        result = check(file_entry)
        if result:
            return result
    return result
def apply_operation(self, symmop, fractional=False):
if not fractional:
self._lattice = Lattice([symmop.apply_rotation_only(row)
for row in self._lattice.matrix])
def operate_site(site):
new_cart = symmop.operate(site.coords)... | Apply a symmetry operation to the structure and return the new
structure. The lattice is operated by the rotation matrix only.
Coords are operated in full and then transformed to the new lattice.
Args:
symmop (SymmOp): Symmetry operation to apply.
fractional (bool): Whether the symmetry operation is applied in
fractio... | juraj-google-style |
def peek_all(self, model_class):
    """Return a list of models from the local cache.

    Args:
        model_class (:class:`cinder_data.model.CinderModel`): a subclass of
            :class:`cinder_data.model.CinderModel` of your chosen model.

    Returns:
        list: instances of ``model_class``, or an empty list when there is
        no cache.
    """
    cache = self._cache
    if not cache:
        return []
    return cache.get_records(model_class.__name__)
def get_interpolated_value(self, energy):
    """Return the interpolated density for a particular energy.

    Args:
        energy: Energy to return the density for.

    Returns:
        dict: spin -> linearly interpolated density at ``energy``.
    """
    return {
        spin: get_linear_interpolated_value(
            self.energies, self.densities[spin], energy)
        for spin in self.densities
    }
async def vsetup(self, author):
if self.vready:
logger.warning("Attempt to init voice when already initialised")
return
if self.state != 'starting':
logger.error("Attempt to init from wrong state ('{}'), must be 'starting'.".format(self.state))
... | Creates the voice client
Args:
author (discord.Member): The user that the voice ui will seek | juraj-google-style |
def get_variant_by_name(self, name):
results = []
try:
for info, dosage in self._bgen.get_variant(name):
results.append(Genotypes(
Variant(
info.name,
CHROM_STR_ENCODE.get(info.chrom, info.chrom),
... | Get the genotype of a marker using it's name.
Args:
name (str): The name of the marker.
Returns:
list: A list of Genotypes. | juraj-google-style |
def __ComputeEndByte(self, start, end=None, use_chunks=True):
end_byte = end
if ((start < 0) and (not self.total_size)):
return end_byte
if use_chunks:
alternate = ((start + self.chunksize) - 1)
if (end_byte is not None):
end_byte = min(end_byte, alternate)
else:
... | Compute the last byte to fetch for this request.
This is all based on the HTTP spec for Range and
Content-Range.
Note that this is potentially confusing in several ways:
* the value for the last byte is 0-based, eg "fetch 10 bytes
from the beginning" would return 9 here.
* if we have no information about size, and do... | codesearchnet |
def is_insert_grad_of_statement(node):
    """Check whether a context manager calls `insert_grad_of`.

    Args:
        node: The context manager node.

    Returns:
        Whether or not this node contains `insert_grad_of` calls.

    Raises:
        ValueError: If the `insert_grad_of` calls are mixed with other
            calls.
    """
    total = len(node.items)
    matches = sum(
        1 for item in node.items
        if anno.getanno(item.context_expr, 'func', None) is utils.insert_grad_of)
    if matches == total:
        # All items (vacuously true for zero items) are insert_grad_of calls.
        return True
    if matches:
        # A mix of insert_grad_of and other calls is not supported.
        raise ValueError
    return False
def add_to_cache(cls, remote_info, container):
if (not isinstance(container, cls)):
raise TypeError(('%r not an instance of %r, could not be added to cache.' % (container, cls)))
if (remote_info in cls.__remote_info_cache):
raise KeyError('Cache has collision but should not.')
cls.__remote_i... | Adds a ResourceContainer to a cache tying it to a protorpc method.
Args:
remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding
to a method.
container: An instance of ResourceContainer.
Raises:
TypeError: if the container is not an instance of cls.
KeyError: if the remote method has been reference b... | codesearchnet |
def AddIndex(self, path_segment_index):
    """Add a path segment index and set its weight to 0.

    Args:
        path_segment_index: an integer containing the path segment index.

    Raises:
        ValueError: if the path segment weights already contain the path
            segment index.
    """
    weights = self._weight_per_index
    if path_segment_index in weights:
        raise ValueError('Path segment index already set.')
    weights[path_segment_index] = 0
def _update_unenrolled_list(sailthru_client, email, course_url, unenroll):
try:
sailthru_response = sailthru_client.api_get("user", {"id": email, "fields": {"vars": 1}})
if not sailthru_response.is_ok():
error = sailthru_response.get_error()
logger.error("Error ... | Maintain a list of courses the user has unenrolled from in the Sailthru user record
Arguments:
sailthru_client (object): SailthruClient
email (str): user's email address
course_url (str): LMS url for course info page.
unenroll (boolean): True if unenrolling, False if enrolling
Returns:
False if retryable error, else ... | juraj-google-style |
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
local_stream = BytearrayStream()
if self._wrapping_method:
self._wrapping_method.write(local_stream, kmip_version=kmip_version)
else:
raise ValueError('Invalid struct missing the wrapping method attribute.')
if self... | Write the data encoding the KeyWrappingSpecification struct to a
stream.
Args:
output_stream (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. O... | codesearchnet |
def is_diagonal(matrix: np.ndarray, *, atol: float=1e-08) -> bool:
    """Determine if a matrix is approximately diagonal.

    A matrix is diagonal if i!=j implies m[i,j]==0.

    Args:
        matrix: The matrix to check.
        atol: The per-matrix-entry absolute tolerance on equality.

    Returns:
        Whether the matrix is diagonal within the given tolerance.
    """
    off_diagonal = np.copy(matrix)
    # Zero the main diagonal so only the off-diagonal entries are tested.
    np.fill_diagonal(off_diagonal, 0)
    return tolerance.all_near_zero(off_diagonal, atol=atol)
def validate(self):
for schema in (self.headers_schema, Message.headers_schema):
_log.debug('Validating message headers "%r" with schema "%r"', self._headers, schema)
jsonschema.validate(self._headers, schema)
for schema in (self.body_schema, Message.body_schema):
_log.debug('Validating ... | Validate the headers and body with the message schema, if any.
In addition to the user-provided schema, all messages are checked against
the base schema which requires certain message headers and the that body
be a JSON object.
.. warning:: This method should not be overridden by sub-classes.
Raises:
jsonschema.Vali... | codesearchnet |
def fetch(self, payment_id, data=None, **kwargs):
    """Fetch the Payment for a given id.

    Args:
        payment_id: id for which the payment object has to be retrieved
        data: optional query parameters for the request

    Returns:
        Payment dict for the given payment id
    """
    # Use None instead of a mutable {} default so a dict mutated by one
    # call cannot leak into later calls.
    if data is None:
        data = {}
    return super(Payment, self).fetch(payment_id, data, **kwargs)
def switch_window(self, window_id: int):
if window_id not in self.tmux_available_window_ids:
for i in range(max(self.tmux_available_window_ids)+1, window_id+1):
self._run_raw(f'tmux new-window -t {self.tmux_session} -d')
tmux_window = self.tmux_session + ':' + str(i)
c... | Switches currently active tmux window for given task. 0 is the default window
Args:
window_id: integer id of tmux window to use | juraj-google-style |
def on_channel_open(self, channel):
    """Input channel creation callback.

    Declares the durable topic exchange and the input queue.

    Args:
        channel: input channel
    """
    # Durable topic exchange for incoming messages.
    self.in_channel.exchange_declare(
        exchange='input_exc', type='topic', durable=True)
    channel.queue_declare(
        callback=self.on_input_queue_declare, queue=self.INPUT_QUEUE_NAME)
class SquaredHinge(reduction_metrics.MeanMetricWrapper):
    """Computes the hinge metric between `y_true` and `y_pred`.

    `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels
    are provided we will convert them to -1 or 1.

    Args:
        name: (Optional) string name of the metric instance.
        dtype: (Optional) data type of the metric result.
    """

    def __init__(self, name='squared_hinge', dtype=None):
        super().__init__(fn=squared_hinge, name=name, dtype=dtype)
        # Lower values are better for this metric.
        self._direction = 'down'

    def get_config(self):
        return dict(name=self.name, dtype=self.dtype)
def supports_suggested_actions(channel_id: str, button_cnt: int=100) -> bool:
max_actions = {Channels.facebook: 10, Channels.skype: 10, Channels.line: 13, Channels.kik: 20, Channels.telegram: 100, Channels.slack: 100, Channels.emulator: 100, Channels.direct_line: 100, Channels.webchat: 100}
return ((button_cnt ... | Determine if a number of Suggested Actions are supported by a Channel.
Args:
channel_id (str): The Channel to check the if Suggested Actions are supported in.
button_cnt (int, optional): Defaults to 100. The number of Suggested Actions to check for the Channel.
Returns:
bool: True if the Channel supports the button_c... | codesearchnet |
def _add_length_constrain(token_lst: List[Dict], lengths: List) -> List[Dict]:
result = []
for a_token in token_lst:
for length in lengths:
if type(length) == str and length and length.isdigit():
a_token[attrs.LENGTH] = int(length)
... | Add length constrain for some token type, create cross production
Args:
token_lst: List[Dict]
lengths: List
Returns: List[Dict] | juraj-google-style |
def memory_write16(self, addr, data, zone=None):
    """Write half-words to memory of a target system.

    Args:
        self (JLink): the ``JLink`` instance
        addr (int): start address to write to
        data (list): list of half-words to write
        zone (str): optional memory zone to access

    Returns:
        Number of half-words written to target.

    Raises:
        JLinkException: on memory access error.
    """
    nbits = 16
    return self.memory_write(addr, data, zone, nbits)
def create(self, path, mime_type='application/octet-stream', compression_type=CompressionTypes.AUTO) -> BinaryIO:
    """Returns a write channel for the given file path.

    Args:
        path: string path of the file object to be written to the system
        mime_type: MIME type to specify the type of content in the file object
        compression_type: Type of compression to be used for this object

    Returns: file handle with a close function for the user to use
    """
    parent_dir = os.path.dirname(path)
    if parent_dir:
        # Ensure the destination directory exists before opening the file.
        os.makedirs(parent_dir, exist_ok=True)
    return self._path_open(path, 'wb', mime_type, compression_type)
def __init__(self, component1=None, component2=None):
if component1 is None and component2 is not None:
component1 = component2
component2 = None
self._llhead = None
self._lltail = None
if isinstance(component1, CompositeBitarray):
... | Create a bitarray object that stores its components by reference).
Args:
*components: Any number of bitarray instances to store in this composition. | juraj-google-style |
def NewEvent(type: str, id: UUID=None, data: JsonDict=None, metadata: JsonDict=None) -> NewEventData:
    """Build the data structure for a new event.

    Args:
        type: An event type.
        id: The uuid identifier for the event; generated when omitted.
        data: A dict containing data for the event. These data must be
            json serializable.
        metadata: A dict containing metadata about the event. These must
            be json serializable.
    """
    event_id = id or uuid4()
    return NewEventData(event_id, type, data, metadata)
def crt(self, mp, mq):
    """The Chinese Remainder Theorem as needed for decryption.

    Combines the residues modulo p and q into the solution modulo n=pq.

    Args:
        mp (int): the solution modulo p.
        mq (int): the solution modulo q.

    Returns:
        int: the combined solution modulo n.
    """
    # Garner's formula: lift mp to the unique value that also matches mq.
    correction = ((mq - mp) * self.p_inverse) % self.q
    return mp + correction * self.p
def save(self, representative_dataset: RepresentativeDatasetMapping) -> Mapping[str, _RepresentativeDatasetFile]:
    """Saves the representative dataset.

    Args:
        representative_dataset: RepresentativeDatasetMapping which is a
            signature_def_key -> representative dataset mapping.

    Raises:
        NotImplementedError: always; subclasses must provide the
            implementation.
    """
    raise NotImplementedError('Method "save" is not implemented.')
def handle_subscribed_event(self, event_obj, event_name):
    """Execute the registered handler of an event.

    Retrieves the handler and its bound arguments and submits the call to
    the executor so it runs on a separate thread.

    Args:
        event_obj: Json object of the event.
        event_name: Name of the event to call handler for.
    """
    handler, handler_args = self.handlers[event_name]
    self.executor.submit(handler, event_obj, *handler_args)
def group_associations_types(self, group_type, api_entity=None, api_branch=None, params=None):
if params is None:
params = {}
if not self.can_update():
self._tcex.handle_error(910, [self.type])
target = self._tcex.ti.group(group_type)
for gat in self.tc... | Gets the group association from a Indicator/Group/Victim
Args:
group_type:
api_entity:
api_branch:
params:
Returns: | juraj-google-style |
def _list_profile_sort_key(profile_datum, sort_by):
if sort_by == SORT_OPS_BY_OP_NAME:
return profile_datum.node_exec_stats.node_name
elif sort_by == SORT_OPS_BY_OP_TYPE:
return profile_datum.op_type
elif sort_by == SORT_OPS_BY_LINE:
return profile_datum.file_line_func
elif sort_... | Get a profile_datum property to sort by in list_profile command.
Args:
profile_datum: A `ProfileDatum` object.
sort_by: (string) indicates a value to sort by.
Must be one of SORT_BY* constants.
Returns:
profile_datum property to sort by. | github-repos |
def _process_counter_example(self, mma, w_string):
w_string = self._find_bad_transition(mma, w_string)
diff = len(w_string)
same = 0
while True:
i = (same + diff) / 2
access_string = self._run_in_hypothesis(mma, w_string, i)
is_diff = self._c... | Process a counterexample in the Rivest-Schapire way.
Args:
mma (DFA): The hypothesis automaton
w_string (str): The examined string to be consumed
Returns:
None | juraj-google-style |
def raw_sql(cls, cur, query: str, values: tuple):
    """Run a raw sql query.

    Args:
        query: query string to execute
        values: tuple of values to be used with the query

    Returns:
        Result of the query as a list of named tuples.
    """
    # Execute first, then collect and return every resulting row.
    yield from cur.execute(query, values)
    rows = yield from cur.fetchall()
    return rows
def pack_sequence_as(structure, flat_sequence):
flat_sequence = list(flat_sequence)
flattened_structure = nest.flatten(structure, expand_composites=True)
if len(flattened_structure) != len(flat_sequence):
raise ValueError('Mismatch in element count')
for i in range(len(flat_sequence)):
i... | Like `nest.pack_sequence_as` but also builds TensorArrays from flows.
Args:
structure: The structure to pack into. May contain Tensors,
CompositeTensors, or TensorArrays.
flat_sequence: An iterable containing tensors.
Returns:
A nested structure.
Raises:
AssertionError if `structure` and `flat_sequence` are not comp... | github-repos |
def _process_example_section(func_documentation, func, parent_class, class_name, model_name_lowercase, config_class, checkpoint, indent_level):
from transformers.models import auto as auto_module
example_docstring = ''
if func_documentation is not None and (match := re.search('(?m)^([ \\t]*)(?=Example)', fu... | Process the example section of the docstring.
Args:
func_documentation (`str`): Existing function documentation (manually specified in the docstring)
func (`function`): Function being processed
parent_class (`class`): Parent class of the function
class_name (`str`): Name of the class
model_name_lowercase (`str`): Lowe... | github-repos |
def gff3_verifier(entries, line=None):
regex = r'^[a-zA-Z0-9.:^*$@!+_?-|]+\t.+\t.+\t\d+\t\d+\t' \
+ r'\d*\.?\d*\t[+-.]\t[.0-2]\t.+{0}$'.format(os.linesep)
delimiter = r'\t'
for entry in entries:
try:
entry_verifier([entry.write()], regex, delimiter)
except Form... | Raises error if invalid GFF3 format detected
Args:
entries (list): A list of GFF3Entry instances
line (int): Line number of first entry
Raises:
FormatError: Error when GFF3 format incorrect with descriptive message | juraj-google-style |
def parse_object_like_triples(self):
self.rdf.triples = SimpleNamespace()
for (s, p, o) in self.rdf.graph:
(ns_prefix, ns_uri, predicate) = self.rdf.graph.compute_qname(p)
if (not hasattr(self.rdf.triples, ns_prefix)):
setattr(self.rdf.triples, ns_prefix, SimpleNamespace())
i... | method to parse triples from self.rdf.graph for object-like
access
Args:
None
Returns:
None: sets self.rdf.triples | codesearchnet |
def match_next_flag(tt_flags, pos):
match = _FLAG_DOUBLE_QUOTE_PAT.match(tt_flags, pos)
if match:
return (match, True)
match = _FLAG_SINGLE_QUOTE_PAT.match(tt_flags, pos)
if match:
return (match, True)
match = _FLAG_NO_QUOTE_PAT.match(tt_flags, pos)
if match:
return (matc... | Returns the match for the next TensorTracer flag.
Args:
tt_flags: a string that contains the flags.
pos: where in flags to start the search.
Returns:
A pair where the first element is the regular-expression
match found and the second element indicates if the match
has a value. | github-repos |
def _AnsiCmd(command_list):
if not isinstance(command_list, list):
raise ValueError('Invalid list: %s' % command_list)
for sgr in command_list:
if sgr.lower() not in SGR:
raise ValueError('Invalid or unsupported SGR name: %s' % sgr)
command_str = [str(SGR[x.lower()]) for x in command_lis... | Takes a list of SGR values and formats them as an ANSI escape sequence.
Args:
command_list: List of strings, each string represents an SGR value.
e.g. 'fg_blue', 'bg_yellow'
Returns:
The ANSI escape sequence.
Raises:
ValueError: if a member of command_list does not map to a valid SGR value. | juraj-google-style |
def charges(self, num, charge_id=None, **kwargs):
baseuri = (self._BASE_URI + 'company/{}/charges'.format(num))
if (charge_id is not None):
baseuri += '/{}'.format(charge_id)
res = self.session.get(baseuri, params=kwargs)
else:
res = self.session.get(baseuri, params=kwargs)
self.... | Search for charges against a company by company number.
Args:
num (str): Company number to search on.
transaction (Optional[str]): Filing record number.
kwargs (dict): additional keywords passed into
requests.session.get params keyword. | codesearchnet |
def write(self, destination, filename, content):
    """Write a file at the specified destination with the given content.

    The destination directory is created if it does not already exist.

    Args:
        destination (string): the destination location
        filename (string): the filename that will be written
        content (string): the content of the filename
    """
    # exist_ok avoids the racy check-then-create of the original, which
    # also silently swallowed every makedirs error with a bare except.
    os.makedirs(destination, exist_ok=True)
    filepath = os.path.join(destination, filename)
    # Context manager guarantees the handle is closed even if write()
    # raises; the original leaked the handle in that case.
    with open(filepath, 'w+') as handle:
        handle.write(content)
def get_key_flags_for_module(self, module):
if not isinstance(module, str):
module = module.__name__
key_flags = self._get_flags_defined_by_module(module)
for flag in self.key_flags_by_module_dict().get(module, []):
if flag not in key_flags:
key_flags.append(fl... | Returns the list of key flags for a module.
Args:
module: module|str, the module to get key flags from.
Returns:
[Flag], a new list of Flag instances. Caller may update this list as
desired: none of those changes will affect the internals of this
FlagValue instance. | juraj-google-style |
def contains(self, sub):
sub = sub.lower()
found_words = set()
res = cgaddag.gdg_contains(self.gdg, sub.encode(encoding="ascii"))
tmp = res
while tmp:
word = tmp.contents.str.decode("ascii")
found_words.add(word)
tmp = tmp.contents.n... | Find all words containing a substring.
Args:
sub: A substring to be searched for.
Returns:
A list of all words found. | juraj-google-style |
def get(self, txn_id):
    """Return the TransactionReceipt for a transaction id.

    Args:
        txn_id (str): the id of the transaction for which the receipt
            should be retrieved.

    Returns:
        TransactionReceipt: The receipt for the given transaction id.

    Raises:
        KeyError: if the transaction id is unknown.
    """
    if txn_id not in self._receipt_db:
        raise KeyError('Unknown transaction id {}'.format(txn_id))
    receipt = TransactionReceipt()
    # Stored receipts are serialized protobuf bytes.
    receipt.ParseFromString(self._receipt_db[txn_id])
    return receipt
def mt_excel_files(store, case_obj, temp_excel_dir):
today = datetime.datetime.now().strftime('%Y-%m-%d')
samples = case_obj.get('individuals')
query = {'chrom': 'MT'}
mt_variants = list(store.variants(case_id=case_obj['_id'], query=query, nr_of_variants=(- 1), sort_key='position'))
written_files = ... | Collect MT variants and format line of a MT variant report
to be exported in excel format
Args:
store(adapter.MongoAdapter)
case_obj(models.Case)
temp_excel_dir(os.Path): folder where the temp excel files are written to
Returns:
written_files(int): the number of files written to temp_excel_dir | codesearchnet |
def DisableInterfaces(interface):
set_tested_versions = ['vista', '2008']
set_args = ['/c', 'netsh', 'set', 'interface', interface, 'DISABLED']
host_version = platform.platform().lower()
for version in set_tested_versions:
if (host_version.find(version) != (- 1)):
res = client_utils_... | Tries to disable an interface. Only works on Vista and 7.
Args:
interface: Name of the interface to disable.
Returns:
res which is a tuple of (stdout, stderr, exit_status, time_taken). | codesearchnet |
def get_resize_output_image_size(input_image: np.ndarray, size: Union[int, Tuple[int, int], List[int]], max_size: Optional[int]=None, input_data_format: Optional[Union[str, ChannelDimension]]=None) -> Tuple[int, int]:
image_size = get_image_size(input_image, input_data_format)
if isinstance(size, (list, tuple))... | Computes the output image size given the input image size and the desired output size. If the desired output size
is a tuple or list, the output image size is returned as is. If the desired output size is an integer, the output
image size is computed by keeping the aspect ratio of the input image size.
Args:
input_ima... | github-repos |
def apply_operation(self, symmop):
    """Apply a symmetry operation to the molecule, in place.

    Args:
        symmop (SymmOp): Symmetry operation to apply to every site.
    """
    def transform(site):
        # Operate on cartesian coordinates and rebuild the site with the
        # same species and properties.
        moved = symmop.operate(site.coords)
        return Site(site.species, moved, properties=site.properties)

    self._sites = [transform(site) for site in self._sites]
def chdir(self, target_directory):
target_directory = self.filesystem.resolve_path(
target_directory, allow_fd=True)
self.filesystem.confirmdir(target_directory)
directory = self.filesystem.resolve(target_directory)
if not is_root() and not director... | Change current working directory to target directory.
Args:
target_directory: The path to new current working directory.
Raises:
OSError: if user lacks permission to enter the argument directory
or if the target is not a directory. | juraj-google-style |
def _resolve_non_literal_route(self, method, path):
for route_dict in (self._wildcard, self._regex):
if method in route_dict:
for route in reversed(route_dict[method]):
callback_data = route.match(path)
if callback_data is not None:
... | Resolve a request to a wildcard or regex route handler.
Arguments:
method (str): HTTP method name, e.g. GET, POST, etc.
path (str): Request path
Returns:
tuple or None: A tuple of three items:
1. Route handler (callable)
2. Positional arguments (list)
3. Keyword arguments (dict)
``None`` if no route matches the req... | juraj-google-style |
def message(self, tree, spins, subtheta, auxvars):
energy_sources = set()
for v, children in tree.items():
aux = auxvars[v]
assert all(u in spins for u in self._ancestors[v])
def energy_contributions():
... | Determine the energy of the elimination tree.
Args:
tree (dict): The current elimination tree
spins (dict): The current fixed spins
subtheta (dict): Theta with spins fixed.
auxvars (dict): The auxiliary variables for the given spins.
Returns:
The formula for the energy of the tree. | juraj-google-style |
def create_streaming_endpoint(access_token, name, description="New Streaming Endpoint", \
scale_units="1"):
path = '/StreamingEndpoints'
endpoint = ''.join([ams_rest_endpoint, path])
body = '{ \
"Id":null, \
"Name":"' + name + '", \
"Description":"' + description + '", \
"Created":"0001-01-01T0... | Create Media Service Streaming Endpoint.
Args:
access_token (str): A valid Azure authentication token.
name (str): A Media Service Streaming Endpoint Name.
description (str): A Media Service Streaming Endpoint Description.
scale_units (str): A Media Service Scale Units Number.
Returns:
HTTP response. JSON body. | juraj-google-style |
def as_dict(self, verbosity=0):
species_list = []
for spec, occu in self._species.items():
d = spec.as_dict()
del d["@module"]
del d["@class"]
d["occu"] = occu
species_list.append(d)
d = {"species": species_list,
... | Json-serializable dict representation of PeriodicSite.
Args:
verbosity (int): Verbosity level. Default of 0 only includes the
matrix representation. Set to 1 for more details such as
cartesian coordinates, etc. | juraj-google-style |
def init_database(connection=None, dbname=None):
    """Initialize the configured backend for use with BigchainDB.

    Creates a database with :attr:`dbname` with any required tables and
    supporting indexes.

    Args:
        connection (:class:`~bigchaindb.backend.connection.Connection`): an
            existing connection to use to initialize the database.
            Creates one if not given.
        dbname (str): the name of the database to create. Defaults to the
            configured database name.
    """
    if not connection:
        connection = connect()
    if not dbname:
        dbname = bigchaindb.config['database']['name']
    create_database(connection, dbname)
    create_tables(connection, dbname)
def save_imgs(x, fname):
n = x.shape[0]
fig = figure.Figure(figsize=(n, 1), frameon=False)
canvas = backend_agg.FigureCanvasAgg(fig)
for i in range(n):
ax = fig.add_subplot(1, n, (i + 1))
ax.imshow(x[i].squeeze(), interpolation='none', cmap=cm.get_cmap('binary'))
ax.axis('off')
... | Helper method to save a grid of images to a PNG file.
Args:
x: A numpy array of shape [n_images, height, width].
fname: The filename to write to (including extension). | codesearchnet |
def CreateSharedBudget(client):
budget_service = client.GetService('BudgetService', version='v201809')
budget = {'name': ('Shared Interplanetary Budget
operation = {'operator': 'ADD', 'operand': budget}
response = budget_service.mutate([operation])
return response['value'][0] | Creates an explicit budget to be used only to create the Campaign.
Args:
client: AdWordsClient the client to run the example with.
Returns:
dict An object representing a shared budget. | codesearchnet |
def map_into_course(self, course_key):
return self.replace(usage_key=self.usage_key.map_into_course(course_key)) | Return a new :class:`UsageKey` or :class:`AssetKey` representing this usage inside the
course identified by the supplied :class:`CourseKey`. It returns the same type as
`self`
Args:
course_key (:class:`CourseKey`): The course to map this object into.
Returns:
A new :class:`CourseObjectMixin` instance. | juraj-google-style |
def generate_mediation_matrix(dsm):
cat = dsm.categories
ent = dsm.entities
size = dsm.size[0]
if (not cat):
cat = (['appmodule'] * size)
packages = [e.split('.')[0] for e in ent]
mediation_matrix = [[0 for _ in range(size)] for _ in range(size)]
for i in range(0, size):
for ... | Generate the mediation matrix of the given matrix.
Rules for mediation matrix generation:
Set -1 for items NOT to be considered
Set 0 for items which MUST NOT be present
Set 1 for items which MUST be present
Each module has optional dependencies to itself.
- Framework has optional dependency to all framework items ... | codesearchnet |
def coordinate_tensor(shape, axis):
if axis < 0:
axis = tf.size(shape) + axis
r = tf.range(shape[axis])
r_shape = tf.one_hot(
axis, tf.size(shape), on_value=-1, off_value=1, dtype=tf.int32)
return tf.zeros(shape, dtype=tf.int32) + tf.reshape(r, r_shape) | Return a tensor with given shape containing coordinate along given axis.
Args:
shape: a Tensor representing the shape of the output Tensor
axis: an integer
Returns:
A tensor with shape shape and type tf.int32, where each elements its
coordinate along the given axis. | juraj-google-style |
def duplicated_initializer(tc, init, graph_seed, shape=None):
if shape is None:
shape = [100]
with tc.test_session(graph=ops.Graph()):
random_seed.set_random_seed(graph_seed)
t1 = init(shape).eval()
t2 = init(shape).eval()
return np.allclose(t1, t2, rtol=1e-15, atol=1e-15... | Tests duplicated random initializer within the same graph.
This test generates two random kernels from the same initializer to the same
graph, and checks if the results are close enough. Even given the same global,
seed, two different instances of random kernels should generate different
results.
Args:
tc: An instanc... | github-repos |
def calc_control_outputs(self, graph):
control_outputs = {}
for op in graph.get_operations():
for control_input in op.control_inputs:
if control_input not in control_outputs:
control_outputs[control_input] = set()
control_outputs[control_input].add(op)
return ... | Returns the map of control_outputs for a given graph.
Args:
graph: The graph to parse.
Returns:
A map of the control outputs. | github-repos |
def generate_token(key, user_id, action_id='', when=None):
digester = hmac.new(_helpers._to_bytes(key, encoding='utf-8'))
digester.update(_helpers._to_bytes(str(user_id), encoding='utf-8'))
digester.update(DELIMITER)
digester.update(_helpers._to_bytes(action_id, encoding='utf-8'))
digester.update(DE... | Generates a URL-safe token for the given user, action, time tuple.
Args:
key: secret key to use.
user_id: the user ID of the authenticated user.
action_id: a string identifier of the action they requested
authorization for.
when: the time in seconds since the epoch at which the user was
authorized for this action. If ... | codesearchnet |
def get_completions(self, context_word, prefix):
if context_word not in self._comp_dict:
return (None, None)
comp_items = self._comp_dict[context_word]
comp_items = sorted([item for item in comp_items if item.startswith(prefix)])
return (comp_items, self._common_prefix(comp_items)) | Get the tab completions given a context word and a prefix.
Args:
context_word: The context word.
prefix: The prefix of the incomplete word.
Returns:
(1) None if no registered context matches the context_word.
A list of str for the matching completion items. Can be an empty list
of a matching context exists, but no co... | github-repos |
def sap_sid_nr(broker):
insts = broker[DefaultSpecs.saphostctrl_listinstances].content
hn = broker[DefaultSpecs.hostname].content[0].split('.')[0].strip()
results = set()
for ins in insts:
ins_splits = ins.split(' - ')
if (ins_splits[2].strip() == hn):
results.add((ins_splits... | Get the SID and Instance Number
Typical output of saphostctrl_listinstances::
# /usr/sap/hostctrl/exe/saphostctrl -function ListInstances
Inst Info : SR1 - 01 - liuxc-rhel7-hana-ent - 749, patch 418, changelist 1816226
Returns:
(list): List of tuple of SID and Instance Number. | codesearchnet |
def Serialize(self, writer):
writer.WriteUInt256(self.PrevHash)
writer.WriteUInt16(self.PrevIndex) | Serialize object.
Args:
writer (neo.IO.BinaryWriter): | juraj-google-style |
def make_block(cls, header: str='', content: str | dict[str, Any] | list[Any] | tuple[Any, ...]=(), *, braces: Union[str, tuple[str, str]]='(', equal: str='=', limit: int=20) -> str:
if isinstance(braces, str):
braces = _BRACE_TO_BRACES[braces]
brace_start, brace_end = braces
if isinstance(content, ... | Util function to create a code block.
Example:
```python
epy.Lines.make_block('A', {}) == 'A()'
epy.Lines.make_block('A', {'x': '1'}) == 'A(x=1)'
epy.Lines.make_block('A', {'x': '1', 'y': '2'}) == '''A(
x=1,
y=2,
)'''
```
Pattern is as:
```
{header}{braces[0]}
{k}={v},
...
{braces[1]}
```
Args:
header: Prefix befo... | github-repos |
def reduce_concat(self, x):
return self.reduce(lambda y: y, x) | Performs a concat reduction on `x` across pfor iterations.
Note that this currently may not work inside a control flow construct.
Args:
x: an unvectorized Tensor.
Returns:
A Tensor that has rank one higher than `x`. The value is the vectorized
version of `x`, i.e. stacking the value of `x` across different pfor
itera... | github-repos |
def store_state(node, reaching, defined, stack):
defs = [def_ for def_ in reaching if (not isinstance(def_[1], gast.arguments))]
if (not len(defs)):
return node
(reaching, original_defs) = zip(*defs)
assignments = []
for id_ in (set(reaching) - defined):
assignments.append(quoting.qu... | Push the final state of the primal onto the stack for the adjoint.
Python's scoping rules make it possible for variables to not be defined in
certain blocks based on the control flow path taken at runtime. In order to
make sure we don't try to push non-existing variables onto the stack, we
defined these variables expl... | codesearchnet |
def new(self, user_id, tokens=None, user_data=None, valid_until=None, client_ip=None, encoding='utf-8'):
if (valid_until is None):
valid_until = (int(time.time()) + TicketFactory._DEFAULT_TIMEOUT)
else:
valid_until = int(valid_until)
user_id = ulp.quote(user_id)
token_str = ''
if tok... | Creates a new authentication ticket.
Args:
user_id: User id to store in ticket (stored in plain text)
tokens: Optional sequence of token strings to store in the ticket
(stored in plain text).
user_data: Optional user data to store in the ticket (string like
object stored in plain text)
valid_until: Expiration time of ... | codesearchnet |
def _truncate_float(matchobj, format_str='0.2g'):
if matchobj.group(0):
return format(float(matchobj.group(0)), format_str)
return '' | Truncate long floats
Args:
matchobj (re.Match): contains original float
format_str (str): format specifier
Returns:
str: returns truncated float | codesearchnet |
def rtt_control(self, command, config):
config_byref = (ctypes.byref(config) if (config is not None) else None)
res = self._dll.JLINK_RTTERMINAL_Control(command, config_byref)
if (res < 0):
raise errors.JLinkRTTException(res)
return res | Issues an RTT Control command.
All RTT control is done through a single API call which expects
specifically laid-out configuration structures.
Args:
self (JLink): the ``JLink`` instance
command (int): the command to issue (see enums.JLinkRTTCommand)
config (ctypes type): the configuration to pass by reference.
Retur... | codesearchnet |
def _Open(self, path_spec=None, mode='rb'):
if not path_spec:
raise ValueError('Missing path specification.')
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
self._gzip_file_object = resolver.Resolver.OpenFileObject(
... | Opens the file-like object defined by path specification.
Args:
path_spec (Optional[PathSpec]): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not b... | juraj-google-style |
def get_overlaps(self, offset, length):
if (''.join([chunk.word for chunk in self])[offset] == ' '):
offset += 1
index = 0
result = ChunkList()
for chunk in self:
if ((offset < (index + len(chunk.word))) and (index < (offset + length))):
result.append(chunk)
index += ... | Returns chunks overlapped with the given range.
Args:
offset (int): Begin offset of the range.
length (int): Length of the range.
Returns:
Overlapped chunks. (:obj:`budou.chunk.ChunkList`) | codesearchnet |
def GetLogdirSubdirectories(path):
if not tf.io.gfile.exists(path):
return ()
if not tf.io.gfile.isdir(path):
raise ValueError('GetLogdirSubdirectories: path exists and is not a '
'directory, %s' % path)
if IsCloudPath(path):
logger.info(
'GetLogdirSubd... | Obtains all subdirectories with events files.
The order of the subdirectories returned is unspecified. The internal logic
that determines order varies by scenario.
Args:
path: The path to a directory under which to find subdirectories.
Returns:
A tuple of absolute paths of all subdirectories each with at least 1 eve... | juraj-google-style |
def get_lambda_arn(app, account, region):
session = boto3.Session(profile_name=account, region_name=region)
lambda_client = session.client('lambda')
lambda_arn = None
paginator = lambda_client.get_paginator('list_functions')
for lambda_functions in paginator.paginate():
for lambda_function i... | Get lambda ARN.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1
app (str): Lambda function name
Returns:
str: ARN for requested lambda function | codesearchnet |
def register_recipe(cls, recipe):
recipe_name = recipe.contents['name']
cls._recipe_classes[recipe_name] = (recipe.contents, recipe.args, recipe.__doc__) | Registers a dftimewolf recipe.
Args:
recipe: imported python module representing the recipe. | codesearchnet |
def index_of(self, file_path, line_number, called_function_name, called_file_path, called_function_start_line):
location_key = (file_path, called_function_name, line_number)
if location_key in self._location_key_to_location:
location = self._location_key_to_location[location_key]
return location... | Returns index of the location, adding the location if needed.
Args:
file_path: (string) Path to file that makes the call.
line_number: (integer) Call line number.
called_function_name: (string) Function name of the function called at
`file_path` and `line_number`.
called_file_path: (string) Path to file where the call... | github-repos |
def on_value_event(self, event):
if not event.summary.value:
logger.info('The summary of the event lacks a value.')
return None
watch_key = event.summary.value[0].node_name
tensor_value = debug_data.load_tensor_from_event(event)
device_name = _extract_device_name_from_event(e... | Records the summary values based on an updated message from the debugger.
Logs an error message if writing the event to disk fails.
Args:
event: The Event proto to be processed. | juraj-google-style |
def create(self, name, description='', whitelisted_container_task_types=None, whitelisted_executable_task_types=None):
if (whitelisted_container_task_types is None):
whitelisted_container_task_types = []
if (whitelisted_executable_task_types is None):
whitelisted_executable_task_types = []
r... | Create a task whitelist.
Args:
name (str): The name of the task whitelist.
description (str, optional): A description of the task whitelist.
whitelisted_container_task_types (list, optional): A list of
whitelisted container task type IDs.
whitelisted_executable_task_types (list, optional): A list
of whitelisted execut... | codesearchnet |
def __eq__(self, other):
if isinstance(other, DocumentReference):
return self._client == other._client and self._path == other._path
else:
return NotImplemented | Equality check against another instance.
Args:
other (Any): A value to compare against.
Returns:
Union[bool, NotImplementedType]: Indicating if the values are
equal. | juraj-google-style |
def create_profiler_ui(graph, run_metadata, ui_type='readline', on_ui_exit=None, config=None):
del config
analyzer = ProfileAnalyzer(graph, run_metadata)
cli = ui_factory.get_ui(ui_type, on_ui_exit=on_ui_exit)
cli.register_command_handler('list_profile', analyzer.list_profile, analyzer.get_help('list_pr... | Create an instance of ReadlineUI based on a `tf.Graph` and `RunMetadata`.
Args:
graph: Python `Graph` object.
run_metadata: A `RunMetadata` protobuf object.
ui_type: (str) requested UI type, e.g., "readline".
on_ui_exit: (`Callable`) the callback to be called when the UI exits.
config: An instance of `cli_config.CLICo... | github-repos |
def exit_hook(callable, once=True):
r
if once and callable in ExitHooks:
return
ExitHooks.append(callable) | r"""A decorator that makes the decorated function to run while ec exits.
Args:
callable (callable): The target callable.
once (bool): Avoids adding a func to the hooks, if it has been added already. Defaults to True.
Note:
Hooks are processedd in a LIFO order. | juraj-google-style |
def generate(cache_fn):
if (not os.path.exists(cache_fn)):
((print >> sys.stderr), ("Can't access `%s`!" % cache_fn))
sys.exit(1)
with SqliteDict(cache_fn) as db:
for item in _pick_keywords(db):
(yield item) | Go thru `cache_fn` and filter keywords. Store them in `keyword_list.json`.
Args:
cache_fn (str): Path to the file with cache.
Returns:
list: List of :class:`KeywordInfo` objects. | codesearchnet |
def definition_package(cls):
outer_definition = cls.message_definition()
if (not outer_definition):
return util.get_package_for_module(cls.__module__)
return outer_definition.definition_package() | Helper method for creating creating the package of a definition.
Returns:
Name of package that definition belongs to. | codesearchnet |
def shift_time(start_time, mins) -> str:
s_time = pd.Timestamp(start_time)
e_time = (s_time + (np.sign(mins) * pd.Timedelta(f'00:{abs(mins)}:00')))
return e_time.strftime('%H:%M') | Shift start time by mins
Args:
start_time: start time in terms of HH:MM string
mins: number of minutes (+ / -)
Returns:
end time in terms of HH:MM string | codesearchnet |
def findall_operations_with_gate_type(self, gate_type: Type[T_DESIRED_GATE_TYPE]) -> Iterable[Tuple[(int, ops.GateOperation, T_DESIRED_GATE_TYPE)]]:
result = self.findall_operations((lambda operation: bool(ops.op_gate_of_type(operation, gate_type))))
for (index, op) in result:
gate_op = cast(ops.GateOpe... | Find the locations of all gate operations of a given type.
Args:
gate_type: The type of gate to find, e.g. XPowGate or
MeasurementGate.
Returns:
An iterator (index, operation, gate)'s for operations with the given
gate type. | codesearchnet |
def image_data_format():
return _IMAGE_DATA_FORMAT | Return the default image data format convention.
Returns:
A string, either `'channels_first'` or `'channels_last'`.
Example:
>>> keras.config.image_data_format()
'channels_last' | github-repos |
def create_write_transform(self) -> beam.PTransform[Chunk, Any]:
raise NotImplementedError(type(self)) | Creates a PTransform that writes embeddings to the vector database.
Returns:
A PTransform that accepts PCollection[Chunk] and writes the chunks'
embeddings and metadata to the configured vector database.
The transform should handle:
- Converting Chunk format to database schema
- Setting up database connection/client
-... | github-repos |
def load(cls, campaign_dir, ns_path=None, runner_type='Auto', optimized=True, check_repo=True):
if (ns_path is not None):
ns_path = os.path.abspath(ns_path)
campaign_dir = os.path.abspath(campaign_dir)
db = DatabaseManager.load(campaign_dir)
script = db.get_script()
runner = None
if (ns_... | Load an existing simulation campaign.
Note that specifying an ns-3 installation is not compulsory when using
this method: existing results will be available, but in order to run
additional simulations it will be necessary to specify a
SimulationRunner object, and assign it to the CampaignManager.
Args:
campaign_dir (... | codesearchnet |
def ReadFromFile(self, path):
self._definitions = {}
with open(path, 'r') as file_object:
for preset_definition in self._ReadPresetsFromFileObject(file_object):
self._definitions[preset_definition.name] = preset_definition | Reads parser and parser plugin presets from a file.
Args:
path (str): path of file that contains the the parser and parser plugin
presets configuration.
Raises:
MalformedPresetError: if one or more plugin preset definitions are
malformed. | codesearchnet |
def _invalid_triple_quote(self, quote, row, col=None):
self.add_message(
'invalid-triple-quote',
line=row,
args=(quote, TRIPLE_QUOTE_OPTS.get(self.config.triple_quote)),
**self.get_offset(col)
) | Add a message for an invalid triple quote.
Args:
quote: The quote characters that were found.
row: The row number the quote characters were found on.
col: The column the quote characters were found on. | juraj-google-style |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.