| code (string, length 20–4.93k) | docstring (string, length 33–1.27k) | source (3 classes) |
|---|---|---|
def buckets_list(self, projection='noAcl', max_results=0, page_token=None, project_id=None):
if (max_results == 0):
max_results = Api._MAX_RESULTS
args = {'project': (project_id if project_id else self._project_id), 'maxResults': max_results}
if (projection is not None):
args['projection'] =... | Issues a request to retrieve the list of buckets.
Args:
projection: the projection of the bucket information to retrieve.
max_results: an optional maximum number of objects to retrieve.
page_token: an optional token to continue the retrieval.
project_id: the project whose buckets should be listed.
Returns:
A parsed li... | codesearchnet |
def filter(self, versions, key=lambda x: x):
    """Filter the versions in an iterable that match this version range.

    Args:
        versions (iterable): An iterable of SemanticVersion objects (or
            objects from which a version can be extracted via ``key``).
        key (callable): Extracts the comparable version from each element.
            Defaults to the identity function.

    Returns:
        list: The elements whose extracted version satisfies ``self.check``.
    """
    matched = []
    for candidate in versions:
        if self.check(key(candidate)):
            matched.append(candidate)
    return matched
def make_tar_stream(build_context, buffer):
tf = tarfile.TarFile(fileobj=buffer, mode='w')
for context_path, fileobj in build_context.items():
if getattr(fileobj, 'localpath', None) is not None:
tf.add(fileobj.localpath, arcname=context_path)
else:
tar_add_bytes(tf, ... | Write a tar stream of the build context to the provided buffer
Args:
build_context (Mapping[str, pyccc.FileReferenceBase]): dict mapping filenames to file references
buffer (io.BytesIO): writable binary mode buffer | juraj-google-style |
def remove_interceptor(self, name):
    """Remove the first interceptor whose class name or ``name`` attribute matches.

    Args:
        name (str): interceptor name to disable.

    Returns:
        bool: ``True`` if the interceptor was disabled, otherwise ``False``.
    """
    for index, interceptor in enumerate(self.interceptors):
        # Use getattr with a default: interceptors that do not define a
        # `name` attribute previously raised AttributeError here whenever
        # the class-name test failed.
        matches = (type(interceptor).__name__ == name
                   or getattr(interceptor, 'name', None) == name)
        if matches:
            # Safe to pop while enumerating because we return immediately.
            self.interceptors.pop(index)
            return True
    return False
def deep_update(d, u):
    """Deeply update a dictionary in place; list values are concatenated.

    Nested mappings are merged key by key. For list values, only the
    elements of ``u`` not already present in ``d`` are appended. Any other
    value in ``u`` overwrites the one in ``d``.

    Args:
        d (dict): First dictionary, which will be updated.
        u (dict): Second dictionary, used to extend the first one.

    Returns:
        dict: The merged dictionary (``d``).
    """
    for key, value in u.items():
        if isinstance(value, Mapping):
            d[key] = deep_update(d.get(key, {}), value)
        elif isinstance(value, list):
            current = d.get(key, [])
            additions = [item for item in value if item not in current]
            d[key] = current + additions
        else:
            d[key] = value
    return d
def reset_score(student_id, course_id, item_id, clear_state=False, emit_signal=True):
try:
student_item = StudentItem.objects.get(student_id=student_id, course_id=course_id, item_id=item_id)
except StudentItem.DoesNotExist:
return
try:
score = Score.create_reset_score(student_item)
... | Reset scores for a specific student on a specific problem.
Note: this does *not* delete `Score` models from the database,
since these are immutable. It simply creates a new score with
the "reset" flag set to True.
Args:
student_id (unicode): The ID of the student for whom to reset scores.
course_id (unicode): The ID... | codesearchnet |
def _get_default_help_message(func, args, description=None, args_help=None):
if description is None:
description = "Argument parsing for %s" % func.__name__
args_help = args_help or {}
for argument in [arg_name for arg_name in args
if arg_name not in args_help]:
... | Create a default description for the parser and help message for the
agurments if they are missing.
Args:
func: the method we are creating a parser for
args: the argument names of the method
description: a potentially existing description created from the
function docstring
args_help: a dict {arg_name: help} with pote... | juraj-google-style |
def _force_close(self, file_length=None):
    """Finalize this upload immediately at ``file_length``.

    Contents that are still in memory will not be uploaded. This is a
    utility method that does not modify self.

    Args:
        file_length: file length. Must match what has been uploaded. If
            None, it will be queried from GCS (current offset + 1).
    """
    final_length = (self._get_offset_from_gcs() + 1
                    if file_length is None else file_length)
    self._send_data('', 0, final_length)
def compress(d, output, fmt='gz', logger=None):
if not logger:
logger = log.get_logger('s3')
if type(d) not in [list, tuple]:
d = [d, ]
d = [os.path.expanduser(_d) for _d in d]
print_compress_info(d, output, compress, logger)
if fmt.lower() == 'none':
fmt = ''
elif f... | Creates a compressed/uncompressed tar file.
Args:
d: Can be one of three things:
1. the path to a single file, as a string
2. the path to a single directory, as a string
3. an iterable of file or directory paths
output (str): Output file path.
fmt: Compression method. Options are ``'gz'`` (gzip),
``'bz2'`` (bzip... | juraj-google-style |
def show(self, progress, msg=None):
if (self.whole_tag.style.display == 'none'):
self.whole_tag.style.display = 'block'
if (isinstance(progress, int) or isinstance(progress, float)):
percentage = progress
else:
percentage = self.__class__._compute_percentage(progress)
self.tag.cl... | Show the progress bar and set it to `progress` tuple or value.
Args:
progress (tuple / int / float): Tuple ``(done / len(all))`` or
the direct percentage value as int / float.
msg (str, default None): Alternative background description. | codesearchnet |
def related(self, *, exclude_self=False):
    """Get a QuerySet for all trigger log objects for the same connected model.

    Args:
        exclude_self (bool): Whether to exclude this log object from the
            result list.

    Returns:
        The QuerySet of related trigger log objects.
    """
    related_qs = type(self)._default_manager.related_to(self)
    if not exclude_self:
        return related_qs
    return related_qs.exclude(id=self.id)
def __init__(self, message, callback, color=''):
    """Initializes a ConsolePrompt.

    Args:
        message: A string to be presented to the user.
        callback: A function to be called with the response string.
        color: An ANSI color code, or the empty string.
    """
    super(ConsolePrompt, self).__init__()
    # Daemonize so an open prompt never keeps the process alive.
    self.daemon = True
    self._message = message
    self._callback = callback
    self._color = color
    self._answered = False
    self._stop_event = threading.Event()
def CreateSignatureScanner(cls, specification_store):
scanner_object = pysigscan.scanner()
for format_specification in specification_store.specifications:
for signature in format_specification.signatures:
pattern_offset = signature.offset
if pattern_offset is None:
signatu... | Creates a signature scanner for format specifications with signatures.
Args:
specification_store (FormatSpecificationStore): format specifications
with signatures.
Returns:
pysigscan.scanner: signature scanner. | juraj-google-style |
def from_text_vision_configs(cls, text_config: AlignTextConfig, vision_config: AlignVisionConfig, **kwargs):
    """Instantiate an [`AlignConfig`] (or a derived class) from align text and
    align vision model configurations.

    Args:
        text_config: Configuration of the text model.
        vision_config: Configuration of the vision model.
        **kwargs: Forwarded to the configuration constructor.

    Returns:
        [`AlignConfig`]: An instance of a configuration object.
    """
    text_dict = text_config.to_dict()
    vision_dict = vision_config.to_dict()
    return cls(text_config=text_dict, vision_config=vision_dict, **kwargs)
def str2dict_values(str_in):
    """Extract the values from a string that represents a dict, sorted by key.

    Args:
        str_in (str): String that contains a python dict.

    Returns:
        list: The dict's values ordered by their keys, or None if no valid
        dict was found.
    """
    tmp_dict = str2dict(str_in)
    if tmp_dict is None:
        return None
    # Iterating a dict already yields its keys; the original wrapped it in a
    # redundant generator expression before sorting.
    return [tmp_dict[key] for key in sorted(tmp_dict)]
def to_zmat(self, buf=None, upper_triangle=True, implicit_index=True, float_format='{:.6f}'.format, overwrite=True, header=False):
out = self.copy()
if implicit_index:
out = out.change_numbering(new_index=range(1, (len(self) + 1)))
if (not upper_triangle):
out = out._remove_upper_triangle()
... | Write zmat-file
Args:
buf (str): StringIO-like, optional buffer to write to
implicit_index (bool): If implicit_index is set, the zmat indexing
is changed to ``range(1, len(self) + 1)``.
Using :meth:`~chemcoord.Zmat.change_numbering`
Besides the index is omitted while writing which means,
that the index is given
implic... | codesearchnet |
def create_iam_role(self, account):
try:
iam = self.session.client('iam')
trust = get_template('vpc_flow_logs_iam_role_trust.json').render()
policy = get_template('vpc_flow_logs_role_policy.json').render()
newrole = iam.create_role(Path='/', RoleName=self.role_name, AssumeRolePolicyD... | Create a new IAM role. Returns the ARN of the newly created role
Args:
account (:obj:`Account`): Account where to create the IAM role
Returns:
`str` | codesearchnet |
def load_obj(fn):
position = [np.zeros(3, dtype=np.float32)]
normal = [np.zeros(3, dtype=np.float32)]
uv = [np.zeros(2, dtype=np.float32)]
tuple2idx = OrderedDict()
trinagle_indices = []
input_file = open(fn) if isinstance(fn, str) else fn
for line in input_file:
line = line.strip()
if no... | Load 3d mesh form .obj' file.
Args:
fn: Input file name or file-like object.
Returns:
dictionary with the following keys (some of which may be missing):
position: np.float32, (n, 3) array, vertex positions
uv: np.float32, (n, 2) array, vertex uv coordinates
normal: np.float32, (n, 3) array, vertex uv normals
face: np... | juraj-google-style |
def comparator(objective):
    """Higher order function creating a compare function for objectives.

    Args:
        objective (cipy.algorithms.core.Objective): The objective to create
            a compare for.

    Returns:
        callable: ``f(l, r)`` that is True when ``l`` is better than ``r``
        (less-than for ``Minimum`` objectives, greater-than otherwise).
    """
    if isinstance(objective, Minimum):
        def better(l, r):
            return l < r
    else:
        def better(l, r):
            return l > r
    return better
def highway_core_with_recurrent_dropout(
    hidden_size,
    num_layers,
    keep_prob=0.5,
    **kwargs):
    """Highway core with recurrent dropout.

    Args:
        hidden_size: (int) Hidden size dimensionality.
        num_layers: (int) Number of highway layers.
        keep_prob: the probability to keep an entry when applying dropout.
        **kwargs: Extra keyword arguments to pass to the highway core.

    Returns:
        A tuple (train_core, test_core): the dropout-wrapped core for
        training and the bare core for evaluation.
    """
    inner_core = HighwayCore(hidden_size, num_layers, **kwargs)
    train_core = RecurrentDropoutWrapper(inner_core, keep_prob)
    return train_core, inner_core
def make_one_shot_iterator(self) -> Union[iterator_ops.Iterator, iterator_ops.OwnedIterator]:
    """Creates an iterator for elements of this dataset.

    Note: The returned iterator will be initialized automatically.
    A "one-shot" iterator does not currently support re-initialization.
    For that see `make_initializable_iterator`.

    Returns:
        An iterator over the elements of this dataset.
    """
    iterator = self._make_one_shot_iterator()
    return iterator
def compute_attr_metadata(self, own_attrs: list[Attribute], decorator: str) -> Sequence[Attribute]:
    """Sets combined metadata based on inherited and own attrs.

    The combined list is recorded in ``self.metadata`` under the metadata
    key associated with ``decorator``.

    Args:
        own_attrs: The attrs defined explicitly in this class.
        decorator: The fully qualified decorator name.

    Returns:
        The list of combined attrs.
    """
    assert decorator in _METADATA_KEYS, f'No metadata key for {decorator}'
    metadata_key = _METADATA_KEYS[decorator]
    combined = self._get_attrs_from_mro(own_attrs, metadata_key)
    self.metadata[metadata_key] = combined
    return combined
def list_keyvaults(access_token, subscription_id, rgname):
    """Lists key vaults in the named resource group.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        rgname (str): Azure resource group name.

    Returns:
        HTTP response. 200 OK.
    """
    endpoint = (
        '{}/subscriptions/{}/resourcegroups/{}'
        '/providers/Microsoft.KeyVault/vaults?api-version={}'
    ).format(get_rm_endpoint(), subscription_id, rgname, KEYVAULT_API)
    return do_get_next(endpoint, access_token)
def dummy_inputs(self):
if self.config.use_lang_emb and self.config.n_langs > 1:
return {'input_ids': tf.constant(MULTIPLE_CHOICE_DUMMY_INPUTS, dtype=tf.int32), 'langs': tf.constant(MULTIPLE_CHOICE_DUMMY_INPUTS, dtype=tf.int32)}
else:
return {'input_ids': tf.constant(MULTIPLE_CHOICE_DUMMY_INPUTS... | Dummy inputs to build the network.
Returns:
tf.Tensor with dummy inputs | github-repos |
def shape_list(tensor: Union[tf.Tensor, np.ndarray]) -> list[int]:
if isinstance(tensor, np.ndarray):
return list(tensor.shape)
dynamic = tf.shape(tensor)
if tensor.shape == tf.TensorShape(None):
return dynamic
static = tensor.shape.as_list()
return [dynamic[i] if s is None else s fo... | Deal with dynamic shape in tensorflow cleanly.
Args:
tensor (`tf.Tensor` or `np.ndarray`): The tensor we want the shape of.
Returns:
`List[int]`: The shape of the tensor as a list. | github-repos |
def live(self):
session = self._session
url = '{}/live'.format(self._base_url)
supported_params = frozenset(['filter[port]'])
params = {k: v for (k, v) in iteritems(self._params) if (k in supported_params)}
return session.live(url, self._datapoint_class, {'is_aggregate': self._is_aggregate}, params=... | Get a live stream of timeseries readings.
This returns an Iterable over a live stream of readings. Note
that the result will need to be closed since the system can
not tell when you'll be done with it.
You can either call ``close`` on the endpoint when you're or
use the context management facilities of the endpoint.
... | codesearchnet |
def record_request_completion(self, created_time: float, request_id: str) -> None:
if not _has_opentelemetry:
return
latency_ms = (time.time() - created_time) * 1000.0
try:
self.request_latency_histogram.record(latency_ms)
logger.debug(f'Recorded request completion for {request_id}: ... | Record metrics about a completed request.
Args:
created_time: The time the request was created
request_id: The ID of the request | github-repos |
def disqualified(self, num, natural=True, **kwargs):
    """Search for disqualified officers by officer ID.

    Searches for natural disqualifications by default. Specify
    natural=False to search for corporate disqualifications.

    Args:
        num (str): Officer number to search on.
        natural (Optional[bool]): Natural or corporate search.
        kwargs (dict): Additional keywords passed into the request.

    Returns:
        The HTTP response object.
    """
    kind = 'natural' if natural else 'corporate'
    baseuri = '{}disqualified-officers/{}/{}'.format(self._BASE_URI, kind, num)
    res = self.session.get(baseuri, params=kwargs)
    self.handle_http_error(res)
    return res
def _read_addr_resolve(self, length, htype):
    """Resolve MAC address according to protocol.

    Positional arguments:
        * length -- int, hardware address length
        * htype -- int, hardware type

    Returns:
        * str -- dash-separated MAC address when htype is 1 (Ethernet),
          otherwise the raw bytes read from the frame.
    """
    if htype != 1:
        return self._read_fileng(length)
    # Ethernet: six bytes rendered as 'aa-bb-cc-dd-ee-ff'.
    raw = self._read_fileng(6)
    return '-'.join(textwrap.wrap(raw.hex(), 2))
def _has_extras(ctx):
    """Determine if a MAR file has an additional section block or not.

    It does this by looking at where file data starts in the file. If data
    starts immediately after the signature data, then no additional
    sections are present.

    Args:
        ctx (context): construct parsing context

    Returns:
        True if the MAR file has an additional section block.
    """
    if not ctx.index.entries:
        return False
    past_header = ctx.data_offset > 8
    past_signatures = ctx.data_offset > ctx.signatures.offset_end + 8
    return past_header and past_signatures
def resize(self, image: np.ndarray, size: Dict[str, int], resample: PILImageResampling=PILImageResampling.BILINEAR, data_format: Optional[Union[str, ChannelDimension]]=None, input_data_format: Optional[Union[str, ChannelDimension]]=None, **kwargs) -> np.ndarray:
size = get_size_dict(size, default_to_square=False)
... | Resize an image.
Args:
image (`np.ndarray`):
Image to resize.
size (`Dict[str, int]`):
Size of the output image. If `size` is of the form `{"height": h, "width": w}`, the output image will
have the size `(h, w)`. If `size` is of the form `{"longest_edge": s}`, the output image will have its
longest edge of length `s` ... | github-repos |
def with_wget(url_dict=None, target_file=None):
def wget_decorator(cls):
def download_impl(self):
'Download the selected version from the url_dict value.'
t_file = (target_file if target_file else self.SRC_FILE)
t_version = url_dict[self.version]
Wget(t_vers... | Decorate a project class with wget-based version information.
This adds two attributes to a project class:
- A `versions` method that returns a list of available versions
for this project.
- A `repository` attribute that provides a repository string to
download from later.
We use the `git rev-list` subcommand to list ... | codesearchnet |
def _remove_hdxobject(self, objlist, obj, matchon='id', delete=False):
if (objlist is None):
return False
if isinstance(obj, six.string_types):
obj_id = obj
elif (isinstance(obj, dict) or isinstance(obj, HDXObject)):
obj_id = obj.get(matchon)
else:
raise HDXError('Type of... | Remove an HDX object from a list within the parent HDX object
Args:
objlist (List[Union[T <= HDXObject,Dict]]): list of HDX objects
obj (Union[T <= HDXObject,Dict,str]): Either an id or hdx object metadata either from an HDX object or a dictionary
matchon (str): Field to match on. Defaults to id.
delete (bool): Whethe... | codesearchnet |
def put(self):
    """Updates this task whitelist on the saltant server.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just updated.
    """
    payload = {
        'id': self.id,
        'name': self.name,
        'description': self.description,
        'whitelisted_container_task_types': self.whitelisted_container_task_types,
        'whitelisted_executable_task_types': self.whitelisted_executable_task_types,
    }
    return self.manager.put(**payload)
def port_create_vlan(br, port, id, internal=False):
interfaces = __salt__['network.interfaces']()
if (not (0 <= id <= 4095)):
return False
elif (not bridge_exists(br)):
return False
elif ((not internal) and (port not in interfaces)):
return False
elif (port in port_list(br)):... | Isolate VM traffic using VLANs.
Args:
br: A string - bridge name.
port: A string - port name.
id: An integer in the valid range 0 to 4095 (inclusive), name of VLAN.
internal: A boolean to create an internal interface if one does not exist.
Returns:
True on success, else False.
.. versionadded:: 2016.3.0
CLI Example... | codesearchnet |
def configs(self, filters=None):
    """List configs.

    Args:
        filters (dict): A map of filters to process on the configs list.
            Available filters: ``names``.

    Returns:
        list: A list of configs.
    """
    params = {}
    if filters:
        params['filters'] = utils.convert_filters(filters)
    endpoint = self._url('/configs')
    return self._result(self._get(endpoint, params=params), True)
def _GenerateNotices(self):
items = []
for e in self._notices:
d = e.GetDictToFormat()
if ('url' in d.keys()):
d['url'] = ('<a href="%(url)s">%(url)s</a>' % d)
items.append(('<li class="notice">%s</li>' % e.FormatProblem(d).replace('\n', '<br>')))
if items:
return... | Generate a summary of any notices.
Returns:
The generated HTML as a string. | codesearchnet |
def __getDecision(self, result, multiple=False, **values):
values = self.__toString(values)
__valueKeyWithHeaderIndex = self.__valueKeyWithHeaderIndex(values)
errors = self.__checkDecisionParameters(result, **values)
if errors:
view.Tli.showErrors('ParametersError', errors)
machingData = {}
for lin... | The main method for decision picking.
Args:
result (array of str): What values you want to get in return array.
multiple (bolean, optional): Do you want multiple result if it finds many maching decisions.
**values (dict): What should finder look for, (headerString : value).
Returns: Maped result values with finded el... | juraj-google-style |
def __init__(self, speaker, audio_format, key, lang="ru-RU", **kwargs):
    """Class for generation of speech.

    Args:
        speaker: Speaker.
        audio_format: Audio file format.
        key: API-key for Yandex speech kit.
        lang (optional): Language. Defaults to "ru-RU".
        **kwargs: Extra generation options, e.g. ``emotion`` (the color of
            the voice, defaults to "normal") or ``speed`` (speech tempo,
            defaults to 1.0).
    """
    params = {
        "speaker": speaker,
        "format": audio_format,
        "key": key,
        "lang": lang,
    }
    params.update(kwargs)
    self.__params = params
    # Generated audio is produced lazily; nothing cached yet.
    self._data = None
def get_registered_object(name, custom_objects=None, module_objects=None):
custom_objects_scope_dict = global_state.get_global_attribute('custom_objects_scope_dict', {})
if name in custom_objects_scope_dict:
return custom_objects_scope_dict[name]
elif name in GLOBAL_CUSTOM_OBJECTS:
return GL... | Returns the class associated with `name` if it is registered with Keras.
This function is part of the Keras serialization and deserialization
framework. It maps strings to the objects associated with them for
serialization/deserialization.
Example:
```python
def from_config(cls, config, custom_objects=None):
if 'my_... | github-repos |
def _grouper(iterable, n, fillvalue=0):
    """Collect data into fixed-length chunks or blocks.

    Args:
        iterable: The source iterable.
        n (int): The size of the chunk.
        fillvalue (int): The fill value used to pad the final short chunk.

    Returns:
        iterator: An iterator over the n-sized chunks (tuples).
    """
    # The same iterator repeated n times advances in lockstep,
    # yielding consecutive n-element groups.
    chunks = [iter(iterable)] * n
    return zip_longest(*chunks, fillvalue=fillvalue)
def nsarg_completions(
completion_text: str,
entity_types: list,
bel_spec: BELSpec,
namespace: str,
species_id: str,
bel_fmt: str,
size: int,
):
minimal_nsarg_completion_len = 1
species = [species_id]
namespaces = [namespace]
replace_list = []
if len(completion_te... | Namespace completions
Args:
completion_text
entity_types: used to filter namespace search results
bel_spec: used to search default namespaces
namespace: used to filter namespace search results
species_id: used to filter namespace search results
bel_fmt: used to select full name or abbrev for default namespaces
size: h... | juraj-google-style |
def setHolidayDates(self, cmd_dict=None, password="00000000"):
result = False
self.setContext("setHolidayDates")
if not cmd_dict:
cmd_dict = self.m_holiday_date_params
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on set... | Serial call to set holiday list.
If a buffer dictionary is not supplied, the method will use
the class object buffer populated with assignHolidayDate.
Args:
cmd_dict (dict): Optional dictionary of holidays.
password (str): Optional password.
Returns:
bool: True on completion. | juraj-google-style |
def compare(expr, value, regex_expr=False):
if expr == value:
return True
negate = False
if isinstance(expr, str):
negate = expr.startswith(NEGATE)
expr = strip_negate(expr) if negate else expr
try:
test(expr, value, regex_expr=regex_expr)
ex... | Compares an string or regular expression againast a given value.
Arguments:
expr (str|regex): string or regular expression value to compare.
value (str): value to compare against to.
regex_expr (bool, optional): enables string based regex matching.
Raises:
AssertionError: in case of assertion error.
Returns:
bool | juraj-google-style |
def get_db_row(db, start, size):
    """Read out a part of a DB (example of a byte-wise DB read).

    Args:
        db (int): The db to use.
        start (int): The index of where to start in db data.
        size (int): The size of the db data to read.

    Returns:
        The raw data returned by the client read.
    """
    byte_wordlen = snap7.snap7types.S7WLByte
    ctype = snap7.snap7types.wordlen_to_ctypes[byte_wordlen]
    return client.db_read(db, start, ctype, size)
def _InternalUnpackAny(msg):
type_url = msg.type_url
db = symbol_database.Default()
if not type_url:
return None
type_name = type_url.split("/")[-1]
descriptor = db.pool.FindMessageTypeByName(type_name)
if descriptor is None:
return None
message_class = db.GetPrototype(descriptor)
m... | Unpacks Any message and returns the unpacked message.
This internal method is differnt from public Any Unpack method which takes
the target message as argument. _InternalUnpackAny method does not have
target message type and need to find the message type in descriptor pool.
Args:
msg: An Any message to be unpacked.
... | juraj-google-style |
def load_morfessor_model(lang="en", version="2"):
src_dir = "morph{}".format(version)
p = locate_resource(src_dir, lang)
file_handler = _open(p)
tmp_file_ = NamedTemporaryFile(delete=False)
tmp_file_.write(file_handler.read())
tmp_file_.close()
io = morfessor.MorfessorIO()
model = io.read_any_model(t... | Return a morfessor model for `lang` and of version `version`
Args:
lang (string): language code.
version (string): version of the parameters to be used. | juraj-google-style |
def write_file(self, file_name, vasp4_compatible=False):
def _print_fortran_float(f):
'\n Fortran codes print floats with a leading zero in scientific\n notation. When writing CHGCAR files, we adopt this convention\n to ensure written CHGCAR files are byte-to-byte identical... | Write the VolumetricData object to a vasp compatible file.
Args:
file_name (str): Path to a file
vasp4_compatible (bool): True if the format is vasp4 compatible | codesearchnet |
def substitute_symbol_table(table, version, max_id):
if (not table.table_type.is_shared):
raise ValueError('Symbol table to substitute from must be a shared table')
if (version <= 0):
raise ValueError(('Version must be grater than or equal to 1: %s' % version))
if (max_id < 0):
raise... | Substitutes a given shared symbol table for another version.
* If the given table has **more** symbols than the requested substitute, then the generated
symbol table will be a subset of the given table.
* If the given table has **less** symbols than the requested substitute, then the generated
symbol table will have s... | codesearchnet |
def add_entry(self, path_object):
if ((not is_root()) and (not (self.st_mode & PERM_WRITE)) and (not self.filesystem.is_windows_fs)):
exception = (IOError if IS_PY2 else OSError)
raise exception(errno.EACCES, 'Permission Denied', self.path)
if (path_object.name in self.contents):
self.fi... | Adds a child FakeFile to this directory.
Args:
path_object: FakeFile instance to add as a child of this directory.
Raises:
OSError: if the directory has no write permission (Posix only)
OSError: if the file or directory to be added already exists | codesearchnet |
def nested_update(d, u):
    """Merge two nested dicts, updating ``d`` in place.

    Nested dicts are sometimes used for representing recursive structures;
    this applies ``u`` recursively so nested mappings are merged rather
    than replaced wholesale. Non-mapping values in ``u`` overwrite those
    in ``d``.

    Args:
        d (dict): dict that will be updated in place.
        u (dict): dict whose entries extend/override ``d``.

    Returns:
        dict: The merged dictionary (``d``).
    """
    # `collections.Mapping` was removed in Python 3.10; the ABC lives in
    # collections.abc (since Python 3.3).
    from collections.abc import Mapping

    for k, v in u.items():
        if isinstance(v, Mapping):
            d[k] = nested_update(d.get(k, {}), v)
        else:
            d[k] = v
    return d
def __init__(
self, cipher_mode=None, initialization_vector=None, key=None, **kwargs):
if not key:
raise ValueError('Missing key.')
cipher_mode = self.ENCRYPTION_MODES.get(cipher_mode, None)
if cipher_mode is None:
raise ValueError('Unsupported cipher mode: {0!s}'.format(cipher_mode)... | Initializes a decrypter.
Args:
cipher_mode (Optional[str]): cipher mode.
initialization_vector (Optional[bytes]): initialization vector.
key (Optional[bytes]): key.
kwargs (dict): keyword arguments depending on the decrypter.
Raises:
ValueError: when key is not set, block cipher mode is not supported,
or initializati... | juraj-google-style |
def _format_ase2clusgeo(obj, all_atomtypes=None):
totalAN = len(obj)
if (all_atomtypes is not None):
atomtype_set = set(all_atomtypes)
else:
atomtype_set = set(obj.get_atomic_numbers())
atomtype_lst = np.sort(list(atomtype_set))
n_atoms_per_type_lst = []
pos_lst = []
for atom... | Takes an ase Atoms object and returns numpy arrays and integers
which are read by the internal clusgeo. Apos is currently a flattened
out numpy array
Args:
obj():
all_atomtypes():
sort(): | codesearchnet |
def __init__(self, *others):
selectors = list()
heads = collections.defaultdict(set)
for other in others:
if isinstance(other, MultiFieldSelector):
for head, tail in other.heads.iteritems():
heads[head].add(tail)
elif isinstanc... | Returns a MultiFieldSelector based on combining the passed-in
FieldSelector and MultiFieldSelector objects.
args:
``*others=``\ *FieldSelector*\ \|\ *iterable*
Each argument is interpreted as either a FieldSelector, or a
FieldSelector constructor. | juraj-google-style |
def modify_module(channel, module_name, module_state):
    """Creates an embed UI containing the module modified message.

    Args:
        channel (discord.Channel): The Discord channel to bind the embed to.
        module_name (str): The name of the module that was updated.
        module_state (bool): The current state of the module.

    Returns:
        embed: The created embed.
    """
    state_text = 'activated' if module_state else 'deactivated'
    title = '{} updated'.format(module_name)
    description = '{} is now {}'.format(module_name, state_text)
    return ui_embed.UI(channel, title, description, modulename=modulename)
def to_sigproc_keyword(keyword, value=None):
keyword = bytes(keyword)
if value is None:
return np.int32(len(keyword)).tostring() + keyword
else:
dtype = header_keyword_types[keyword]
dtype_to_type = {b'<l' : np.int32,
b'str' : str,
... | Generate a serialized string for a sigproc keyword:value pair
If value=None, just the keyword will be written with no payload.
Data type is inferred by keyword name (via a lookup table)
Args:
keyword (str): Keyword to write
value (None, float, str, double or angle): value to write to file
Returns:
value_str (str): s... | juraj-google-style |
def run(self, blocking: bool=True):
if not self._run_control_loop:
err = ("`run` called, but not using the internal control loop. Use"
" `start` instead")
raise RuntimeError(err)
self._setup()
self._heartbeat_reciever.start()
if bloc... | Run the internal control loop.
Args:
blocking (bool): Defaults to `True`. If set to `False`, will
intialize a thread to run the control loop.
Raises:
RuntimeError: If called and not using the internal control loop
via `self._run_control_loop`, set in the intializer of the
class | juraj-google-style |
def has_cwd(state, dir, incorrect_msg='Your current working directory should be `{{dir}}`. Use `cd {{dir}}` to navigate there.'):
expr = "[[ $PWD == '{}' ]]".format(dir)
_msg = state.build_message(incorrect_msg, fmt_kwargs={'dir': dir})
has_expr_exit_code(state, expr, output='0', incorrect_msg=_msg)
ret... | Check whether the student is in the expected directory.
This check is typically used before using ``has_expr_output()``
to make sure the student didn't navigate somewhere else.
Args:
state: State instance describing student and solution code. Can be omitted if used with ``Ex()``.
dir: Directory that the student shoul... | codesearchnet |
def call_for_each_replica(self, fn, args=(), kwargs=None):
distribute_lib._require_cross_replica_or_default_context_extended(self)
if kwargs is None:
kwargs = {}
map_fn = functools.partial(dtensor_util.convert_inputs_to_dtensor, mesh=self._mesh)
d_args = nest.map_structure(map_fn, args)
d_kw... | Run `fn` once per replica.
This is a method that expected by the strategy base class in its `run()`.
Args:
fn: function to run (will be run once per replica).
args: Tuple or list with positional arguments for `fn`.
kwargs: Dict with keyword arguments for `fn`.
Returns:
Merged return value of `fn` across all replicas... | github-repos |
def torque_on(self):
    """Enable the torque of the Herkulex servo.

    In this mode, position control and velocity control will work.

    Args:
        none
    """
    # Build the RAM write request in one literal instead of an append chain.
    packet = [
        10,
        self.servoid,
        RAM_WRITE_REQ,
        TORQUE_CONTROL_RAM,
        1,
        96,  # 0x60 — presumably the "torque on" register value; matches the function name
    ]
    send_data(packet)
def dump_session_params(path):
var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
var.extend(tf.get_collection(tf.GraphKeys.MODEL_VARIABLES))
assert len(set(var)) == len(var), "TRAINABLE and MODEL variables have duplication!"
gvars = set([k.name for k in tf.global_variables()])
... | Dump value of all TRAINABLE + MODEL variables to a dict, and save as
npz format (loadable by :func:`sessinit.get_model_loader`).
Args:
path(str): the file name to save the parameters. Must ends with npz. | juraj-google-style |
def create(self, resource):
    """Create the given resource.

    Args:
        resource (intern.resource.boss.BossResource): Create a data model
            object with attributes matching those of the resource.

    Returns:
        (intern.resource.boss.BossResource): Returns resource of type
            requested on success.

    Raises:
        requests.HTTPError on failure.
    """
    request_args = (resource, self.url_prefix, self.auth,
                    self.session, self.session_send_opts)
    return self.service.create(*request_args)
def print_info(self, buf=None, format_=FileFormat.yaml,
skip_attributes=None, include_release=False):
data = self.validated_data().copy()
data.pop("config", None)
if self.config:
if isinstance(self, Package):
... | Print the contents of the package.
Args:
buf (file-like object): Stream to write to.
format_ (`FileFormat`): Format to write in.
skip_attributes (list of str): List of attributes to not print.
include_release (bool): If True, include release-related attributes,
such as 'timestamp' and 'changelog' | juraj-google-style |
def testBasic(self, count, batch_size, drop_remainder, num_parallel_calls):
components = (np.arange(7), np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis], np.array(37.0) * np.arange(7))
def _map_fn(x, y, z):
return (math_ops.square(x), math_ops.square(y), math_ops.square(z))
dataset = dataset_ops... | Tests the batch dataset logic for various input configurations.
Args:
count: the number of input elements
batch_size: the batch size
drop_remainder: whether a smaller batch size should be produced if batch
size does not divide number of inputs evenly
num_parallel_calls: the number batches to process asynchronously in
... | github-repos |
def process_openxml_file(filename: str,
print_good: bool,
delete_if_bad: bool) -> None:
print_bad = not print_good
try:
file_good = is_openxml_good(filename)
file_bad = not file_good
if (print_good and file_good) or (print_bad and fi... | Prints the filename of, or deletes, an OpenXML file depending on whether
it is corrupt or not.
Args:
filename: filename to check
print_good: if ``True``, then prints the filename if the file
appears good.
delete_if_bad: if ``True``, then deletes the file if the file
appears corrupt. | juraj-google-style |
def ensure_tensor_on_device(self, **inputs):
    """Ensure PyTorch tensors are on the specified device.

    Args:
        inputs (keyword arguments that should be `torch.Tensor`, the rest
            is ignored): the tensors to place on `self.device`.
            Recursive on lists **only** (handled by the underlying helper).

    Return:
        `Dict[str, torch.Tensor]`: the same as `inputs` but on the proper
        device.
    """
    target_device = self.device
    return self._ensure_tensor_on_device(inputs, target_device)
Args:
inputs (keyword arguments that should be `torch.Tensor`, the rest is ignored):
The tensors to place on `self.device`.
Recursive on lists **only**.
Return:
`Dict[str, torch.Tensor]`: The same as `inputs` but on the proper device. | github-repos |
def insert(self, **fields):
if self.conflict_target or self.conflict_action:
compiler = self._build_insert_compiler([fields])
rows = compiler.execute_sql(return_id=True)
pk_field_name = self.model._meta.pk.name
return rows[0][pk_field_name]
... | Creates a new record in the database.
This allows specifying custom conflict behavior using .on_conflict().
If no special behavior was specified, this uses the normal Django create(..)
Arguments:
fields:
The fields of the row to create.
Returns:
The primary key of the record that was created. | juraj-google-style |
def find_next(self, *strings, **kwargs):
start = kwargs.pop('start', None)
keys_only = kwargs.pop('keys_only', False)
staht = (start if (start is not None) else self.cursor)
for (start, stop) in [(staht, len(self)), (0, staht)]:
for i in range(start, stop):
for string in strings:
... | From the editor's current cursor position find the next instance of the
given string.
Args:
strings (iterable): String or strings to search for
Returns:
tup (tuple): Tuple of cursor position and line or None if not found
Note:
This function cycles the entire editor (i.e. cursor to length of
editor to zero and back t... | codesearchnet |
def get_qubit_los(self, user_lo_config):
try:
_q_los = self.default_qubit_los.copy()
except KeyError:
raise PulseError('Qubit default frequencies not exist.')
for channel, lo_freq in user_lo_config.qubit_lo_dict().items():
_q_los[channel.index] = lo_... | Embed default qubit LO frequencies from backend and format them to list object.
If configured lo frequency is the same as default, this method returns `None`.
Args:
user_lo_config (LoConfig): A dictionary of LOs to format.
Returns:
list: A list of qubit LOs.
Raises:
PulseError: when LO frequencies are missing. | juraj-google-style |
def GetSysFeeAmountByHeight(self, height):
    """Get the system fee for the block at the specified height.

    Args:
        height (int): block height.

    Returns:
        int: the system fee amount for that block.
    """
    # Resolve the block hash for the height, then look up its fee. The
    # local is named `block_hash` (not `hash`) to avoid shadowing the
    # builtin `hash()`.
    block_hash = self.GetBlockHash(height)
    return self.GetSysFeeAmount(block_hash)
Args:
height (int): block height.
Returns:
int: | codesearchnet |
def __init__(self, orig_image, dpi, save_image):
self._shreds = None
self.orig_img = orig_image
self.save_image = save_image
self._fg_mask = None
self._shreds = None
if dpi is None:
self.res_x, self.res_y = self._guess_dpi()
else:
... | Initializes a Sheet instance.
Args:
orig_image: cv.Mat instance with the original sheet image.
dpi: optional (x resolution, y resolution) tuple or None.
If set to None, will try to guess dpi.
save_image: A callback to save debug images with args (name, img) | juraj-google-style |
def getindex(self, child, recursive=True, ignore=True):
for (i, c) in enumerate(self.data):
if (c is child):
return i
if recursive:
for (i, c) in enumerate(self.data):
if (ignore is True):
try:
if (not c.auth):
c... | Get the index at which an element occurs, recursive by default!
Returns:
int | codesearchnet |
def ParseContactRow(self, parser_mediator, query, row, **unused_kwargs):
query_hash = hash(query)
event_data = TangoAndroidContactEventData()
first_name = self._GetRowValue(query_hash, row, 'first_name')
try:
decoded_text = base64_decode(first_name)
event_data.first_name = codecs.deco... | Parses a contact row from the database.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
query (str): query that created the row.
row (sqlite3.Row): row resulting from query. | juraj-google-style |
def _SetupValuesForDevice(self, tensor_in_sizes, filter_in_sizes, dilations, strides, padding, data_format, dtype, use_gpu, op_name):
x1 = self._CreateNumpyTensor(tensor_in_sizes)
x2 = self._CreateNumpyTensor(filter_in_sizes)
with test_util.device(use_gpu):
t1 = constant_op.constant(x1, shape=tensor... | Verifies the output values of the convolution function.
Args:
tensor_in_sizes: Input tensor dimensions in [batch, input_rows,
input_cols, input_depth].
filter_in_sizes: Filter tensor dimensions in [kernel_rows, kernel_cols,
input_depth, output_depth].
dilations: Dilated rate: [col_dilation, row_dilation]
strides: Stri... | github-repos |
def run_inference(self, batch: Sequence[ExampleT], model: ModelT, inference_args: Optional[dict[str, Any]]=None) -> Iterable[PredictionT]:
while self.throttler.throttle_request(time.time() * _MILLISECOND_TO_SECOND):
self.logger.info('Delaying request for %d seconds due to previous failures', self.throttle_d... | Runs inferences on a batch of examples. Calls a remote model for
predictions and will retry if a retryable exception is raised.
Args:
batch: A sequence of examples or features.
model: The model used to make inferences.
inference_args: Extra arguments for models whose inference call requires
extra parameters.
Returns:... | github-repos |
def version(msg):
    """ADS-B version number.

    Args:
        msg (string): 28 bytes hexadecimal message string, TC = 31.

    Returns:
        int: version number.
    """
    if typecode(msg) != 31:
        raise RuntimeError("%s: Not a status operation message, expecting TC = 31" % msg)
    # The version number is encoded in bits 72-74 of the binary message.
    bits = common.hex2bin(msg)
    return common.bin2int(bits[72:75])
Args:
msg (string): 28 bytes hexadecimal message string, TC = 31
Returns:
int: version number | juraj-google-style |
def ConfigureLogging(
debug_output=False, filename=None, mode='w', quiet_mode=False):
for handler in logging.root.handlers:
logging.root.removeHandler(handler)
logger = logging.getLogger()
if filename and filename.endswith('.gz'):
handler = CompressedFileHandler(filename, mode=mode)
elif ... | Configures the logging root logger.
Args:
debug_output (Optional[bool]): True if the logging should include debug
output.
filename (Optional[str]): log filename.
mode (Optional[str]): log file access mode.
quiet_mode (Optional[bool]): True if the logging should not include
information output. Note that debug_output ta... | juraj-google-style |
def mask_from_embedding(emb):
    """Input embeddings -> padding mask.

    symbol_modality has been hacked to return all-zero embeddings for
    padding, so a nonzero absolute sum along the depth axis marks real
    (non-padding) positions.

    Args:
        emb: a Tensor with shape [batch, width, height, depth].

    Returns:
        a 0.0/1.0 Tensor with shape [batch, width, height, 1]; 0.0 in the
        padding positions and 1.0 elsewhere.
    """
    depth_sum = tf.reduce_sum(tf.abs(emb), axis=3, keepdims=True)
    return weights_nonzero(depth_sum)
We have hacked symbol_modality to return all-zero embeddings for padding.
Returns a mask with 0.0 in the padding positions and 1.0 elsewhere.
Args:
emb: a Tensor with shape [batch, width, height, depth].
Returns:
a 0.0/1.0 Tensor with shape [batch, width, height, 1]. | juraj-google-style |
def grab_data(self, f_start=None, f_stop=None,t_start=None, t_stop=None, if_id=0):
self.freqs = self.populate_freqs()
self.timestamps = self.populate_timestamps()
if f_start is None:
f_start = self.freqs[0]
if f_stop is None:
f_stop = self.freqs[-1]
... | Extract a portion of data by frequency range.
Args:
f_start (float): start frequency in MHz
f_stop (float): stop frequency in MHz
if_id (int): IF input identification (req. when multiple IFs in file)
Returns:
(freqs, data) (np.arrays): frequency axis in MHz and data subset | juraj-google-style |
def get_svg_layers(svg_sources):
layers = []
width, height = None, None
def extract_length(attr):
'Extract length in pixels.'
match = CRE_MM_LENGTH.match(attr)
if match:
return INKSCAPE_PPmm.magnitude * float(match.group('length'))
else:
... | Collect layers from input svg sources.
Args:
svg_sources (list) : A list of file-like objects, each containing
one or more XML layers.
Returns
-------
(width, height), layers : (int, int), list
The first item in the tuple is the shape of the largest layer, and the
second item is a list of ``Element`` objects (from :... | juraj-google-style |
def generate_sb(date: datetime.datetime, project: str, programme_block: str) -> dict:
    """Generate a Scheduling Block data object.

    Args:
        date (datetime.datetime): UTC date of the SBI.
        project (str): project name.
        programme_block (str): programme block name.

    Returns:
        dict: Scheduling Block data object carrying the generated
        Scheduling Block Instance (SBI) ID.
    """
    # ID format: SB-<yyyymmdd>-<project>-<zero-padded random instance id>.
    sb_id = 'SB-{}-{}-{:04d}'.format(date.strftime('%Y%m%d'), project, randint(0, 9999))
    return {'id': sb_id, 'project': project, 'programme_block': programme_block}
Args:
date (datetime.datetime): UTC date of the SBI
project (str): Project Name
programme_block (str): Programme
Returns:
str, Scheduling Block Instance (SBI) ID. | codesearchnet |
def eventFilter(self, object, event):
if (object is self.tree_scripts):
if (event.type() == QtCore.QEvent.ChildAdded):
item = self.tree_scripts.selectedItems()[0]
if not isinstance(item.value, Script):
print('ONLY... | TEMPORARY / UNDER DEVELOPMENT
THIS IS TO ALLOW COPYING OF PARAMETERS VIA DRAP AND DROP
Args:
object:
event:
Returns: | juraj-google-style |
def _AddToTree(self, x, prevx):
self.s.add(x)
self.prev[x] = prevx
for y in self.right:
slack = self._CalcSlack(x, y)
if slack < self.slack[y]:
self.slack[y] = slack
self.slackx[y] = x | Adds |x| to the current augmenting tree.
x is a node which has already been matched to a node y in Right (which is
itself connected to prevx via a non-matching edge in the equality subgraph).
We indicate prevx comes before x in the tree so we can trace the path later.
Args:
x: Node which has already been matched to a... | github-repos |
def get_all_without_ethernet(self, start=0, count=-1, filter='', sort=''):
    """Get a paginated collection of logical downlinks without Ethernet.

    The collection is based on optional sorting and filtering and is
    constrained by the start and count parameters.

    Args:
        start: the first item to return, using 0-based indexing. If not
            specified, the default is 0 - start with the first available item.
        count: the number of resources to return; -1 returns all remaining.
        filter: optional filter expression.
        sort: optional sort expression.

    Returns:
        The matching logical downlinks.
    """
    uri = '/rest/logical-downlinks/withoutEthernet'
    client = ResourceClient(self._connection, uri)
    return client.get_all(start, count, filter=filter, sort=sort)
based on optional sorting and filtering and is constrained by start and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
... | codesearchnet |
def plot_path(line, lattice=None, coords_are_cartesian=False, ax=None, **kwargs):
(ax, fig, plt) = get_ax3d_fig_plt(ax)
if ('color' not in kwargs):
kwargs['color'] = 'r'
if ('linewidth' not in kwargs):
kwargs['linewidth'] = 3
for k in range(1, len(line)):
vertex1 = line[(k - 1)]
... | Adds a line passing through the coordinates listed in 'line' to a matplotlib Axes
Args:
line: list of coordinates.
lattice: Lattice object used to convert from reciprocal to cartesian coordinates
coords_are_cartesian: Set to True if you are providing
coordinates in cartesian coordinates. Defaults to False.
Requires la... | codesearchnet |
def _maybe_init_run(self, experiment_name, run_name):
experiment_id = self._maybe_init_experiment(experiment_name)
cursor = self._db.cursor()
cursor.execute(
,
(experiment_id, run_name))
row = cursor.fetchone()
if row:
return row[0]
run_id = self._create_id()
... | Returns the ID for the given run, creating the row if needed.
Args:
experiment_name: name of experiment containing this run.
run_name: name of run. | juraj-google-style |
def format(self, exclude_class=False):
if exclude_class:
msg = self.msg
else:
msg = ('%s: %s' % (self.__class__.__name__, self.msg))
if (len(self.params) != 0):
paramstring = '\n'.join([((str(key) + ': ') + str(val)) for (key, val) in self.params.items()])
msg += ('\nAddition... | Format this exception as a string including class name.
Args:
exclude_class (bool): Whether to exclude the exception class
name when formatting this exception
Returns:
string: a multiline string with the message, class name and
key value parameters passed to create the exception. | codesearchnet |
def _process_image_files(name, filenames, texts, labels, num_shards):
assert (len(filenames) == len(texts))
assert (len(filenames) == len(labels))
spacing = np.linspace(0, len(filenames), (FLAGS.num_threads + 1)).astype(np.int)
ranges = []
for i in range((len(spacing) - 1)):
ranges.append([s... | Process and save list of images as TFRecord of Example protos.
Args:
name: string, unique identifier specifying the data set
filenames: list of strings; each string is a path to an image file
texts: list of strings; each string is human readable, e.g. 'dog'
labels: list of integer; each integer identifies the ground t... | codesearchnet |
def delete_permissions(self, grp_name, resource):
    """Remove the group's permissions for the given resource.

    Args:
        grp_name (string): name of the group.
        resource (intern.resource.boss.Resource): identifies which data
            model object to operate on.

    Raises:
        requests.HTTPError on failure.
    """
    service = self.project_service
    # Authenticate against the project endpoint before mutating permissions.
    service.set_auth(self._token_project)
    service.delete_permissions(grp_name, resource)
Args:
grp_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on.
Raises:
requests.HTTPError on failure. | juraj-google-style |
def get_range(self, name_prefix, vlan_id_range):
filter = '"\'name\' matches \'{}\\_%\'"'.format(name_prefix)
ethernet_networks = self.get_all(filter=filter, sort='vlanId:ascending')
vlan_ids = self.dissociate_values_or_ranges(vlan_id_range)
for net in ethernet_networks[:]:
if (int(net['vlanId']... | Gets a list of Ethernet Networks that match the 'given name_prefix' and the 'vlan_id_range'.
Examples:
>>> enet.get_range('Enet_name', '1-2,5')
# The result contains the ethernet network with names:
['Enet_name_1', 'Enet_name_2', 'Enet_name_5']
>>> enet.get_range('Enet_name', '2')
# The result contains the ethernet n... | codesearchnet |
def _get_element_attr_or_none(document, selector, attribute):
element = document.cssselect(selector)
if element:
return element[0].get(attribute)
return None | Using a CSS selector, get the element and return the given attribute value, or None if no element.
Args:
document (HTMLElement) - HTMLElement document
selector (str) - CSS selector
attribute (str) - The attribute to get from the element | codesearchnet |
def _set_bearer_user_vars_local(token, allowed_client_ids, scopes):
result = urlfetch.fetch(
'%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token})))
if result.status_code != 200:
try:
error_description = json.loads(result.content)['error_description']
except (ValueError, Key... | Validate the oauth bearer token on the dev server.
Since the functions in the oauth module return only example results in local
development, this hits the tokeninfo endpoint and attempts to validate the
token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we
can get the user from the token.
Args:
... | juraj-google-style |
def search(self, scope, search, **kwargs):
    """Search GitLab resources matching the provided string.

    Args:
        scope (str): scope of the search.
        search (str): search string.
        **kwargs: extra options to send to the server (e.g. sudo).

    Raises:
        GitlabAuthenticationError: if authentication is not correct.
        GitlabSearchError: if the server failed to perform the request.

    Returns:
        The search results.
    """
    query = {'scope': scope, 'search': search}
    return self.http_list('/search', query_data=query, **kwargs)
Args:
scope (str): Scope of the search
search (str): Search string
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabSearchError: If the server failed to perform the request
Return... | codesearchnet |
def monkhorst(cls, ngkpt, shiftk=(0.5, 0.5, 0.5), chksymbreak=None, use_symmetries=True, use_time_reversal=True, comment=None):
return cls(kpts=[ngkpt], kpt_shifts=shiftk, use_symmetries=use_symmetries, use_time_reversal=use_time_reversal, chksymbreak=chksymbreak, comment=(comment if comment else 'Monkhorst-Pack sc... | Convenient static constructor for a Monkhorst-Pack mesh.
Args:
ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
shiftk: Shift to be applied to the kpoints.
use_symmetries: Use spatial symmetries to reduce the number of k-points.
use_time_reversal: Use time-reversal symmetry to reduce the number o... | codesearchnet |
def save_args(conditions, out_path):
if isinstance(conditions, argparse.Namespace):
args = vars(conditions)
else:
args = conditions
try:
os.makedirs(out_path)
except OSError:
pass
with tempdir(prefix='args', dir=out_path) as tempd:
path = os.path.join(tempd, '... | A util function to save experiment condition for job table.
Args:
conditions (:class:`argparse.Namespace` or dict): Experiment conditions
to show on a job table. Keys are show as table header and values
are show at a job row.
out_path (str): Output directory name to save conditions. | codesearchnet |
def _optimize_tf_model(self, graph_def, input_tensors, output_tensors, quant_mode):
if self.saved_model_dir or quant_mode.is_quantization_aware_trained_model():
return graph_def
try:
graph = _convert_to_constants.disable_lower_using_switch_merge(graph_def)
optimized_graph = _run_graph_op... | Run a Grappler pass to optimize the TensorFlow graph.
Args:
graph_def: Frozen GraphDef to be optimized.
input_tensors: List of input tensors.
output_tensors: List of output tensors.
quant_mode: the quantization mode.
Returns:
The optimized TensorFlow graph. | github-repos |
def __init__(self, message, raises=False):
    """Initialize the error with a message.

    Args:
        message (str): message passed to the Exception base class.
        raises (bool): whether this should be raised outside custodian.
    """
    super(CustodianError, self).__init__(message)
    # Keep the message and the raise-behavior flag available as attributes.
    self.message = message
    self.raises = raises
Args:
message (str): Message passed to Exception
raises (bool): Whether this should be raised outside custodian | juraj-google-style |
def __init__(self, dfk, *args, threshold=20, interval=5):
self.dfk = dfk
self.threshold = threshold
self.interval = interval
self.cb_args = args
self.strategy = Strategy(dfk)
self.callback = self.strategy.strategize
self._handle = None
self._event... | Initialize the flowcontrol object.
We start the timer thread here
Args:
- dfk (DataFlowKernel) : DFK object to track parsl progress
KWargs:
- threshold (int) : Tasks after which the callback is triggered
- interval (int) : seconds after which timer expires | juraj-google-style |
def send_to_prv_exchange(self, user_id, message=None):
    """Send a message through the logged-in user's private exchange.

    Args:
        user_id (string): user key.
        message (dict): message object.
    """
    exchange = 'prv_%s' % user_id.lower()
    body = json.dumps(message, cls=ZEngineJSONEncoder)
    log.debug(('Sending following users "%s" exchange:\n%s ' % (exchange, body)))
    channel = self.get_channel()
    channel.publish(exchange=exchange, routing_key='', body=body)
Args:
user_id string: User key
message dict: Message object | codesearchnet |
def __init__(self, graph, name=None):
if not isinstance(graph, BipartiteGraph):
raise ValueError(
"Given graph is not instance of Bipartite:", graph)
self._graph = graph
if name:
self.name = name
else:
self.name = super(_Node,... | Construct a new node.
Args:
name: Specifying the name of this node.
If not given, use strings returned from __str__ method. | juraj-google-style |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.