code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def sync(self, force=None):
try:
if force:
sd = force
else:
sd = self.sync_dir()
if sd == self.SYNC_DIR.FILE_TO_RECORD:
if force and not self.exists():
return None
self.fs_to_record()
... | Synchronize between the file in the file system and the field record |
def user_remove(name, user=None, password=None, host=None, port=None,
database='admin', authdb=None):
conn = _connect(user, password, host, port)
if not conn:
return 'Failed to connect to mongo database'
try:
log.info('Removing user %s', name)
mdb = pymongo.database.D... | Remove a MongoDB user
CLI Example:
.. code-block:: bash
salt '*' mongodb.user_remove <name> <user> <password> <host> <port> <database> |
def create_rule(self):
return BlockRule(
self.networkapi_url,
self.user,
self.password,
self.user_ldap) | Get an instance of block rule services facade. |
def del_current_vrf(self):
vrf_id = int(request.json['vrf_id'])
if vrf_id in session['current_vrfs']:
del session['current_vrfs'][vrf_id]
session.save()
return json.dumps(session.get('current_vrfs', {})) | Remove VRF from the filter list session variable |
def update_score(self, node, addToScore):
current_score = 0
score_string = self.parser.getAttribute(node, 'gravityScore')
if score_string:
current_score = int(score_string)
new_score = current_score + addToScore
self.parser.setAttribute(node, "gravityScore", str(new_s... | \
adds a score to the gravityScore Attribute we put on divs
we'll get the current score then add the score
we're passing in to the current |
def get_long_description():
import codecs
with codecs.open('README.rst', encoding='UTF-8') as f:
readme = [line for line in f if not line.startswith('.. contents::')]
return ''.join(readme) | Strip the content index from the long description. |
def hashing_type(self, cluster='main'):
if not self.config.has_section(cluster):
raise SystemExit("Cluster '%s' not defined in %s"
% (cluster, self.config_file))
hashing_type = 'carbon_ch'
try:
return self.config.get(cluster, 'hashing_type')
... | Hashing type of cluster. |
def import_symbol(name=None, path=None, typename=None, base_path=None):
_, symbol = _import(name or typename, path or base_path)
return symbol | Import a module, or a typename within a module from its name.
Arguments:
name: An absolute or relative (starts with a .) Python path
path: If name is relative, path is prepended to it.
base_path: (DEPRECATED) Same as path
typename: (DEPRECATED) Same as path |
def execute_sql(self, sql):
cursor = self.get_cursor()
cursor.execute(sql)
return cursor | Executes SQL and returns cursor for it |
def facets_boundary(self):
edges = self.edges_sorted.reshape((-1, 6))
edges_facet = [edges[i].reshape((-1, 2)) for i in self.facets]
edges_boundary = np.array([i[grouping.group_rows(i, require_count=1)]
for i in edges_facet])
return edges_boundary | Return the edges which represent the boundary of each facet
Returns
---------
edges_boundary : sequence of (n, 2) int
Indices of self.vertices |
def smartfields_get_field_status(self, field_name):
manager = self._smartfields_managers.get(field_name, None)
if manager is not None:
return manager.get_status(self)
return {'state': 'ready'} | A way to find out the status of a field. |
def create_connection(cls, address, timeout=None, source_address=None):
sock = socket.create_connection(address, timeout, source_address)
return cls(sock) | Create a SlipSocket connection.
This convenience method creates a connection to the specified address
using the :func:`socket.create_connection` function.
The socket that is returned from that call is automatically wrapped in
a :class:`SlipSocket` object.
.. note::
... |
def _reverse_convert(x, factor1, factor2):
return x * factor1 / ((1-x) * factor2 + x * factor1) | Converts mixing ratio x in c1 - c2 tie line to that in
comp1 - comp2 tie line.
Args:
x (float): Mixing ratio x in c1 - c2 tie line, a float between
0 and 1.
factor1 (float): Compositional ratio between composition c1 and
processed composition comp... |
def do_placeholder(parser, token):
name, params = parse_placeholder(parser, token)
return PlaceholderNode(name, **params) | Method that parse the placeholder template tag.
Syntax::
{% placeholder <name> [on <page>] [with <widget>] \
[parsed] [as <varname>] %}
Example usage::
{% placeholder about %}
{% placeholder body with TextArea as body_text %}
{% placeholder welcome with TextArea parsed as wel... |
def export(datastore_key, calc_id=-1, exports='csv', export_dir='.'):
dstore = util.read(calc_id)
parent_id = dstore['oqparam'].hazard_calculation_id
if parent_id:
dstore.parent = util.read(parent_id)
dstore.export_dir = export_dir
with performance.Monitor('export', measuremem=True) as mon:
... | Export an output from the datastore. |
def get_oauth_request(self):
try:
method = os.environ['REQUEST_METHOD']
except:
method = 'GET'
postdata = None
if method in ('POST', 'PUT'):
postdata = self.request.body
return oauth.Request.from_request(method, self.request.uri,
he... | Return an OAuth Request object for the current request. |
def plugins(self):
if self._plugins is None:
self._plugins = {}
for _, plugin in self.load_extensions('iotile.plugin'):
links = plugin()
for name, value in links:
self._plugins[name] = value
return self._plugins | Lazily load iotile plugins only on demand.
This is a slow operation on computers with a slow FS and is rarely
accessed information, so only compute it when it is actually asked
for. |
def yield_sorted_by_type(*typelist):
def decorate(fun):
@wraps(fun)
def decorated(*args, **kwds):
return iterate_by_type(fun(*args, **kwds), typelist)
return decorated
return decorate | a useful decorator for the collect_impl method of SuperChange
subclasses. Caches the yielded changes, and re-emits them
collected by their type. The order of the types can be specified
by listing the types as arguments to this decorator. Unlisted
types will be yielded last in no guaranteed order.
G... |
def dry_run_scan(self, scan_id, targets):
os.setsid()
for _, target in enumerate(targets):
host = resolve_hostname(target[0])
if host is None:
logger.info("Couldn't resolve %s.", target[0])
continue
port = self.get_scan_ports(scan_id, t... | Dry runs a scan. |
def get_attribute_selected(self, attribute):
items_list = self.get_options()
return next(iter([item.get_attribute(attribute) for item in items_list if item.is_selected()]), None) | Performs search of selected item from Web List
Return attribute of selected item
@params attribute - string attribute name |
def get_dm_online(self):
if not requests:
return False
try:
req = requests.get("https://earthref.org/MagIC/data-models/3.0.json", timeout=3)
if not req.ok:
return False
return req
except (requests.exceptions.ConnectTimeout, requests... | Use requests module to get data model from Earthref.
If this fails or times out, return false.
Returns
---------
result : requests.models.Response, False if unsuccessful |
def get_content_dict(vocabularies, content_vocab):
if vocabularies.get(content_vocab, None) is None:
raise UNTLFormException(
'Could not retrieve content vocabulary "%s" for the form.'
% (content_vocab)
)
else:
return vocabularies.get(content_vocab) | Get the content dictionary based on the element's content
vocabulary. |
def get_edited(self, subreddit='mod', *args, **kwargs):
url = self.config['edited'].format(subreddit=six.text_type(subreddit))
return self.get_content(url, *args, **kwargs) | Return a get_content generator of edited items.
:param subreddit: Either a Subreddit object or the name of the
subreddit to return the edited items for. Defaults to `mod` which
includes items for all the subreddits you moderate.
The additional parameters are passed directly int... |
def flush(self):
self.acquire()
try:
self.stream.flush()
except (EnvironmentError, ValueError):
pass
finally:
self.release() | Flushes all log files. |
def listdir_matches(match):
import os
last_slash = match.rfind('/')
if last_slash == -1:
dirname = '.'
match_prefix = match
result_prefix = ''
else:
match_prefix = match[last_slash + 1:]
if last_slash == 0:
dirname = '/'
result_prefix = '/'... | Returns a list of filenames contained in the named directory.
Only filenames which start with `match` will be returned.
Directories will have a trailing slash. |
def loadTextureD3D11_Async(self, textureId, pD3D11Device):
fn = self.function_table.loadTextureD3D11_Async
ppD3D11Texture2D = c_void_p()
result = fn(textureId, pD3D11Device, byref(ppD3D11Texture2D))
return result, ppD3D11Texture2D.value | Creates a D3D11 texture and loads data into it. |
def OECDas(self, to='name_short'):
if isinstance(to, str):
to = [to]
return self.data[self.data.OECD > 0][to] | Return OECD member states in the specified classification
Parameters
----------
to : str, optional
Output classification (valid str for an index of
country_data file), default: name_short
Returns
-------
Pandas DataFrame |
def add_field(self, key, field):
if key in self._fields:
raise PayloadFieldAlreadyDefinedError(
'Key {key} is already set on this payload. The existing field was {existing_field}.'
' Tried to set new field {field}.'
.format(key=key, existing_field=self._fields[key], field=field))
e... | Add a field to the Payload.
:API: public
:param string key: The key for the field. Fields can be accessed using attribute access as
well as `get_field` using `key`.
:param PayloadField field: A PayloadField instance. None is an allowable value for `field`,
in which case it will be skipped ... |
async def _get_person_json(self, id_, url_params=None):
url = self.url_builder(
'person/{person_id}',
dict(person_id=id_),
url_params=url_params or OrderedDict(),
)
data = await self.get_data(url)
return data | Retrieve raw person JSON by ID.
Arguments:
id_ (:py:class:`int`): The person's TMDb ID.
url_params (:py:class:`dict`): Any additional URL parameters.
Returns:
:py:class:`dict`: The JSON data. |
def container(self, name, length, type, *parameters):
self.new_struct('Container', name, 'length=%s' % length)
BuiltIn().run_keyword(type, *parameters)
self.end_struct() | Define a container with given length.
This is a convenience method creating a `Struct` with `length` containing fields defined in `type`. |
def request(self, requests):
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests)) | Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously. |
def ship_move(ship, x, y, speed):
click.echo('Moving ship %s to %s,%s with speed %s' % (ship, x, y, speed)) | Moves SHIP to the new location X,Y. |
def check_alert(self, text):
try:
alert = Alert(world.browser)
if alert.text != text:
raise AssertionError(
"Alert text expected to be {!r}, got {!r}.".format(
text, alert.text))
except WebDriverException:
pass | Assert an alert is showing with the given text. |
def collect_metrics():
def _register(action):
handler = Handler.get(action)
handler.add_predicate(partial(_restricted_hook, 'collect-metrics'))
return action
return _register | Register the decorated function to run for the collect_metrics hook. |
def verify_signature(self, signature_filename, data_filename,
keystore=None):
if not self.gpg:
raise DistlibException('verification unavailable because gpg '
'unavailable')
cmd = self.get_verify_command(signature_filename, data_file... | Verify a signature for a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which... |
def write(entries):
try:
with open(get_rc_path(), 'w') as rc:
rc.writelines(entries)
except IOError:
print('Error writing your ~/.vacationrc file!') | Write an entire rc file. |
def update(self, resource, timeout=-1):
self.__set_default_values(resource)
uri = self._client.build_uri(resource['logicalSwitch']['uri'])
return self._client.update(resource, uri=uri, timeout=timeout) | Updates a Logical Switch.
Args:
resource (dict): Object to update.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView, just stop waiting for its completion.
Returns:
d... |
def main() -> None:
_ =
testdata = [
"hello",
1,
["bongos", "today"],
]
for data in testdata:
compare_python_to_reference_murmur3_32(data, seed=0)
compare_python_to_reference_murmur3_64(data, seed=0)
print("All OK") | Command-line validation checks. |
def _expand_subsystems(self, scope_infos):
def subsys_deps(subsystem_client_cls):
for dep in subsystem_client_cls.subsystem_dependencies_iter():
if dep.scope != GLOBAL_SCOPE:
yield self._scope_to_info[dep.options_scope]
for x in subsys_deps(dep.subsystem_cls):
yield x
... | Add all subsystems tied to a scope, right after that scope. |
def message(self):
if self.type == 'cleartext':
return self.bytes_to_text(self._message)
if self.type == 'literal':
return self._message.contents
if self.type == 'encrypted':
return self._message | The message contents |
def _get_id(self, id_, pkg_name):
collection = JSONClientValidated('id',
collection=pkg_name + 'Ids',
runtime=self._runtime)
try:
result = collection.find_one({'aliasIds': {'$in': [str(id_)]}})
except e... | Returns the primary id given an alias.
If the id provided is not in the alias table, it will simply be
returned as is.
Only looks within the Id Alias namespace for the session package |
def handle_api_exception(error):
_mp_track(
type="exception",
status_code=error.status_code,
message=error.message,
)
response = jsonify(dict(
message=error.message
))
response.status_code = error.status_code
return response | Converts an API exception into an error response. |
def np2str(value):
if hasattr(value, 'dtype') and \
issubclass(value.dtype.type, (np.string_, np.object_)) and value.size == 1:
value = np.asscalar(value)
if not isinstance(value, str):
value = value.decode()
return value
else:
raise ValueError("Array is n... | Convert an `numpy.string_` to str.
Args:
value (ndarray): scalar or 1-element numpy array to convert
Raises:
ValueError: if value is array larger than 1-element or it is not of
type `numpy.string_` or it is not a numpy array |
def DeregisterAnalyzer(cls, analyzer_class):
analyzer_name = analyzer_class.NAME.lower()
if analyzer_name not in cls._analyzer_classes:
raise KeyError('analyzer class not set for name: {0:s}'.format(
analyzer_class.NAME))
del cls._analyzer_classes[analyzer_name] | Deregisters a analyzer class.
The analyzer classes are identified based on their lower case name.
Args:
analyzer_class (type): class object of the analyzer.
Raises:
KeyError: if analyzer class is not set for the corresponding name. |
def add_callback(self, fn, *args, **kwargs):
if not callable(fn):
raise ValueError("Value for argument 'fn' is {0} and is not a callable object.".format(type(fn)))
self._callbacks.append((fn, args, kwargs)) | Add a function and arguments to be passed to it to be executed after the batch executes.
A batch can support multiple callbacks.
Note, that if the batch does not execute, the callbacks are not executed.
A callback, thus, is an "on batch success" handler.
:param fn: Callable object
... |
def get_snapshot_policy(self, name, view=None):
return self._get("snapshots/policies/%s" % name, ApiSnapshotPolicy,
params=view and dict(view=view) or None, api_version=6) | Retrieve a single snapshot policy.
@param name: The name of the snapshot policy to retrieve.
@param view: View to materialize. Valid values are 'full', 'summary', 'export', 'export_redacted'.
@return: The requested snapshot policy.
@since: API v6 |
def get_vectors_loss(ops, docs, prediction, objective="L2"):
ids = ops.flatten([doc.to_array(ID).ravel() for doc in docs])
target = docs[0].vocab.vectors.data[ids]
if objective == "L2":
d_target = prediction - target
loss = (d_target ** 2).sum()
elif objective == "cosine":
loss, ... | Compute a mean-squared error loss between the documents' vectors and
the prediction.
Note that this is ripe for customization! We could compute the vectors
in some other way, e.g. with an LSTM language model, or use some other
type of objective. |
def create_game(
self,
map_name,
bot_difficulty=sc_pb.VeryEasy,
bot_race=sc_common.Random,
bot_first=False):
self._controller.ping()
map_inst = maps.get(map_name)
map_data = map_inst.data(self._run_config)
if map_name not in self._saved_maps:
self._controller.save_map... | Create a game, one remote agent vs the specified bot.
Args:
map_name: The map to use.
bot_difficulty: The difficulty of the bot to play against.
bot_race: The race for the bot.
bot_first: Whether the bot should be player 1 (else is player 2). |
def filter(self, *args, **kwargs):
if args or kwargs:
self.q_filters = Q(self.q_filters & Q(*args, **kwargs))
return self | Apply filters to the existing nodes in the set.
:param kwargs: filter parameters
Filters mimic Django's syntax with the double '__' to separate field and operators.
e.g `.filter(salary__gt=20000)` results in `salary > 20000`.
The following operators are available:
... |
def get_connection_params(self):
return {
'uri': self.settings_dict['NAME'],
'tls': self.settings_dict.get('TLS', False),
'bind_dn': self.settings_dict['USER'],
'bind_pw': self.settings_dict['PASSWORD'],
'retry_max': self.settings_dict.get('RETRY_MAX',... | Compute appropriate parameters for establishing a new connection.
Computed at system startup. |
def multi_pop(d, *args):
retval = {}
for key in args:
if key in d:
retval[key] = d.pop(key)
return retval | pops multiple keys off a dict like object |
def parse_input(self, text):
parts = util.split(text)
command = parts[0] if text and parts else None
command = command.lower() if command else None
args = parts[1:] if len(parts) > 1 else []
return (command, args) | Parse ctl user input. Double quotes are used
to group together multi words arguments. |
def get_json(self):
try:
usernotes = self.subreddit.wiki[self.page_name].content_md
notes = json.loads(usernotes)
except NotFound:
self._init_notes()
else:
if notes['ver'] != self.schema:
raise RuntimeError(
'Use... | Get the JSON stored on the usernotes wiki page.
Returns a dict representation of the usernotes (with the notes BLOB
decoded).
Raises:
RuntimeError if the usernotes version is incompatible with this
version of puni. |
def get_error_code_msg(cls, full_error_message):
for pattern in cls.ERROR_PATTERNS:
match = pattern.match(full_error_message)
if match:
return int(match.group('code')), match.group('msg').strip()
return 0, full_error_message | Extract the code and message of the exception that clickhouse-server generated.
See the list of error codes here:
https://github.com/yandex/ClickHouse/blob/master/dbms/src/Common/ErrorCodes.cpp |
def read_json(file_path):
try:
with open(file_path, 'r') as f:
config = json_tricks.load(f)
except ValueError:
print(' '+'!'*58)
print(' Woops! Looks the JSON syntax is not valid in:')
print(' {}'.format(file_path))
print(' Note: commonly this ... | Read in a json file and return a dictionary representation |
def set_encode_key_value(self, value, store_type=PUBLIC_KEY_STORE_TYPE_BASE64):
if store_type == PUBLIC_KEY_STORE_TYPE_PEM:
PublicKeyBase.set_encode_key_value(self, value.exportKey('PEM').decode(), store_type)
else:
PublicKeyBase.set_encode_key_value(self, value.exportKey('DER'),... | Set the value based on the type of encoding supported by RSA. |
def init_body_buffer(self, method, headers):
content_length = headers.get("CONTENT-LENGTH", None)
if method in (HTTPMethod.POST, HTTPMethod.PUT):
if content_length is None:
raise HTTPErrorBadRequest("HTTP Method requires a CONTENT-LENGTH header")
self.content_leng... | Sets up the body_buffer and content_length attributes based
on method and headers. |
def to_cloudformation(self, **kwargs):
resources = []
function = kwargs.get('function')
if not function:
raise TypeError("Missing required keyword argument: function")
if self.Method is not None:
self.Method = self.Method.lower()
resources.extend(self._get... | If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing
API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the
x-amazon-apigateway-integration into the Swagger body for a provided implicit API.
... |
def read(self):
try:
f = urllib.request.urlopen(self.url)
except urllib.error.HTTPError as err:
if err.code in (401, 403):
self.disallow_all = True
elif err.code >= 400:
self.allow_all = True
else:
raw = f.read()
... | Reads the robots.txt URL and feeds it to the parser. |
def predict(self, dataset,
new_observation_data=None, new_user_data=None, new_item_data=None):
if new_observation_data is None:
new_observation_data = _SFrame()
if new_user_data is None:
new_user_data = _SFrame()
if new_item_data is None:
new_i... | Return a score prediction for the user ids and item ids in the provided
data set.
Parameters
----------
dataset : SFrame
Dataset in the same form used for training.
new_observation_data : SFrame, optional
``new_observation_data`` gives additional observa... |
def get_log_entry_log_session(self, proxy):
if not self.supports_log_entry_log():
raise errors.Unimplemented()
return sessions.LogEntryLogSession(proxy=proxy, runtime=self._runtime) | Gets the session for retrieving log entry to log mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.logging.LogEntryLogSession) - a
``LogEntryLogSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
... |
def get_property(self, property_key: str) -> str:
self._check_object_exists()
return DB.get_hash_value(self.key, property_key) | Get a scheduling object property. |
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
self.name = name
self.fileExtension = extension
with open(path, 'r') as f:
self.text = f.read() | Generic File Read from File Method |
def to_frequencies(self, fill=np.nan):
an = np.sum(self, axis=1)[:, None]
with ignore_invalid():
af = np.where(an > 0, self / an, fill)
return af | Compute allele frequencies.
Parameters
----------
fill : float, optional
Value to use when number of allele calls is 0.
Returns
-------
af : ndarray, float, shape (n_variants, n_alleles)
Examples
--------
>>> import allel
>>... |
def LessThan(self, value):
self._awql = self._CreateSingleValueCondition(value, '<')
return self._query_builder | Sets the type of the WHERE clause as "less than".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. |
def set_output(self, outfile):
if self._orig_stdout:
sys.stdout = self._orig_stdout
self._stream = outfile
sys.stdout = _LineWriter(self, self._stream, self.default) | Set's the output file, currently only useful with context-managers.
Note:
This function is experimental and may not last. |
def register_scope(self, scope):
if not isinstance(scope, Scope):
raise TypeError("Invalid scope type.")
assert scope.id not in self.scopes
self.scopes[scope.id] = scope | Register a scope.
:param scope: A :class:`invenio_oauth2server.models.Scope` instance. |
def spkuds(descr):
assert len(descr) is 5
descr = stypes.toDoubleVector(descr)
body = ctypes.c_int()
center = ctypes.c_int()
framenum = ctypes.c_int()
typenum = ctypes.c_int()
first = ctypes.c_double()
last = ctypes.c_double()
begin = ctypes.c_int()
end = ctypes.c_int()
libsp... | Unpack the contents of an SPK segment descriptor.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkuds_c.html
:param descr: An SPK segment descriptor.
:type descr: 5-Element Array of floats
:return:
The NAIF ID code for the body of the segment,
The center of motion fo... |
def add_nio(self, nio, port_number):
if port_number in self._nios:
raise DynamipsError("Port {} isn't free".format(port_number))
yield from self._hypervisor.send('ethsw add_nio "{name}" {nio}'.format(name=self._name, nio=nio))
log.info('Ethernet switch "{name}" [{id}]: NIO {nio} boun... | Adds a NIO as new port on Ethernet switch.
:param nio: NIO instance to add
:param port_number: port to allocate for the NIO |
def monkey_patch():
reset()
time_mod.time = time
time_mod.sleep = sleep
time_mod.gmtime = gmtime
time_mod.localtime = localtime
time_mod.ctime = ctime
time_mod.asctime = asctime
time_mod.strftime = strftime | monkey patch `time` module to use our versions |
def get_function_for_aws_event(self, record):
if 's3' in record:
if ':' in record['s3']['configurationId']:
return record['s3']['configurationId'].split(':')[-1]
arn = None
if 'Sns' in record:
try:
message = json.loads(record['Sns']['Messag... | Get the associated function to execute for a triggered AWS event
Support S3, SNS, DynamoDB, kinesis and SQS events |
def run_script(pycode):
if pycode[0] == "\n":
pycode = pycode[1:]
pycode.rstrip()
pycode = textwrap.dedent(pycode)
globs = {}
six.exec_(pycode, globs, globs)
return globs | Run the Python in `pycode`, and return a dict of the resulting globals. |
def embedding(self, sentences, oov_way='avg'):
data_iter = self.data_loader(sentences=sentences)
batches = []
for token_ids, valid_length, token_types in data_iter:
token_ids = token_ids.as_in_context(self.ctx)
valid_length = valid_length.as_in_context(self.ctx)
... | Get tokens, tokens embedding
Parameters
----------
sentences : List[str]
sentences for encoding.
oov_way : str, default avg.
use **avg**, **sum** or **last** to get token embedding for those out of
vocabulary words
Returns
-------
... |
def extract(self, name):
if type(name) == type(''):
ndx = self.toc.find(name)
if ndx == -1:
return None
else:
ndx = name
(dpos, dlen, ulen, flag, typcd, nm) = self.toc.get(ndx)
self.lib.seek(self.pkgstart+dpos)
rslt = self.lib.r... | Get the contents of an entry.
NAME is an entry name.
Return the tuple (ispkg, contents).
For non-Python resoures, ispkg is meaningless (and 0).
Used by the import mechanism. |
def isDone(self):
done = pydaq.bool32()
self.IsTaskDone(ctypes.byref(done))
return done.value | Returns true if task is done. |
def rh45(msg):
d = hex2bin(data(msg))
if d[38] == '0':
return None
rh = bin2int(d[39:51]) * 16
return rh | Radio height.
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: radio height in ft |
def _original_path(self, path):
def components_to_path():
if len(path_components) > len(normalized_components):
normalized_components.extend(
path_components[len(normalized_components):])
sep = self._path_separator(path)
normalized_path = s... | Return a normalized case version of the given path for
case-insensitive file systems. For case-sensitive file systems,
return path unchanged.
Args:
path: the file path to be transformed
Returns:
A version of path matching the case of existing path elements. |
def build(self, stmts=None, set_check_var=True, invert=False):
out = ""
if set_check_var:
out += self.check_var + " = False\n"
out += self.out()
if stmts is not None:
out += "if " + ("not " if invert else "") + self.check_var + ":" + "\n" + openindent + "".join(st... | Construct code for performing the match then executing stmts. |
def copy(self):
new = type(self)(str(self))
new._init_from_channel(self)
return new | Returns a copy of this channel |
def canonical_stylename(font):
from fontbakery.constants import (STATIC_STYLE_NAMES,
VARFONT_SUFFIXES)
from fontbakery.profiles.shared_conditions import is_variable_font
from fontTools.ttLib import TTFont
valid_style_suffixes = [name.replace(' ', '') for name in STATIC_STYLE_... | Returns the canonical stylename of a given font. |
def process_parameters(parameters):
if not parameters:
return {}
params = copy.copy(parameters)
for param_name in parameters:
value = parameters[param_name]
server_param_name = re.sub(r'_(\w)', lambda m: m.group(1).upper(), param_name)
if isinstance(value, dict):
... | Allows the use of Pythonic-style parameters with underscores instead of camel-case.
:param parameters: The parameters object.
:type parameters: dict
:return: The processed parameters.
:rtype: dict |
def fill_n_todo(self):
left = self.left
right = self.right
top = self.top
bottom = self.bottom
for i in xrange(self.n_chunks):
self.n_todo.ravel()[i] = np.sum([left.ravel()[i].n_todo,
right.ravel()[i].n_todo,
... | Calculate and record the number of edge pixels left to do on each tile |
def primary_key_field(self):
return [field for field in self.instance._meta.fields if field.primary_key][0] | Return the primary key field.
Is `id` in most cases. Is `history_id` for Historical models. |
def available_domains(self):
if not hasattr(self, '_available_domains'):
url = 'http://{0}/request/domains/format/json/'.format(
self.api_domain)
req = requests.get(url)
domains = req.json()
setattr(self, '_available_domains', domains)
retu... | Return list of available domains for use in email address. |
async def update(self, fields=''):
path = 'Users/{{UserId}}/Items/{}'.format(self.id)
info = await self.connector.getJson(path,
remote=False,
Fields='Path,Overview,'+fields
)
self.object_dict.update(info)
self.extras = {... | reload object info from emby
|coro|
Parameters
----------
fields : str
additional fields to request when updating
See Also
--------
refresh : same thing
send :
post : |
def guess_project_dir():
projname = settings.SETTINGS_MODULE.split(".",1)[0]
projmod = import_module(projname)
projdir = os.path.dirname(projmod.__file__)
if os.path.isfile(os.path.join(projdir,"manage.py")):
return projdir
projdir = os.path.abspath(os.path.join(projdir, os.path.pardir))
... | Find the top-level Django project directory.
This function guesses the top-level Django project directory based on
the current environment. It looks for module containing the currently-
active settings module, in both pre-1.4 and post-1.4 layours. |
def collect(self):
from dvc.scm import SCM
from dvc.utils import is_binary
from dvc.repo import Repo
from dvc.exceptions import NotDvcRepoError
self.info[self.PARAM_DVC_VERSION] = __version__
self.info[self.PARAM_IS_BINARY] = is_binary()
self.info[self.PARAM_USER_... | Collect analytics report. |
def remote_delete(self, remote_path, r_st):
if S_ISDIR(r_st.st_mode):
for item in self.sftp.listdir_attr(remote_path):
full_path = path_join(remote_path, item.filename)
self.remote_delete(full_path, item)
self.sftp.rmdir(remote_path)
else:
... | Remove the remote directory node. |
def _generate(self):
part = creator.Particle(
[random.uniform(-1, 1)
for _ in range(len(self.value_means))])
part.speed = [
random.uniform(-self.max_speed, self.max_speed)
for _ in range(len(self.value_means))]
part.smin = -self.max_speed
... | Generates a particle using the creator function.
Notes
-----
Position and speed are uniformly randomly seeded within
allowed bounds. The particle also has speed limit settings
taken from global values.
Returns
-------
part : particle object
A... |
def _process_assignments(self, anexec, contents, mode="insert"):
for assign in self.RE_ASSIGN.finditer(contents):
assignee = assign.group("assignee").strip()
target = re.split(r"[(%\s]", assignee)[0].lower()
if target in self._intrinsic:
continue
i... | Extracts all variable assignments from the body of the executable.
:arg mode: for real-time update; either 'insert', 'delete' or 'replace'. |
def delete(self, doc_id: str) -> bool:
try:
self.instance.delete(self.index, self.doc_type, doc_id)
except RequestError as ex:
logging.error(ex)
return False
else:
return True | Delete a document with id. |
def parse_game_event(self, ge):
if ge.name == "dota_combatlog":
if ge.keys["type"] == 4:
try:
source = self.dp.combat_log_names.get(ge.keys["sourcename"],
"unknown")
target = self.dp.com... | Game events contain the combat log as well as 'chase_hero' events which
could be interesting |
async def stop(self, _task=None):
self._logger.info("Stopping adapter wrapper")
if self._task.stopped:
return
for task in self._task.subtasks:
await task.stop()
self._logger.debug("Stopping underlying adapter %s", self._adapter.__class__.__name__)
await se... | Stop the device adapter.
See :meth:`AbstractDeviceAdapter.stop`. |
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location | Join 'distbase' and 'location' in such way that the
result is a valid scp destination. |
def Transfer(self, wallet, from_addr, to_addr, amount, tx_attributes=None):
if not tx_attributes:
tx_attributes = []
sb = ScriptBuilder()
sb.EmitAppCallWithOperationAndArgs(self.ScriptHash, 'transfer',
[PromptUtils.parse_param(from_addr, wal... | Transfer a specified amount of the NEP5Token to another address.
Args:
wallet (neo.Wallets.Wallet): a wallet instance.
from_addr (str): public address of the account to transfer the given amount from.
to_addr (str): public address of the account to transfer the given amount ... |
def has_ext(path_name, *, multiple=None, if_all_ext=False):
base = os.path.basename(path_name)
count = base.count(EXT)
if not if_all_ext and base[0] == EXT and count != 0:
count -= 1
if multiple is None:
return count >= 1
elif multiple:
return count > 1
else:
retu... | Determine if the given path name has an extension |
def sample_binned(self, wavelengths=None, flux_unit=None, **kwargs):
x = self._validate_binned_wavelengths(wavelengths)
i = np.searchsorted(self.binset, x)
if not np.allclose(self.binset[i].value, x.value):
raise exceptions.InterpolationNotAllowed(
'Some or all wavele... | Sample binned observation without interpolation.
To sample unbinned data, use ``__call__``.
Parameters
----------
wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
Wavelength values for sampling.
If not a Quantity, assumed to be in Angstrom... |
def docs_client(self):
if not hasattr(self, '_docs_client'):
client = DocsClient()
client.ClientLogin(self.google_user, self.google_password,
SOURCE_NAME)
self._docs_client = client
return self._docs_client | A DocsClient singleton, used to look up spreadsheets
by name. |
def unpack_rpc_response(status, response=None, rpc_id=0, address=0):
status_code = status & ((1 << 6) - 1)
if address == 8:
status_code &= ~(1 << 7)
if status == 0:
raise BusyRPCResponse()
elif status == 2:
raise RPCNotFoundError("rpc %d:%04X not found" % (address, rpc_id))
e... | Unpack an RPC status back in to payload or exception. |
def render( self, tag, single, between, kwargs ):
out = "<%s" % tag
for key, value in list( kwargs.items( ) ):
if value is not None:
key = key.strip('_')
if key == 'http_equiv':
key = 'http-equiv'
elif key == 'accept_charset... | Append the actual tags to content. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.