Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
371,200 | def configure(self, options, conf):
self.conf = conf
self.when = options.browser_closer_when | Configure plugin. Plugin is enabled by default. |
371,201 | def update_container(self, container, metadata, **kwargs):
LOG.debug(, self.driver)
return self.driver.update_container(container, metadata, **kwargs) | Update container metadata
:param container: container name (Container is equivalent to
Bucket term in Amazon).
:param metadata(dict): additional metadata to include in the request.
:param **kwargs(dict): extend args for specific driver. |
371,202 | def _update_config_tags(self,directory,files=None):
if not self._connectToFlickr():
print("%s - Couldn%s [flickr] Skipping, tag updatephotoidstatokstat']);
return False
else:
return True
return False | Loads tags information from file and updates on flickr,
only reads first line. Format is comma separated eg.
travel, 2010, South Africa, Pretoria
If files is None, will update all files in DB, otherwise
will only update files that are in the flickr DB and files list |
371,203 | def loader():
url = request.args.get()
response = requests.get(url)
return response.content | Load image from URL, and preprocess for Resnet. |
371,204 | def hostedzone_from_element(zone):
return HostedZone(
name=maybe_bytes_to_unicode(zone.find("Name").text).encode("ascii").decode("idna"),
identifier=maybe_bytes_to_unicode(zone.find("Id").text).replace(u"/hostedzone/", u""),
rrset_count=int(zone.find("ResourceRecordSetCount").text),
... | Construct a L{HostedZone} instance from a I{HostedZone} XML element. |
371,205 | def maybe_convert_platform(values):
if isinstance(values, (list, tuple)):
values = construct_1d_object_array_from_listlike(list(values))
if getattr(values, , None) == np.object_:
if hasattr(values, ):
values = values._values
values = lib.maybe_convert_objects(values)
... | try to do platform conversion, allow ndarray or list here |
371,206 | def disable_constant(parameterized):
params = parameterized.params().values()
constants = [p.constant for p in params]
for p in params:
p.constant = False
try:
yield
except:
raise
finally:
for (p, const) in zip(params, constants):
p.constant = con... | Temporarily set parameters on Parameterized object to
constant=False. |
371,207 | def str2dn(dn, flags=0):
if not isinstance(dn, six.text_type):
dn = dn.decode("utf_8")
assert flags == 0
result, i = _distinguishedName(dn, 0)
if result is None:
raise tldap.exceptions.InvalidDN("Cannot parse dn")
if i != len(dn):
raise tldap.exceptions.InvalidDN(... | This function takes a DN as string as parameter and returns
a decomposed DN. It's the inverse to dn2str().
flags describes the format of the dn
See also the OpenLDAP man-page ldap_str2dn(3) |
371,208 | def make_coord_dict(subs, subscript_dict, terse=True):
sub_elems_list = [y for x in subscript_dict.values() for y in x]
coordinates = {}
for sub in subs:
if sub in sub_elems_list:
name = find_subscript_name(subscript_dict, sub)
coordinates[name] = [sub]
elif not ... | This is for assisting with the lookup of a particular element, such that the output
of this function would take the place of %s in this expression
`variable.loc[%s]`
Parameters
----------
subs: list of strings
coordinates, either as names of dimensions, or positions within a dimension
... |
371,209 | def identityRequest():
a = TpPd(pd=0x5)
b = MessageType(mesType=0x8)
c = IdentityTypeAndSpareHalfOctets()
packet = a / b / c
return packet | IDENTITY REQUEST Section 9.2.10 |
371,210 | def clean_folder_path(path, expected=None):
folders = split_unescaped(, path)
if len(folders) == 0:
return , None
if expected == or folders[-1] == or folders[-1] == or get_last_pos_of_char(, path) == len(path) - 1:
entity_name = None
else:
entity_name = unescape_name_st... | :param path: A folder path to sanitize and parse
:type path: string
:param expected: Whether a folder ("folder"), a data object ("entity"), or either (None) is expected
:type expected: string or None
:returns: *folderpath*, *name*
Unescape and parse *path* as a folder path to possibly an entity
... |
371,211 | def get(feature, obj, **kwargs):
feature = (NEURITEFEATURES[feature] if feature in NEURITEFEATURES
else NEURONFEATURES[feature])
return _np.array(list(feature(obj, **kwargs))) | Obtain a feature from a set of morphology objects
Parameters:
feature(string): feature to extract
obj: a neuron, population or neurite tree
**kwargs: parameters to forward to underlying worker functions
Returns:
features as a 1D or 2D numpy array. |
371,212 | def model_fn(features, labels, mode, params, config):
del labels, config
if params["analytic_kl"] and params["mixture_components"] != 1:
raise NotImplementedError(
"Using `analytic_kl` is only supported when `mixture_components = 1` "
"since there's no closed form otherwise.")
encoder = m... | Builds the model function for use in an estimator.
Arguments:
features: The input features for the estimator.
labels: The labels, unused here.
mode: Signifies whether it is train or test or predict.
params: Some hyperparameters as a dictionary.
config: The RunConfig, unused here.
Returns:
... |
371,213 | def assertDateTimesBefore(self, sequence, target, strict=True, msg=None):
if not isinstance(sequence, collections.Iterable):
raise TypeError()
if strict:
standardMsg = % (sequence,
target)
op = ... | Fail if any elements in ``sequence`` are not before
``target``.
If ``target`` is iterable, it must have the same length as
``sequence``
If ``strict=True``, fail unless all elements in ``sequence``
are strictly less than ``target``. If ``strict=False``, fail
unless all e... |
371,214 | def create_defaults_for(session, user, only_for=None, detail_values=None):
detail_values = detail_values or {}
if not user.openid.endswith():
log.warn("New user not from fedoraproject.org. No defaults set.")
return
nick = user.openid.split()[0]
valid_paths = fmn.lib.l... | Create a sizable amount of defaults for a new user. |
371,215 | def equilibrium_transition_matrix(Xi, omega, sigma, reversible=True, return_lcc=True):
import msmtools.estimation as me
Ct_Eq = np.einsum(, omega, Xi, Xi, sigma)
Ct_Eq[Ct_Eq < 0.0] = 0.0
pi_r = np.sum(Ct_Eq, axis=1)
if reversible:
pi_c = np.sum(Ct_Eq, axis=0)
pi_... | Compute equilibrium transition matrix from OOM components:
Parameters
----------
Xi : ndarray(M, N, M)
matrix of set-observable operators
omega: ndarray(M,)
information state vector of OOM
sigma : ndarray(M,)
evaluator of OOM
reversible : bool, optional, default=True
... |
371,216 | def toggle_sensor(request, sensorname):
if service.read_only:
service.logger.warning("Could not perform operation: read only mode enabled")
raise Http404
source = request.GET.get(, )
sensor = service.system.namespace[sensorname]
sensor.status = not sensor.status
service.system.f... | This is used only if websocket fails |
371,217 | def KL_divergence(P,Q):
assert(P.keys()==Q.keys())
distance = 0
for k in P.keys():
distance += P[k] * log(P[k]/Q[k])
return distance | Compute the KL divergence between distributions P and Q
P and Q should be dictionaries linking symbols to probabilities.
the keys to P and Q should be the same. |
371,218 | def database_path(self):
filename = self.database_filename
db_path = ":memory:" if filename == ":memory:" else (
path.abspath(path.join(__file__, "../..", "..", "data", filename)))
return db_path | Full database path. Includes the default location + the database filename. |
371,219 | def slicenet_internal(inputs, targets, target_space, hparams, run_decoder=True):
with tf.variable_scope("slicenet"):
if inputs.get_shape().as_list()[-1] != hparams.hidden_size:
inputs = common_layers.conv_block(
inputs,
hparams.hidden_size, [((1, 1), (3, 3))],
first_rel... | The slicenet model, main step used for training. |
371,220 | def element_wise_op(array, other, op, ty):
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array
other_var = weld_obj.update(other)
if isinstance(other, W... | Operation of series and other, element-wise (binary operator add)
Args:
array (WeldObject / Numpy.ndarray): Input array
other (WeldObject / Numpy.ndarray): Second Input array
op (str): Op string used to compute element-wise operation (+ / *)
ty (WeldType): Type of each element in th... |
371,221 | def GetPythonLibraryDirectoryPath():
path = sysconfig.get_python_lib(True)
_, _, path = path.rpartition(sysconfig.PREFIX)
if path.startswith(os.sep):
path = path[1:]
return path | Retrieves the Python library directory path. |
371,222 | def update_name(self):
p = self.api_retrieve()
p.name = self.name
p.save()
self.save() | Update the name of the Plan in Stripe and in the db.
Assumes the object being called has the name attribute already
reset, but has not been saved.
Stripe does not allow for update of any other Plan attributes besides name. |
371,223 | def insert_paths(self):
if self.args.path:
sys.path.insert(0, self.args.path)
if hasattr(self.config.application, config.PATHS):
if hasattr(self.config.application.paths, config.BASE):
sys.path.insert(0, self.config.application.paths.base) | Inserts a base path into the sys.path list if one is specified in
the configuration. |
371,224 | async def power(source, exponent):
async with streamcontext(source) as streamer:
async for item in streamer:
yield item ** exponent | Raise the elements of an asynchronous sequence to the given power. |
371,225 | def set_fluxinfo(self):
knowncals = [, , , ]
sourcenames = [self.sources[source][] for source in self.sources]
calsources = [cal for src in sourcenames for cal in knowncals if cal in src]
calsources_full = [src for src in sourcenames for cal in knowncals if cal in src... | Uses list of known flux calibrators (with models in CASA) to find full name given in scan. |
371,226 | def create(cls, **kw):
for k, v in kw.items():
attr = getattr(cls, k, None)
if isinstance(attr, RecordAttribute):
kw.pop(k)
kw.update(attr._decompose(v))
return cls(**kw) | Create an instance of this class, first cleaning up the keyword
arguments so they will fill in any required values.
@return: an instance of C{cls} |
371,227 | def _get_populate_values(self, instance) -> Tuple[str, str]:
return [
(
lang_code,
self._get_populate_from_value(
instance,
self.populate_from,
lang_code
),
)
... | Gets all values (for each language) from the
specified's instance's `populate_from` field.
Arguments:
instance:
The instance to get the values from.
Returns:
A list of (lang_code, value) tuples. |
371,228 | def _import_model(models, crumbs):
logger_jsons.info("enter import_model".format(crumbs))
_models = OrderedDict()
try:
for _idx, model in enumerate(models):
if "summaryTable" in model:
model["summaryTable"] = _idx_table_by_name(model["summa... | Change the nested items of the paleoModel data. Overwrite the data in-place.
:param list models: Metadata
:param str crumbs: Crumbs
:return dict _models: Metadata |
371,229 | def UpdateWorkerStatus(
self, identifier, status, pid, used_memory, display_name,
number_of_consumed_sources, number_of_produced_sources,
number_of_consumed_events, number_of_produced_events,
number_of_consumed_event_tags, number_of_produced_event_tags,
number_of_consumed_reports, number_o... | Updates the status of a worker.
Args:
identifier (str): worker identifier.
status (str): human readable status of the worker e.g. 'Idle'.
pid (int): process identifier (PID).
used_memory (int): size of used memory in bytes.
display_name (str): human readable of the file entry currentl... |
371,230 | def write_branch_data(self, file, padding=" "):
attrs = [ % (k,v) for k,v in self.branch_attr.iteritems()]
attr_str = ", ".join(attrs)
for br in self.case.branches:
file.write("%s%s -> %s [%s];\n" % \
(padding, br.from_bus.name, br.to_bus.name, attr_str)) | Writes branch data in Graphviz DOT language. |
371,231 | def _AbortJoin(self, timeout=None):
for pid, process in iter(self._processes_per_pid.items()):
logger.debug(.format(
process.name, pid))
process.join(timeout=timeout)
if not process.is_alive():
logger.debug(.format(
process.name, pid)) | Aborts all registered processes by joining with the parent process.
Args:
timeout (int): number of seconds to wait for processes to join, where
None represents no timeout. |
371,232 | def serve_doc(app, url):
@app.route(url, doc=False)
def index(env, req):
ret =
for d in env[]:
ret += .format(**d)
return ret | Serve API documentation extracted from request handler docstrings
Parameters:
* app: Grole application object
* url: URL to serve at |
371,233 | def get_output(self, idx=-1):
buff = ""
data = self.get_data()
buff += repr(data) + " | "
for i in range(0, len(data)):
buff += "\\x%02x" % data[i]
return buff | Return an additional output of the instruction
:rtype: string |
371,234 | def find_file(self, path, saltenv, back=None):
path = salt.utils.stringutils.to_unicode(path)
saltenv = salt.utils.stringutils.to_unicode(saltenv)
back = self.backends(back)
kwargs = {}
fnd = {: ,
: }
if os.path.isabs(path):
return fnd
... | Find the path and return the fnd structure, this structure is passed
to other backend interfaces. |
371,235 | def makeAggShkHist(self):
sim_periods = self.act_T
Events = np.arange(self.AggShkDstn[0].size)
EventDraws = drawDiscrete(N=sim_periods,P=self.AggShkDstn[0],X=Events,seed=0)
PermShkAggHist = self.AggShkDstn[1][EventDraws]
TranShkAggHist = self.AggShkDstn[2][EventDr... | Make simulated histories of aggregate transitory and permanent shocks. Histories are of
length self.act_T, for use in the general equilibrium simulation. This replicates the same
method for CobbDouglasEconomy; future version should create parent class.
Parameters
----------
Non... |
371,236 | def log_message(self, format, *args):
code = args[1][0]
levels = {
: ,
:
}
log_handler = getattr(logger, levels.get(code, ))
log_handler(format % args) | overrides the ``log_message`` method from the wsgiref server so that
normal logging works with whatever configuration the application has
been set to.
Levels are inferred from the HTTP status code, 4XX codes are treated as
warnings, 5XX as errors and everything else as INFO level. |
371,237 | def get_pb_ids(self) -> List[str]:
values = DB.get_hash_value(self._key, )
return ast.literal_eval(values) | Return the list of PB ids associated with the SBI.
Returns:
list, Processing block ids |
371,238 | def find_extensions_in(path: typing.Union[str, pathlib.Path]) -> list:
if not isinstance(path, pathlib.Path):
path = pathlib.Path(path)
if not path.is_dir():
return []
extension_names = []
for subpath in path.glob():
parts = subpath.with_suffix().parts
if pa... | Tries to find things that look like bot extensions in a directory. |
371,239 | async def dispatch(self, request, view=None, **kwargs):
self.auth = await self.authorize(request, **kwargs)
self.collection = await self.get_many(request, **kwargs)
if request.method == and view is None:
return await super(RESTHandler, self).dispatch(r... | Process request. |
371,240 | def checkCytoscapeVersion(host=cytoscape_host,port=cytoscape_port):
URL="http://"+str(host)+":"+str(port)+"/v1/version/"
r = requests.get(url = URL)
r=json.loads(r.content)
for k in r.keys():
print(k, r[k]) | Checks cytoscape version
:param host: cytoscape host address, default=cytoscape_host
:param port: cytoscape port, default=1234
:returns: cytoscape and api version |
371,241 | def render(self, *args, **kwargs):
ns = self.default_context.copy()
if len(args) == 1 and isinstance(args[0], MultiDict):
ns.update(args[0].to_dict(flat=True))
else:
ns.update(dict(*args))
if kwargs:
ns.update(kwargs)
context = Context... | This function accepts either a dict or some keyword arguments which
will then be the context the template is evaluated in. The return
value will be the rendered template.
:param context: the function accepts the same arguments as the
:class:`dict` constructor.
:... |
371,242 | def swap(self, kaxes, vaxes, size="150"):
kaxes = asarray(tupleize(kaxes), )
vaxes = asarray(tupleize(vaxes), )
if type(size) is not str:
size = tupleize(size)
if len(kaxes) == self.keys.ndim and len(vaxes) == 0:
raise ValueError(
... | Swap axes from keys to values.
This is the core operation underlying shape manipulation
on the Spark bolt array. It exchanges an arbitrary set of axes
between the keys and the valeus. If either is None, will only
move axes in one direction (from keys to values, or values to keys).
... |
371,243 | def hint_width(self):
return sum((len(self.style.delimiter),
self.wide,
len(self.style.delimiter),
len(u),
UCS_PRINTLEN + 2,
len(u),
self.style.name_len,)) | Width of a column segment. |
371,244 | def _split_indices(self, concat_inds):
clengths = np.append([0], np.cumsum(self.__lengths))
mapping = np.zeros((clengths[-1], 2), dtype=int)
for traj_i, (start, end) in enumerate(zip(clengths[:-1], clengths[1:])):
mapping[start:end, 0] = traj_i
mapping[start:end,... | Take indices in 'concatenated space' and return as pairs
of (traj_i, frame_i) |
371,245 | def unacknowledge_problem(self):
if self.problem_has_been_acknowledged:
logger.debug("[item::%s] deleting acknowledge of %s",
self.get_name(),
self.get_full_name())
self.problem_has_been_acknowledged = False
if self.m... | Remove the acknowledge, reset the flag. The comment is deleted
:return: None |
371,246 | def pickle_matpower_cases(case_paths, case_format=2):
import pylon.io
if isinstance(case_paths, basestring):
case_paths = [case_paths]
for case_path in case_paths:
case = pylon.io.MATPOWERReader(case_format).read(case_path)
dir_path = os.path.dirname(case_pa... | Parses the MATPOWER case files at the given paths and pickles the
resulting Case objects to the same directory. |
371,247 | def draw_tree(node, child_iter=lambda n: n.children, text_str=lambda n: str(n)):
return _draw_tree(node, , child_iter, text_str) | Args:
node: the root of the tree to be drawn,
child_iter: function that when called with a node, returns an iterable over all its children
text_str: turns a node into the text to be displayed in the tree.
The default implementations of these two arguments retrieve the children by accessing ... |
371,248 | def transfer_project(self, to_project_id, **kwargs):
path = % (self.id, to_project_id)
self.manager.gitlab.http_post(path, **kwargs) | Transfer a project to this group.
Args:
to_project_id (int): ID of the project to transfer
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabTransferProjectError: If the pr... |
371,249 | def project_ecef_vector_onto_sc(inst, x_label, y_label, z_label,
new_x_label, new_y_label, new_z_label,
meta=None):
import pysatMagVect
x, y, z = pysatMagVect.project_ecef_vector_onto_basis(inst[x_label], inst[y_label], inst[z_label],
... | Express input vector using s/c attitude directions
x - ram pointing
y - generally southward
z - generally nadir
Parameters
----------
x_label : string
Label used to get ECEF-X component of vector to be projected
y_label : string
Label used to get ECEF-Y component of... |
371,250 | def namedb_get_num_blockstack_ops_at( db, block_id ):
cur = db.cursor()
preorder_count_rows_query = "SELECT COUNT(*) FROM preorders WHERE block_number = ?;"
preorder_count_rows_args = (block_id,)
num_preorders = namedb_select_count_rows(cur, preorder_count_rows_query, preorder_count_rows... | Get the number of name/namespace/token operations that occurred at a particular block. |
371,251 | def remove_tweet(self, id):
try:
self._client.destroy_status(id=id)
return True
except TweepError as e:
if e.api_code in [TWITTER_PAGE_DOES_NOT_EXISTS_ERROR, TWITTER_DELETE_OTHER_USER_TWEET]:
return False
raise | Delete a tweet.
:param id: ID of the tweet in question
:return: True if success, False otherwise |
371,252 | def read_trigger_parameters(filename):
parameters = []
f = open(filename, )
print()
for line in f:
if line[0] == :
print(line.rstrip().lstrip())
else:
parameter_dict = ast.literal_eval(line)
trig_par = TriggerParameters(parameter_dict... | Read the trigger parameters into trigger_parameter classes.
:type filename: str
:param filename: Parameter file
:returns: List of :class:`eqcorrscan.utils.trigger.TriggerParameters`
:rtype: list
.. rubric:: Example
>>> from eqcorrscan.utils.trigger import read_trigger_parameters
>>> para... |
371,253 | def _wait_for_machine_booted(name, suffictinet_texts=None):
suffictinet_texts = suffictinet_texts or ["systemd-logind"]
for foo in range(constants.DEFAULT_RETRYTIMEOUT):
time.sleep(constants.DEFAULT_SLEEP)
out = run_cmd(
["machinectl", "... | Internal method
wait until machine is ready, in common case means there is running systemd-logind
:param name: str with machine name
:param suffictinet_texts: alternative text to check in output
:return: True or exception |
371,254 | def create(name, *effects, **kwargs):
value_info = kwargs.pop("value", None)
params = kwargs.pop("params", None)
label = kwargs.pop("label", None)
desc = kwargs.pop("desc", None)
if kwargs:
raise TypeError("create() got an unexpected keyword "
% kwargs.keys()[0])... | Annotate a non-idempotent create action to the model being defined.
Should really be::
create(name, *effects, value=None, params=None, label=None, desc=None)
but it is not supported by python < 3.
@param name: item name unique for the model being defined.
@type name: str or unicode
@param eff... |
371,255 | def get_empty_dtype_and_na(join_units):
if len(join_units) == 1:
blk = join_units[0].block
if blk is None:
return np.float64, np.nan
if is_uniform_reindex(join_units):
empty_dtype = join_units[0].block.dtype
upcasted_na = join_units[0].block.fill_value
... | Return dtype and N/A values to use when concatenating specified units.
Returned N/A value may be None which means there was no casting involved.
Returns
-------
dtype
na |
371,256 | def time_from_match(match_object):
hour = int(match_object.group())
minute = int(match_object.group())
second = int(match_object.group())
subsecond = match_object.group()
microsecond = 0
if subsecond is not None:
subsecond_denominator = 10.0 ** len(subsecond)
subsecond = in... | Create a time object from a regular expression match.
The regular expression match is expected to be from RE_TIME or RE_DATETIME.
@param match_object: The regular expression match.
@type value: B{re}.I{MatchObject}
@return: A date object.
@rtype: B{datetime}.I{time} |
371,257 | def _normalize_name(name):
try:
return HTTPHeaders._normalized_headers[name]
except KeyError:
if HTTPHeaders._NORMALIZED_HEADER_RE.match(name):
normalized = name
else:
normalized = "-".join([w.capitalize() for w in name.split("... | Converts a name to Http-Header-Case.
>>> HTTPHeaders._normalize_name("coNtent-TYPE")
'Content-Type' |
371,258 | def _general_multithread(func):
def multithread(templates, stream, *args, **kwargs):
with pool_boy(ThreadPool, len(stream), **kwargs) as pool:
return _pool_normxcorr(templates, stream, pool=pool, func=func)
return multithread | return the general multithreading function using func |
371,259 | def remove(self, username=None):
self._user_list = [user for user in self._user_list if user.name != username] | Remove User instance based on supplied user name. |
371,260 | def get_terrain_height(self, pos: Union[Point2, Point3, Unit]) -> int:
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self._game_info.terrain_height[pos] | Returns terrain height at a position. Caution: terrain height is not anywhere near a unit's z-coordinate. |
371,261 | def batch_commit(self, message):
class controlled_execution:
def __init__(self, git, message):
self.git = git
self.message = message
def __enter__(self):
self.git.git_batch_commit = True
if self.git.job_id:
... | Instead of committing a lot of small commits you can batch it together using this controller.
Example:
with git.batch_commit('BATCHED'):
git.commit_file('my commit 1', 'path/to/file', 'content from file')
git.commit_json_file('[1, 2, 3]', 'path/to/file2', 'json array')
... |
371,262 | def create_context(self, message_queue, task_id):
parent_data = ParentData(self.parent.kind, self.parent.remote_id)
path_data = self.local_file.get_path_data()
params = parent_data, path_data, self.local_file.remote_id
return UploadContext(self.settings, params, message_queue, t... | Create values to be used by create_small_file function.
:param message_queue: Queue: queue background process can send messages to us on
:param task_id: int: id of this command's task so message will be routed correctly |
371,263 | def get_defaults(self):
options = [copy.copy(opt) for opt in self._options]
for opt in options:
try:
del opt.kwargs[]
except KeyError:
pass
parser = self.build_parser(options, permissive=True, add_help=False)
parse... | Use argparse to determine and return dict of defaults. |
371,264 | def log_print_response(logger, response):
log_msg =
log_msg += .format(str(response.status_code))
log_msg += .format(str(dict(response.headers)))
try:
log_msg += .format(_get_pretty_body(dict(response.headers), response.content))
except ValueError:
log_msg += .format(_get_pret... | Log an HTTP response data
:param logger: logger to use
:param response: HTTP response ('Requests' lib)
:return: None |
371,265 | def get_data_generator_by_id(hardware_source_id, sync=True):
hardware_source = HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id)
def get_last_data():
return hardware_source.get_next_xdatas_to_finish()[0].data.copy()
yield get_last_data | Return a generator for data.
:param bool sync: whether to wait for current frame to finish then collect next frame
NOTE: a new ndarray is created for each call. |
371,266 | def index_documents(self, fresh_docs, model):
docids = fresh_docs.keys()
vectors = (model.docs2vecs(fresh_docs[docid] for docid in docids))
logger.info("adding %i documents to %s" % (len(docids), self))
self.qindex.add_documents(vectors)
self.qindex.save()
self.u... | Update fresh index with new documents (potentially replacing old ones with
the same id). `fresh_docs` is a dictionary-like object (=dict, sqlitedict, shelve etc)
that maps document_id->document. |
371,267 | def map_across_full_axis(self, axis, map_func):
num_splits = self._compute_num_partitions()
preprocessed_map_func = self.preprocess_func(map_func)
partitions = self.column_partitions if not axis else self.row_partitions
result_blocks ... | Applies `map_func` to every partition.
Note: This method should be used in the case that `map_func` relies on
some global information about the axis.
Args:
axis: The axis to perform the map across (0 - index, 1 - columns).
map_func: The function to apply.
R... |
371,268 | def read_i2c_block_data(self, address, register, length):
return self.smbus.read_i2c_block_data(address, register, length) | I2C block transactions do not limit the number of bytes transferred
but the SMBus layer places a limit of 32 bytes.
I2C Block Read: i2c_smbus_read_i2c_block_data()
================================================
This command reads a block of bytes from a device, from a
design... |
371,269 | def get_logger(name):
if not hasattr(logging.Logger, ):
logging.addLevelName(TRACE_LEVEL, )
def trace(self, message, *args, **kwargs):
if self.isEnabledFor(TRACE_LEVEL):
self._log(TRACE_LEVEL, message, args, **kwargs)
logging.Logger.trace =... | Return logger with null handler added if needed. |
371,270 | def update_objective(self, objective_form):
collection = JSONClientValidated(,
collection=,
runtime=self._runtime)
if not isinstance(objective_form, ABCObjectiveForm):
raise errors.In... | Updates an existing objective.
arg: objective_form (osid.learning.ObjectiveForm): the form
containing the elements to be updated
raise: IllegalState - ``objective_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an inva... |
371,271 | def setFont(self, font):
super(XTimeEdit, self).setFont(font)
self._hourCombo.setFont(font)
self._minuteCombo.setFont(font)
self._secondCombo.setFont(font)
self._timeOfDayCombo.setFont(font) | Assigns the font to this widget and all of its children.
:param font | <QtGui.QFont> |
371,272 | def predict_withGradients(self, X):
if X.ndim==1: X = X[None,:]
ps = self.model.param_array.copy()
means = []
stds = []
dmdxs = []
dsdxs = []
for s in self.hmc_samples:
if self.model._fixes_ is None:
self.model[:] = s
... | Returns the mean, standard deviation, mean gradient and standard deviation gradient at X for all the MCMC samples. |
371,273 | def isdir(self, relpath, rsc=None):
filepath = self.find(relpath, rsc)
if filepath.startswith():
resource = QtCore.QResource(filepath)
return not resource.isFile()
else:
return os.path.isdir(filepath) | Returns whether or not the resource is a directory.
:return <bool> |
371,274 | def queryset(self, request, queryset):
if self.form.is_valid():
filter_params = dict(
filter(lambda x: bool(x[1]), self.form.cleaned_data.items())
)
return queryset.filter(**filter_params)
return queryset | Filter queryset using params from the form. |
371,275 | def _on_drawing(self, object, name, old, new):
attrs = [ "drawing", "arrowhead_drawing" ]
others = [getattr(self, a) for a in attrs \
if (a != name) and (getattr(self, a) is not None)]
x, y = self.component.position
print "POS:", x, y, self.component.position
... | Handles the containers of drawing components being set. |
371,276 | def to_array(self):
array = super(Animation, self).to_array()
array[] = u(self.file_id)
array[] = int(self.width)
array[] = int(self.height)
array[] = int(self.duration)
if self.thumb is not None:
array[] = self.thumb.to_array()
if... | Serializes this Animation to a dictionary.
:return: dictionary representation of this object.
:rtype: dict |
371,277 | def proxy(self):
headers = self.request.headers.filter(self.ignored_request_headers)
qs = self.request.query_string if self.pass_query_string else
if (self.request.META.get(, None) == and
get_django_version() == ):
del self.request.META[]
... | Retrieve the upstream content and build an HttpResponse. |
371,278 | def _process_mappings(self, limit=None):
LOG.info("Processing chromosome mappings")
if self.test_mode:
graph = self.testgraph
else:
graph = self.graph
line_counter = 0
model = Model(graph)
geno = Genotype(graph)
raw = .join((self.... | This function imports linkage mappings of various entities
to genetic locations in cM or cR.
Entities include sequence variants, BAC ends, cDNA, ESTs, genes,
PAC ends, RAPDs, SNPs, SSLPs, and STSs.
Status: NEEDS REVIEW
:param limit:
:return: |
371,279 | def Mean(self):
mu = 0.0
for x, p in self.d.iteritems():
mu += p * x
return mu | Computes the mean of a PMF.
Returns:
float mean |
371,280 | def accuracy(self, outputs):
output = outputs[self.output_name]
predict = TT.argmax(output, axis=-1)
correct = TT.eq(predict, self._target)
acc = correct.mean()
if self._weights is not None:
acc = (self._weights * correct).sum() / self._weights.sum()
... | Build a Theano expression for computing the accuracy of graph output.
Parameters
----------
outputs : dict of Theano expressions
A dictionary mapping network output names to Theano expressions
representing the outputs of a computation graph.
Returns
----... |
371,281 | def acquire(self, timeout=None):
green = getcurrent()
parent = green.parent
if parent is None:
raise MustBeInChildGreenlet()
if self._local.locked:
future = create_future(self._loop)
self._queue.append(future)
parent.switch(future... | Acquires the lock if in the unlocked state otherwise switch
back to the parent coroutine. |
371,282 | def similar(self):
if self._similar is None:
self._similar = [
Artist(artist[], artist[], self._connection)
for artist in self._connection.request(
,
{: self.id},
self._connection.header())[1][]]
... | iterator over similar artists as :class:`Artist` objects |
371,283 | def read(self, sensors):
payload = {: [], : list(set([s.key for s in sensors]))}
if self.sma_sid is None:
yield from self.new_session()
if self.sma_sid is None:
return False
body = yield from self._fetch_json(URL_VALUES, payload=payload)
... | Read a set of keys. |
def display(self, image):
    """Takes a :py:mod:`PIL.Image` and makes a copy of it for later
    use/inspection.

    :param image: Image to display.
    :type image: PIL.Image.Image
    """
    assert image.size == self.size
    processed = self.preprocess(image)
    self.image = processed.copy()
use/inspection.
:param image: Image to display.
:type image: PIL.Image.Image |
371,285 | def _analyze(self):
fullpartial
if not self.analysis:
for dseries in self.data_series:
dseries_count = self.df[dseries].count()
assert(len(self.df_pruned) <= dseries_count <= len(self.df) or dseries_count)
self.analysis[dseri... | Run-once function to generate analysis over all series, considering both full and partial data.
Initializes the self.analysis dict which maps:
(non-reference) column/series -> 'full' and/or 'partial' -> stats dict returned by get_xy_dataset_statistics |
371,286 | def getTreeWalker(treeType, implementation=None, **kwargs):
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType in ("dom", "pulldom"):
name = "%s.%s" % (__name__, treeType)
__import__(name)
mod = sys.modules[name]
treeWalkerCa... | Get a TreeWalker class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - The xml.dom.minidom DOM implementation
"pulldom" - The xml.dom.pulldom event stream
... |
371,287 | def notifyAppend(self, queue, force):
if not force and not self.canAppend():
self.isWaited = True
return self._matcher
if self.parent is not None:
m = self.parent.notifyAppend(self, force)
if m is not None:
return m
self.to... | Internal notify for sub-queues
:returns: If the append is blocked by the parent, an EventMatcher is returned; otherwise None. |
def build_penalties(self):
    """Build the GAM block-diagonal penalty matrix in quadratic form
    out of the penalty matrices specified for each feature.

    Each feature's penalty matrix is assumed to already include its
    lambda scaling, so for m features:

        P = block_diag[P0, P1, P2, ..., Pm]

    :return: scipy sparse block-diagonal penalty matrix
    """
    # Comprehension instead of a manual append loop (same order, same content).
    penalties = [term.build_penalties() for term in self._terms]
    return sp.sparse.block_diag(penalties)
out of penalty matrices specified for each feature.
each feature penalty matrix is multiplied by a lambda for that feature.
so for m features:
P = block_diag[lam0 * P0, lam1 * P1, lam2 * P2, ... , lamm * Pm]
Param... |
371,289 | def _patch_distribution_metadata_write_pkg_info():
environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
if not environment_local:
return
def write_pkg_info(self, base_dir):
with open(os.path.join(base_dir, ), ,
encoding=) as pkg_info:
... | Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
correct this undesirable behavior. |
371,290 | def render_compressed(self, package, package_name, package_type):
if settings.PIPELINE_ENABLED:
return self.render_compressed_output(package, package_name,
package_type)
else:
return self.render_compressed_sources(package,... | Render HTML for the package.
If ``PIPELINE_ENABLED`` is ``True``, this will render the package's
output file (using :py:meth:`render_compressed_output`). Otherwise,
this will render the package's source files (using
:py:meth:`render_compressed_sources`).
Subclasses can override... |
371,291 | def parse_iso_utc(s):
m = rfc3339_datetime_re().match(s)
if not m:
raise ValueError( + s )
else:
fmt = + ( if m.group(7) else ) +
return datetime.datetime.strptime(s, fmt) | Parses an ISO time with a hard-coded Z for zulu-time (UTC) at the end. Other timezones are
not supported.
:param str s: the ISO-formatted time
:rtype: datetime.datetime
:return: a timezone-naive datetime object
>>> parse_iso_utc('2016-04-27T00:28:04.000Z')
datetime.datetime(2016, 4, 27, 0, ... |
def ensure_exists(self):
    """Make sure the local repository exists.

    :raises: :exc:`~exceptions.ValueError` when the
             local repository doesn't exist yet.
    """
    if self.exists:
        return
    msg = "The local %s repository %s doesn't exist!"
    raise ValueError(msg % (self.friendly_name, format_path(self.local)))
:raises: :exc:`~exceptions.ValueError` when the
local repository doesn't exist yet. |
371,293 | def get_file_relative_path_by_id(self, id):
for path, info in self.walk_files_info():
if info[]==id:
return path
return None | Given an id, get the corresponding file info relative path joined with file name.
Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found. |
def gen_toyn(f, nsample, ntoy, bound, accuracy=10000, quiet=True, **kwd):
    """Alias of :func:`gen_toy` that draws ``nsample * ntoy`` samples and
    reshapes the result to ``(ntoy, nsample)``.

    :param f: distribution to sample from
    :param nsample: number of samples per toy
    :param ntoy: number of toy datasets
    :param bound: sampling bound
    :param accuracy: sampling accuracy (passed through to gen_toy)
    :param quiet: suppress output (passed through to gen_toy)
    :param kwd: extra keyword arguments forwarded to gen_toy
    :return: array of shape (ntoy, nsample)
    """
    total = nsample * ntoy
    flat = gen_toy(f, total, bound, accuracy, quiet, **kwd)
    return flat.reshape((ntoy, nsample))
:param f:
:param nsample:
:param bound:
:param accuracy:
:param quiet:
:param kwd:
:return: |
371,295 | def sign_out(entry, time_out=None, forgot=False):
if time_out is None:
time_out = datetime.today().time()
if forgot:
entry.forgot_sign_out = True
logger.info(
.format(entry.user_id, entry.date)
)
else:
entry.time_out = time_out
logger.info(.fo... | Sign out of an existing entry in the timesheet. If the user
forgot to sign out, flag the entry.
:param entry: `models.Entry` object. The entry to sign out.
:param time_out: (optional) `datetime.time` object. Specify the sign out time.
:param forgot: (optional) If true, user forgot to sign out. Entry wi... |
def get_rollup_caps(self, id=None, params=None):
    """Return the rollup capabilities for an index.

    :arg id: The ID of the index to check rollup capabilities on, or left
        blank for all jobs
    :arg params: optional request parameters
    """
    path = _make_path("_rollup", "data", id)
    return self.transport.perform_request("GET", path, params=params)
:arg id: The ID of the index to check rollup capabilities on, or left
blank for all jobs |
def A_array(l1, l2, PA, PB, CP, g):
    """THO eq. 2.18 and 3.1

    >>> A_array(0,0,0,0,0,1)
    [1.0]
    >>> A_array(0,1,1,1,1,1)
    [1.0, -1.0]
    >>> A_array(1,1,1,1,1,1)
    [1.5, -2.5, 1.0]
    """
    Imax = l1 + l2 + 1
    # Accumulate each A_term contribution into slot I = i - 2r - u.
    A = [0] * Imax
    for i in range(Imax):
        for r in range(int(floor(i / 2) + 1)):
            for u in range(int(floor((i - 2 * r) / 2) + 1)):
                A[i - 2 * r - u] += A_term(i, r, u, l1, l2, PA, PB, CP, g)
    return A
>>> A_array(0,0,0,0,0,1)
[1.0]
>>> A_array(0,1,1,1,1,1)
[1.0, -1.0]
>>> A_array(1,1,1,1,1,1)
[1.5, -2.5, 1.0] |
371,298 | def get_file_url(self, fid, public=None):
try:
volume_id, rest = fid.strip().split(",")
except ValueError:
raise BadFidFormat(
"fid must be in format: <volume_id>,<file_name_hash>")
file_location = self.get_file_location(volume_id)
if publ... | Get url for the file
:param string fid: File ID
:param boolean public: public or internal url
:rtype: string |
371,299 | def _calibrate_vis(radiance, k):
logger.debug()
refl = 100 * k * radiance
return refl.clip(min=0) | Convert VIS radiance to reflectance
Note: Angle of incident radiation and annual variation of the
earth-sun distance is not taken into account. A value of 100%
corresponds to the radiance of a perfectly reflecting diffuse surface
illuminated at normal incidence when the sun is at its an... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.