Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
365,300 | def output_before_run(self, run):
runSet = run.runSet
try:
OutputHandler.print_lock.acquire()
try:
runSet.started_runs += 1
except AttributeError:
runSet.started_runs = 1
timeStr = time.strftime("%H:%M:%S... | The method output_before_run() prints the name of a file to terminal.
It returns the name of the logfile.
@param run: a Run object |
365,301 | def cee_map_name(self, **kwargs):
config = ET.Element("config")
cee_map = ET.SubElement(config, "cee-map", xmlns="urn:brocade.com:mgmt:brocade-cee-map")
name = ET.SubElement(cee_map, "name")
name.text = kwargs.pop()
callback = kwargs.pop(, self._callback)
return... | Auto Generated Code |
365,302 | def list_zones(profile):
conn = _get_driver(profile=profile)
return [_simple_zone(zone) for zone in conn.list_zones()] | List zones for the given profile
:param profile: The profile key
:type profile: ``str``
CLI Example:
.. code-block:: bash
salt myminion libcloud_dns.list_zones profile1 |
365,303 | def get_exception_based_on_api_message(message, image_name=""):
msg_bigger_than_source = re.compile()
msg_does_not_exist = re.compile()
msg_does_not_exist_bis = re.compile()
if re.search(msg_bigger_than_source, message):
msg = "File %s requested at a width bigger than source" % image_name
... | Return the exception matching the given API error message. |
365,304 | def get_gopath(self, target):
return os.path.join(self.workdir, target.id) | Returns the $GOPATH for the given target. |
365,305 | def write(self, content=None):
if self.system.files.no_output is True:
return
t, _ = elapsed()
if not content:
logger.warning()
return
self.update(content)
system = self.system
file = system.files.output
export = al... | Write report to file.
Parameters
----------
content: str
'summary', 'extended', 'powerflow' |
365,306 | def deploy(self):
self._info("* Opening archive: {}", self.archive_path)
if not os.path.exists(self.archive_path):
self._error("Given path does not exists: {}", self.archive_path)
with zipfile.ZipFile(self.archive_path, ) as zip_archive:
font_dir = self.requirem... | Open a ZIP archive, validate requirements then deploy the webfont into
project static files |
365,307 | def descendants(self, cl=None, noduplicates=True):
if not cl:
cl = self
if cl.children():
bag = []
for x in cl.children():
if x.uri != cl.uri:
bag += [x] + self.descendants(x, noduplicates)
else:
... | returns all descendants in the taxonomy |
365,308 | def add_reader(
self,
fd: IFileLike,
callback: typing.Callable[[IFileLike], typing.Any],
) -> None:
raise NotImplementedError() | Add a file descriptor to the processor and wait for READ.
Args:
fd (IFileLike): Any obect that exposes a 'fileno' method that
returns a valid file descriptor integer.
callback (typing.Callable[[IFileLike], typing.Any]): A function
that consumes the IFileL... |
365,309 | def send_template_message(self, user_id, template_id, data, url=, topcolor=):
unicode_data = {}
if data:
unicode_data = self._transcoding_dict(data)
return self.request.post(
url=,
data={
: user_id,
"template_id": temp... | 发送模版消息
详情请参考 http://mp.weixin.qq.com/wiki/17/304c1885ea66dbedf7dc170d84999a9d.html
:param user_id: 用户 ID, 就是你收到的 WechatMessage 的 source (OpenID)
:param template_id: 模板ID
:param data: 模板消息数据 (dict形式),示例如下:
{
"first": {
"value": "恭喜你购买成功!",
... |
365,310 | def do_fileplaceholder(parser, token):
name, params = parse_placeholder(parser, token)
return FilePlaceholderNode(name, **params) | Method that parse the fileplaceholder template tag. |
365,311 | def Deserialize(self, reader):
self.Script = reader.ReadVarBytes()
self.ParameterList = reader.ReadVarBytes()
self.ReturnType = reader.ReadByte() | Deserialize full object.
Args:
reader (neo.IO.BinaryReader): |
365,312 | def path_param(name, ns):
if ns.identifier_type == "uuid":
param_type = "string"
param_format = "uuid"
else:
param_type = "string"
param_format = None
kwargs = {
"name": name,
"in": "path",
"required": True,
"type": param_type,
}
... | Build a path parameter definition. |
365,313 | def _serialize(self, value, *args, **kwargs):
if value is not None:
value = super(MSTimestamp, self)._serialize(value, *args) * 1e3
return value | Serialize given datetime to timestamp. |
365,314 | def pretty_print_probabilities(self, decimal_digits=2):
outcome_dict = {}
qubit_num = len(self)
for index, amplitude in enumerate(self.amplitudes):
outcome = get_bitstring_from_index(index, qubit_num)
prob = round(abs(amplitude) ** 2, decimal_digits)
... | Prints outcome probabilities, ignoring all outcomes with approximately zero probabilities
(up to a certain number of decimal digits) and rounding the probabilities to decimal_digits.
:param int decimal_digits: The number of digits to truncate to.
:return: A dict with outcomes as keys and probab... |
365,315 | def _postprocess_request(self, r):
from DictObject import DictObject
import requests
assert isinstance(r, requests.Response)
try:
logger.debug(r.json())
res = DictObject.objectify(r.json())
except Exception:
logger.exception("Parsing... | This converts the response to either the response or a parsed :class:`pytgbot.api_types.receivable.Receivable`.
:param r: the request response
:type r: requests.Response
:return: The json response from the server, or, if `self.return_python_objects` is `True`, a parsed return type.
:r... |
365,316 | def emit_event(self, event):
with self._lock:
listeners = list(self._event_listeners)
for cb in list(self._event_listeners):
try:
cb(event)
except:
logger.exception("Event callback resulted in unh... | Emit the specified event (notify listeners) |
365,317 | def focusout(self, event):
bc = self.style.lookup("TEntry", "bordercolor", ("!focus",))
dc = self.style.lookup("TEntry", "darkcolor", ("!focus",))
lc = self.style.lookup("TEntry", "lightcolor", ("!focus",))
self.style.configure("%s.spinbox.TFrame" % self.frame, bordercolor=bc,
... | Change style on focus out events. |
365,318 | def clipping_params(ts, capacity=100, rate_limit=float(), method=None, max_attempts=100):
VALID_METHODS = [, , , ]
ts.index = ts.index.astype(np.int64)
costs = []
def cost_fun(x, *args):
thresh = x[0]
ts, capacity, bounds = args
integral = clipped_area(ts, thresh=thres... | Start, end, and threshold that clips the value of a time series the most, given a limitted "capacity" and "rate"
Assumes that signal can be linearly interpolated between points (trapezoidal integration)
Arguments:
ts (TimeSeries): Time series to attempt to clip to as low a max value as possible
ca... |
365,319 | def check_for_session(self, status=None):
status = Status.LAST if status is None else status
return os.path.isfile(self.get_restore_path(status)) and os.path.getsize(self.get_restore_path(status)) > 0 | check_for_session: see if session is in progress
Args:
status (str): step to check if last session reached (optional)
Returns: boolean indicating if session exists |
365,320 | def _get_conversion_type(self, convert_to=None):
acceptable = [, ]
conversion = "singularity"
if self.name == "singularity":
conversion = "docker"
if convert_to is not None and convert_to in acceptable:
conversion = convert_to
... | a helper function to return the conversion type based on user
preference and input recipe.
Parameters
==========
convert_to: a string either docker or singularity (default None) |
365,321 | def bamsort_and_index(job, job_vars):
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args[]
rg_alignments = return_input_paths(job, work_dir, ids, )
output = os.path.join(work_dir, )
cmd1 = [, docker_path(rg_alignments), docker_path()]
... | Sorts bam file and produces index file
job_vars: tuple Tuple of dictionaries: input_args and ids |
365,322 | def _translate_range(self, len_, start, end):
if start < 0:
start += len_
start = max(0, min(start, len_))
if end < 0:
end += len_
end = max(-1, min(end, len_ - 1))
return start, end | Translate range to valid bounds. |
365,323 | def get_input(self, more=False):
received = None
try:
received = self.prompt.input(more)
except KeyboardInterrupt:
print()
printerr("KeyboardInterrupt")
except EOFError:
print()
self.exit_runner()
else:
... | Prompt for code input. |
365,324 | def p_to_find(self, ):
kwd = {
: ,
}
self.render(,
kwd=kwd,
view=MUser.get_by_keyword(""),
cfg=config.CMS_CFG,
userinfo=self.userinfo) | To find, pager. |
365,325 | def handle_fk_field(self, obj, field):
self._start_relational_field(field)
related = getattr(obj, field.name)
if related is not None:
if self.use_natural_keys and hasattr(related, ):
related = related.natural_key()
... | Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields). |
365,326 | def contains(self, *items):
if len(items) == 0:
raise ValueError()
elif len(items) == 1:
if items[0] not in self.val:
if self._check_dict_like(self.val, return_as_bool=True):
self._err( % (self.val, items[0]))
else:
... | Asserts that val contains the given item or items. |
365,327 | def logger():
global _LOGGER
if _LOGGER is None:
logging.basicConfig()
_LOGGER = logging.getLogger()
_LOGGER.setLevel()
return _LOGGER | Access global logger |
365,328 | def execute(self, statement, *args, **kwargs):
with self.engine.connect() as conn:
s = sqlalchemy.sql.text(statement)
return conn.execute(s, **kwargs) | This convenience method will execute the query passed in as is. For
more complex functionality you may want to use the sqlalchemy engine
directly, but this serves as an example implementation.
:param select_query: SQL statement to execute that will identify the
resultset of interes... |
365,329 | def get_timestamp_expression(self, time_grain):
label = utils.DTTM_ALIAS
db = self.table.database
pdf = self.python_date_format
is_epoch = pdf in (, )
if not self.expression and not time_grain and not is_epoch:
sqla_col = column(self.column_name, type_=DateT... | Getting the time component of the query |
365,330 | def _h2ab_s(s):
if s < 5.85 or s > 9.155759395:
raise NotImplementedError("Incoming out of bound")
sigma1 = s/5.21
sigma2 = s/9.2
I = [1, 1, 2, 2, 4, 4, 7, 8, 8, 10, 12, 12, 18, 20, 24, 28, 28, 28, 28, 28,
32, 32, 32, 32, 32, 36, 36, 36, 36, 36]
J = [8, 24, 4, 32, 1, 2, 7... | Define the saturated line boundary between Region 4 and 2a-2b, h=f(s)
Parameters
----------
s : float
Specific entropy, [kJ/kgK]
Returns
-------
h : float
Specific enthalpy, [kJ/kg]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
*... |
365,331 | def validate_schema(cls, tx):
_validate_schema(TX_SCHEMA_COMMON, tx)
_validate_schema(TX_SCHEMA_TRANSFER, tx)
_validate_schema(cls.TX_SCHEMA_CUSTOM, tx) | Validate the validator election vote transaction. Since `VOTE` extends `TRANSFER`
transaction, all the validations for `CREATE` transaction should be inherited |
365,332 | def split_data(self, train_images, train_labels):
valid_images = train_images[:self.num_valid_images]
valid_labels = train_labels[:self.num_valid_images]
train_images = train_images[self.num_valid_images:]
train_labels = train_labels[self.num_valid_images:]
return train_... | :param train_images: numpy array (image_dim, image_dim, num_images)
:param train_labels: numpy array (labels)
:return: train_images, train_labels, valid_images, valid_labels |
365,333 | def extract_xyz_matrix_from_loop_json(pdb_lines, parsed_loop_json_contents, atoms_of_interest = backbone_atoms, expected_num_residues = None, expected_num_residue_atoms = None, allow_overlaps = False, include_all_columns = False):
dataframes = []
for loop_set in parsed_loop_json_conte... | A utility wrapper to extract_xyz_matrix_from_pdb_residue_range.
This accepts PDB file lines and a loop.json file (a defined Rosetta format) and returns a pandas dataframe of
the X, Y, Z coordinates for the requested atom types for all residues in all loops defined by the loop.json
file.... |
365,334 | def parse_timedelta(text):
td_kwargs = {}
for match in _PARSE_TD_RE.finditer(text):
value, unit = match.group(), match.group()
try:
unit_key = _PARSE_TD_KW_MAP[unit]
except KeyError:
raise ValueError(
% (unit, _PARSE_TD_KW_MAP.key... | Robustly parses a short text description of a time period into a
:class:`datetime.timedelta`. Supports weeks, days, hours, minutes,
and seconds, with or without decimal points:
Args:
text (str): Text to parse.
Returns:
datetime.timedelta
Raises:
ValueError: on parse failure.... |
365,335 | def read_block(self, block):
if block < 0 or block > 255:
raise ValueError("invalid block number")
log.debug("read block {0}".format(block))
cmd = "\x02" + chr(block) + 8 * chr(0) + self.uid
return self.transceive(cmd)[1:9] | Read an 8-byte data block at address (block * 8). |
365,336 | def to_array(self):
array = super(MessageEntity, self).to_array()
array[] = u(self.type)
array[] = int(self.offset)
array[] = int(self.length)
if self.url is not None:
array[] = u(self.url)
if self.user is not None:
array[] = sel... | Serializes this MessageEntity to a dictionary.
:return: dictionary representation of this object.
:rtype: dict |
365,337 | def _init_sub_dsp(self, dsp, fringe, outputs, no_call, initial_dist, index,
full_name):
sol = self.__class__(
dsp, {}, outputs, False, None, None, no_call, False,
wait_in=self._wait_in.get(dsp, None), index=self.index + index,
full_nam... | Initialize the dispatcher as sub-dispatcher and update the fringe.
:param fringe:
Heapq of closest available nodes.
:type fringe: list[(float | int, bool, (str, Dispatcher)]
:param outputs:
Ending data nodes.
:type outputs: list[str], iterable
:param no... |
365,338 | def _handle_timeout(self) -> None:
self._timeout = None
while True:
try:
ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0)
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
... | Called by IOLoop when the requested timeout has passed. |
365,339 | def exists(self, client=None):
client = self._require_client(client)
try:
response = client.connection.api_request(method=, path=self.path)
except NotFound:
return False
else:
if os.path.split(response.get("id"))[-1] ... | API call: test for the existence of the taskqueue via a GET request
See
https://cloud.google.com/appengine/docs/python/taskqueue/rest/taskqueues/get
:type client: :class:`taskqueue.client.Client` or ``NoneType``
:param client: the client to use. If not passed, falls back to the
... |
365,340 | def host(self):
_id = None
for participant in self[]:
if participant[] == :
if set([, ]) < set(participant):
if participant[] == :
_id = participant[]
if _id is None:
... | Return the host committee. |
365,341 | def output(self, value):
response = self.request(("output %s" % (value)).encode())
if "success" in response:
return None
else:
return response | Sets the client's output (on, off, int)
Sets the general purpose output on some display modules to this value.
Use on to set all outputs to high state, and off to set all to low state.
The meaning of the integer value depends on your specific device, usually
it is a bit pattern ... |
365,342 | def get_clone(rec):
s = rec.description
chr = re.search(chr_pat, s)
clone = re.search(clone_pat, s)
chr = chr.group(1) if chr else ""
clone = clone.group(1) if clone else ""
return chr, clone | >>> get_clone("Medicago truncatula chromosome 2 clone mth2-48e18")
('2', 'mth2-48e18') |
365,343 | def clear_relation(cls):
for k, v in cls.properties.items():
if isinstance(v, ReferenceProperty):
if hasattr(v, ) and hasattr(v.reference_class, v.collection_name):
delattr(v.reference_class, v.collection_name)
if isinstance(v, OneToO... | Clear relation properties for reference Model, such as OneToOne, Reference,
ManyToMany |
365,344 | def __getNumberOfFollowers(self, web):
counters = web.find_all(, {: })
try:
if not in counters[2].text:
self.followers = int(counters[2].text)
else:
follText = counters[2].text.replace(" ", "")
follText = follText.replace(... | Scrap the number of followers from a GitHub profile.
:param web: parsed web.
:type web: BeautifulSoup node. |
365,345 | def to_dict(self):
obj_dict = super(Cell, self).to_dict()
child_dict = {
: self.__class__.__name__,
: self.header_flag,
: self.align,
: self.wrap_slash,
: self.content.to_dict()
}
obj_dict.update(child_dict)
ret... | Render a MessageElement as python dict
:return: Python dict representation
:rtype: dict |
365,346 | def ensure_parent_directory(path, ensure_parent=True):
parent_directory = os.path.abspath(path)
if ensure_parent:
parent_directory = os.path.dirname(parent_directory)
if not os.path.exists(parent_directory):
try:
os.makedirs(parent_directory)
except (IOError, OSError... | Ensures the parent directory exists.
:param string path: the path of the file
:param bool ensure_parent: if ``True``, ensure the parent directory of ``path`` exists;
if ``False``, ensure ``path`` exists
:raises: OSError: if the path cannot be created |
365,347 | def delete(self, **kw):
delete_by = []
for field, val in kw.items():
if val is not None:
delete_by.append(field)
self.items[:] = [route for route in self.items
if not all(route.get(field) == kw.get(field)
... | Delete a policy route from the engine. You can delete using a
single field or multiple fields for a more exact match.
Use a keyword argument to delete a route by any valid attribute.
:param kw: use valid Route keyword values to delete by exact match |
365,348 | def create_invoice_from_albaran(pk, list_lines):
MODEL_SOURCE = SalesAlbaran
MODEL_FINAL = SalesInvoice
url_reverse =
msg_error_relation = _("Hay lineas asignadas a facturas")
msg_error_not_found = _()
msg_error_line_not_found = _()
return SalesLines.cr... | context = {}
if list_lines:
new_list_lines = SalesLines.objects.filter(
pk__in=[int(x) for x in list_lines]
).exclude(
invoice__isnull=False
)
if new_list_lines:
new_pk = new_list_lines.first()
if ne... |
365,349 | def traveling_salesman_qubo(G, lagrange=2, weight=):
N = G.number_of_nodes()
if N in (1, 2) or len(G.edges) != N*(N-1)//2:
msg = "graph must be a complete graph with at least 3 nodes or empty"
raise ValueError(msg)
Q = defaultdict(float)
for node in G:
for ... | Return the QUBO with ground states corresponding to a minimum TSP route.
If :math:`|G|` is the number of nodes in the graph, the resulting qubo will have:
* :math:`|G|^2` variables/nodes
* :math:`2 |G|^2 (|G| - 1)` interactions/edges
Parameters
----------
G : NetworkX graph
A complete... |
365,350 | def match(self, props=None, rng=None, offset=None):
if rng:
s, e = rng
else:
e = s = None
return ((e is None or self.end == e) and
(s is None or self.start == s)) and \
(props is None or props.issubset(self.props)) and \
... | Provide any of the args and match or dont.
:param props: Should be a subset of my props.
:param rng: Exactly match my range.
:param offset: I start after this offset.
:returns: True if all the provided predicates match or are None |
365,351 | def all_pages(method, request, accessor, cond=None):
if cond is None:
cond = lambda x: True
result = []
next_token = None
while True:
if next_token:
request[] = next_token
response = method(**request)
if cond(response):
data = accessor(respons... | Helper to process all pages using botocore service methods (exhausts NextToken).
note: `cond` is optional... you can use it to make filtering more explicit
if you like. Alternatively you can do the filtering in the `accessor` which
is perfectly fine, too
Note: lambda uses a slightly different mechanism ... |
365,352 | def wait_until_title_contains(self, partial_title, timeout=None):
timeout = timeout if timeout is not None else self.timeout
def wait():
return WebDriverWait(self.driver, timeout).until(EC.title_contains(partial_title))
return self.execute_and_handle_webdriver... | Waits for title to contain <partial_title>
@type partial_title: str
@param partial_title: the partial title to locate
@type timeout: int
@param timeout: the maximum number of seconds the driver will wait before timing out
@rtype: webdriverw... |
365,353 | def TriToBin(self, x, y, z):
if (z >= 0):
if (x + y + z == 0):
return (0, 0)
else:
Sum = x + y + z
X = 100.0 * x / Sum
Y = 100.0 * y / Sum
Z = 100.0 * z / Sum
if (X + Y != 0):
... | Turn an x-y-z triangular coord to an a-b coord.
if z is negative, calc with its abs then return (a, -b).
:param x,y,z: the three numbers of the triangular coord
:type x,y,z: float or double are both OK, just numbers
:return: the corresponding a-b coord
:rtype: a tuple consist ... |
365,354 | def tags(self, value):
if not isinstance(value, list):
raise TypeError
self._config[] = value | Set the tags in the configuraton (setter) |
365,355 | def LaplaceCentreWeight(self):
sz = [1,] * self.S.ndim
for ax in self.axes:
sz[ax] = self.S.shape[ax]
lcw = 2*len(self.axes)*np.ones(sz, dtype=self.dtype)
for ax in self.axes:
lcw[(slice(None),)*ax + ([0, -1],)] -= 1.0
return lcw | Centre weighting matrix for TV Laplacian. |
365,356 | def rpc_get_namespace_cost( self, namespace_id, **con_info ):
if not check_namespace(namespace_id):
return {: , : 400}
db = get_db_state(self.working_dir)
res = get_namespace_cost( db, namespace_id )
db.close()
units = res[]
amount = res[]
n... | Return the cost of a given namespace, including fees.
Returns {'amount': ..., 'units': ...} |
365,357 | def get_attached_cdroms(self, datacenter_id, server_id, depth=1):
response = self._perform_request(
% (
datacenter_id,
server_id,
str(depth)))
return response | Retrieves a list of CDROMs attached to the server.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param depth: The depth of the response da... |
365,358 | def r_squared(model, fit_result, data):
y_is = [data[var] for var in model if var in data]
x_is = [value for var, value in data.items() if var.name in model.__signature__.parameters]
y_bars = [np.mean(y_i) if y_i is not None else None for y_i in y_is]
f_is = model(*x_is, **fit_result.params)
... | Calculates the coefficient of determination, R^2, for the fit.
(Is not defined properly for vector valued functions.)
:param model: Model instance
:param fit_result: FitResults instance
:param data: data with which the fit was performed. |
365,359 | def get_fn(name):
fn = resource_filename(, os.path.join(, , name))
if not os.path.exists(fn):
raise IOError(.format(fn))
return fn | Get the full path to one of the reference files shipped for utils.
In the source distribution, these files are in ``mbuild/utils/reference``,
but on installation, they're moved to somewhere in the user's python
site-packages directory.
Parameters
----------
name : str
Name of the file ... |
365,360 | def back_tick(cmd, ret_err=False, as_str=True, raise_err=None):
if raise_err is None:
raise_err = False if ret_err else True
cmd_is_seq = isinstance(cmd, (list, tuple))
proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=not cmd_is_seq)
out, err = proc.communicate()
retcode = proc.returnc... | Run command `cmd`, return stdout, or stdout, stderr if `ret_err`
Roughly equivalent to ``check_output`` in Python 2.7
Parameters
----------
cmd : sequence
command to execute
ret_err : bool, optional
If True, return stderr in addition to stdout. If False, just return
stdout... |
365,361 | def quantileclip(arrays, masks=None, dtype=None, out=None,
zeros=None, scales=None, weights=None,
fclip=0.10):
return generic_combine(intl_combine.quantileclip_method(fclip), arrays,
masks=masks, dtype=dtype, out=out,
zeros... | Combine arrays using the sigma-clipping, with masks.
Inputs and masks are a list of array objects. All input arrays
have the same shape. If present, the masks have the same shape
also.
The function returns an array with one more dimension than the
inputs and with size (3, shape). out[0] contains t... |
365,362 | def get_build_report(self, project, build_id, type=None):
route_values = {}
if project is not None:
route_values[] = self._serialize.url(, project, )
if build_id is not None:
route_values[] = self._serialize.url(, build_id, )
query_parameters = {}
... | GetBuildReport.
[Preview API] Gets a build report.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str type:
:rtype: :class:`<BuildReportMetadata> <azure.devops.v5_0.build.models.BuildReportMetadata>` |
365,363 | def plot_ants_plane(off_screen=False, notebook=None):
airplane = vtki.PolyData(planefile)
airplane.points /= 10
ant = vtki.PolyData(antfile)
ant.rotate_x(90)
ant.translate([90, 60, 15])
ant_copy = ant.copy()
ant_copy.translate([30, 0, -10])
plotter ... | Demonstrate how to create a plot class to plot multiple meshes while
adding scalars and text.
Plot two ants and airplane |
365,364 | def getSlicesForText(self, body, getFingerprint=None, startIndex=0, maxResults=10):
return self._text.getSlicesForText(self._retina, body, getFingerprint, startIndex, maxResults) | Get a list of slices of the text
Args:
body, str: The text to be evaluated (required)
getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
startIndex, int: The start-index for pagination (optional)
maxResults, int... |
365,365 | def consume_gas(self, amount: int, reason: str) -> None:
return self._gas_meter.consume_gas(amount, reason) | Consume ``amount`` of gas from the remaining gas.
Raise `eth.exceptions.OutOfGas` if there is not enough gas remaining. |
365,366 | def get(self, *, kind: Type=None, tag: Hashable=None, **kwargs) -> Iterator:
return self.game_objects.get(kind=kind, tag=tag, **kwargs) | Get an iterator of GameObjects by kind or tag.
kind: Any type. Pass to get a subset of contained GameObjects with the
given type.
tag: Any Hashable object. Pass to get a subset of contained GameObjects
with the given tag.
Pass both kind and tag to get objects that ar... |
365,367 | def todict(self):
return {: self.index,
: hb_encode(self.seed),
: self.n,
: hb_encode(self.root),
: hb_encode(self.hmac),
: self.timestamp} | Returns a dictionary fully representing the state of this object |
365,368 | def create_nation_fixtures(self):
SHP_SLUG = "cb_{}_us_state_500k".format(self.YEAR)
DOWNLOAD_PATH = os.path.join(self.DOWNLOAD_DIRECTORY, SHP_SLUG)
shape = shapefile.Reader(
os.path.join(DOWNLOAD_PATH, "{}.shp".format(SHP_SLUG))
)
fields = shape.fields[1:]
... | Create national US and State Map |
365,369 | def adjustTitleFont(self):
left, top, right, bottom = self.contentsMargins()
r = self.roundingRadius()
left += 5 + r / 2
top += 5 + r / 2
right += 5 + r / 2
bottom += 5 + r / 2
r = self.rect()
rect_l = r.left() + left
... | Adjusts the font used for the title based on the current with and \
display name. |
365,370 | def get_nlp_base(self):
if isinstance(self.__nlp_base, NlpBase) is False:
raise TypeError("The type of self.__nlp_base must be NlpBase.")
return self.__nlp_base | getter |
365,371 | def http_basic(r, username, password):
username = str(username)
password = str(password)
auth_s = b64encode( % (username, password))
r.headers[] = ( % auth_s)
return r | Attaches HTTP Basic Authentication to the given Request object.
Arguments should be considered non-positional. |
365,372 | def _process_output(res, parse_json=True):
res_payload = res.payload.decode()
output = res_payload.strip()
_LOGGER.debug(, res.code, output)
if not output:
return None
if not res.code.is_successful():
if 128 <= res.code < 160:
raise ClientError(output)
eli... | Process output. |
365,373 | def message(request, socket, context, message):
room = get_object_or_404(ChatRoom, id=message["room"])
if message["action"] == "start":
name = strip_tags(message["name"])
user, created = room.users.get_or_create(name=name)
if not created:
socket.send({"action": "in-use"}... | Event handler for a room receiving a message. First validates a
joining user's name and sends them the list of users. |
def reset(self):
    """Reset the timeout by scheduling a fresh timer.

    Each call bumps ``self.counter`` so that any previously scheduled
    timer becomes stale and is ignored when it eventually fires.
    """
    self.counter += 1
    token = self.counter

    def _fire_if_current():
        # Fire only when no newer reset() superseded this timer and the
        # timer is still running.
        if self.counter == token and self.running:
            self.callback()

    self.loop.call_later(self.timeout, _fire_if_current)
def array(self, envelope=()):
    """Return an NDArray, optionally subset by spatial envelope.

    Keyword args:
        envelope -- coordinate extent tuple or Envelope; when falsy, the
                    whole dataset is read.
    """
    read_args = self.get_offset(envelope) if envelope else ()
    return self.ds.ReadAsArray(*read_args)
365,376 | def refresh_db(cache_valid_time=0, failhard=False, **kwargs):
*
salt.utils.pkg.clear_rtag(__opts__)
failhard = salt.utils.data.is_true(failhard)
ret = {}
error_repos = list()
if cache_valid_time:
try:
latest_update = os.stat(APT_LISTS_PATH).st_mtime
now = ti... | Updates the APT database to latest packages based upon repositories
Returns a dict, with the keys being package databases and the values being
the result of the update attempt. Values can be one of the following:
- ``True``: Database updated successfully
- ``False``: Problem updating database
- ``... |
def mod(self):
    """Return the modulus (Euclidean length) of the vector."""
    squared_norm = sum(component ** 2 for component in (self.x, self.y, self.z))
    return math.sqrt(squared_norm)
365,378 | def set_energy_range(self, logemin, logemax):
if logemin is None:
logemin = self.log_energies[0]
if logemax is None:
logemax = self.log_energies[-1]
imin = int(utils.val_to_edge(self.log_energies, logemin)[0])
imax = int(utils.val_to_edge(self.log_ener... | Set the energy range of the analysis.
Parameters
----------
logemin: float
Lower end of energy range in log10(E/MeV).
logemax : float
Upper end of energy range in log10(E/MeV). |
365,379 | def extract_packing_plan(self, topology):
packingPlan = {
"id": "",
"container_plans": []
}
if not topology.packing_plan:
return packingPlan
container_plans = topology.packing_plan.container_plans
containers = []
for container_plan in container_plans:
instance... | Returns the representation of packing plan that will
be returned from Tracker. |
365,380 | def sliced_wasserstein(PD1, PD2, M=50):
diag_theta = np.array(
[np.cos(0.25 * np.pi), np.sin(0.25 * np.pi)], dtype=np.float32
)
l_theta1 = [np.dot(diag_theta, x) for x in PD1]
l_theta2 = [np.dot(diag_theta, x) for x in PD2]
if (len(l_theta1) != PD1.shape[0]) or (len(l_theta2) != PD2.... | Implementation of Sliced Wasserstein distance as described in
Sliced Wasserstein Kernel for Persistence Diagrams by Mathieu Carriere, Marco Cuturi, Steve Oudot (https://arxiv.org/abs/1706.03358)
Parameters
-----------
PD1: np.array size (m,2)
Persistence diagram
... |
365,381 | def infer_data_type(data_container):
assert isinstance(data_container, list) or isinstance(
data_container, tuple
), "data_container should be a list or tuple."
assert is_data_homogenous(
data_container
), "Data are not of a homogenous type!"
datum = da... | For a given container of data, infer the type of data as one of
continuous, categorical, or ordinal.
For now, it is a one-to-one mapping as such:
- str: categorical
- int: ordinal
- float: continuous
There may be better ways that are not currently implemented below. For
example, with ... |
def true_num_genes(model, custom_spont_id=None):
    """Return the number of genes in a model, ignoring spontaneously labeled genes.

    Args:
        model (Model): model whose ``genes`` collection is counted.
        custom_spont_id (str): optional custom spontaneous ID if it does not
            match the regular expression ``[Ss](_|)0001``.

    Returns:
        int: number of genes excluding spontaneous genes.
    """
    return sum(
        1
        for gene in model.genes
        if not is_spontaneous(gene, custom_id=custom_spont_id)
    )
365,383 | def poll(args):
backend = _get_backend(args)
poller = MiFloraPoller(args.mac, backend)
print("Getting data from Mi Flora")
print("FW: {}".format(poller.firmware_version()))
print("Name: {}".format(poller.name()))
print("Temperature: {}".format(poller.parameter_value(MI_TEMPERATURE)))
pr... | Poll data from the sensor. |
365,384 | def get_node_id(nuc_or_sat, namespace=None):
node_type = get_node_type(nuc_or_sat)
if node_type == :
leaf_id = nuc_or_sat[0].leaves()[0]
if namespace is not None:
return .format(namespace, leaf_id)
else:
return string(leaf_id)
span_start = nuc_or_sa... | return the node ID of the given nucleus or satellite |
def valid_path(path):
    """Check if an entry in the class path exists as either a directory or a file.

    A trailing ``*`` (wildcard classpath entry) is stripped first; such an
    entry is valid only when the remaining prefix is an existing directory.

    NOTE(review): the original extraction dropped the string literals here
    (the ``endswith()`` argument and the ``Log.debug`` messages are missing,
    which is not valid Python). The ``'*'`` suffix and the debug messages
    below are reconstructions — confirm them against the upstream source.

    :param path: classpath entry to validate.
    :returns: True when the entry resolves to a directory or file.
    """
    if path.endswith('*'):
        Log.debug('Checking classpath wildcard entry: %s', path[:-1])
        # A wildcard entry is valid only if its parent directory exists.
        if os.path.isdir(path[:-1]):
            return True
        return False
    Log.debug('Checking classpath entry: %s', path)
    if os.path.isdir(path):
        return True
    else:
        Log.debug('Not a directory, checking as a file: %s', path)
        if os.path.isfile(path):
            return True
    return False
def build_remap_symbols(self, name_generator, children_only=None):
    """Assign the next available replacement name to this scope's catch symbol.

    The ``children_only`` flag is inapplicable here but kept for interface
    compatibility with the Scope class. After remapping the catch symbol,
    recurse into every child scope.
    """
    names = name_generator(skip=self._reserved_symbols)
    self.remapped_symbols[self.catch_symbol] = next(names)
    for subscope in self.children:
        subscope.build_remap_symbols(name_generator, False)
365,387 | def extract(self, m):
self._clear()
self.m = m
if self.option != []:
self._url_filter()
self._email_filter()
if in self.option:
self._tex_filter()
if in self.option:
sel... | extract info specified in option |
365,388 | def _R2deriv(self,R,z,phi=0.,t=0.):
return 1./(R**2.+(self._a+nu.sqrt(z**2.+self._b2))**2.)**1.5 \
-3.*R**2./(R**2.+(self._a+nu.sqrt(z**2.+self._b2))**2.)**2.5 | NAME:
_R2deriv
PURPOSE:
evaluate the second radial derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the second radial derivative
HISTO... |
def read(self, size=None):
    """Read a byte string from the file-like object at the current offset.

    Reads ``size`` bytes, or all remaining data when ``size`` is None.

    Args:
        size (Optional[int]): number of bytes to read, where None means all
            remaining data.

    Returns:
        bytes: the data read.

    Raises:
        IOError: when the file-like object is not open.
    """
    if self._is_open:
        return self._fsapfs_file_entry.read(size=size)
    raise IOError()
def params(self):
    """Read self params from configuration.

    NOTE(review): the subscript key inside ``parser[]`` was lost in
    extraction (``parser[]`` is not valid Python) — the original section
    name must be recovered from upstream before this code can run.
    """
    # Parse the raw configuration text with the Jinja-aware namespace parser.
    parser = JinjaInterpolationNamespace()
    parser.read(self.configuration)
    # Fall back to an empty mapping when the section is absent or empty.
    return dict(parser[] or {})
365,391 | def _detect_buffer_encoding(self, f):
encoding = None
with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as m:
encoding = self._analyze_file(m)
return encoding | Guess by checking BOM, and checking `_special_encode_check`, and using memory map. |
365,392 | def predict_is(self,h=5):
predictions = []
for t in range(0,h):
data1 = self.data_original.iloc[:-h+t,:]
data2 = self.data_original.iloc[-h+t:,:]
x = NDynReg(formula=self.formula, data=data1, family=self.family)
... | Makes dynamic in-sample predictions with the estimated model
Parameters
----------
h : int (default : 5)
How many steps would you like to forecast?
Returns
----------
- pd.DataFrame with predicted values |
365,393 | def on_batch_end(self, last_input, last_output, **kwargs):
"Steps through the generators then each of the critics."
self.G_A.zero_grad(); self.G_B.zero_grad()
fake_A, fake_B = last_output[0].detach(), last_output[1].detach()
real_A, real_B = last_input
self._set_trainable(D_A=Tru... | Steps through the generators then each of the critics. |
def index(self, index):
    """Set the index, marking the object dirty only on an actual change.

    :type index: int
    """
    if self._index == index:
        return
    self._dirty = True
    self._index = index
def removedirs_p(self):
    """Like :meth:`removedirs`, but does not raise an exception if the
    directory is not empty or does not exist.

    Returns ``self`` so calls can be chained.
    """
    try:
        with DirectoryNotEmpty.translate():
            self.removedirs()
    except (FileExistsError, DirectoryNotEmpty):
        # Best-effort removal: a missing or non-empty directory is fine.
        pass
    return self
365,396 | def _create_hosting_devices_from_config(self):
hd_dict = config.get_specific_config()
attr_info = ciscohostingdevicemanager.RESOURCE_ATTRIBUTE_MAP[
ciscohostingdevicemanager.DEVICES]
adm_context = bc.context.get_admin_context()
for hd_uuid, kv_dict in hd_dict.items(... | To be called late during plugin initialization so that any hosting
device specified in the config file is properly inserted in the DB. |
def switch_to_next_app(self):
    """Switch the device to the next app, storing the call result on self."""
    log.debug("switching to next app...")
    command, endpoint = DEVICE_URLS["switch_to_next_app"]
    self.result = self._exec(command, endpoint)
def ls_mux(sel, lsls_di, ls_do):
    """Multiplex a list of input signal structures onto one output structure.

    A structure is a list of signals; for each position ``i``:
    ``ls_do[i] = lsls_di[sel][i]``.

    sel     -- select signal
    lsls_di -- list of input structures
    ls_do   -- output structure
    """
    # Transpose inputs so each entry holds one signal position across
    # all input structures, then mux per position.
    columns = [list(signals) for signals in zip(*lsls_di)]
    return [mux(sel, columns[i], out_sig) for i, out_sig in enumerate(ls_do)]
365,399 | def render(self, data, accepted_media_type=None, renderer_context=None):
if data is None:
return bytes()
renderer_context = renderer_context or {}
indent = self.get_indent(accepted_media_type, renderer_context)
if indent is None:
separators = SHORT_SEPA... | Render `data` into JSON, returning a bytestring. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.