Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
363,900 | def _iterate_fields_cond(self, pkt, val, use_val):
for fld, cond in self.flds:
if isinstance(cond, tuple):
if use_val:
if cond[1](pkt, val):
return fld
continue
else:
... | Internal function used by _find_fld_pkt & _find_fld_pkt_val |
363,901 | def step_command_output_should_not_contain_log_records_from_categories(context):
assert context.table, "REQUIRE: context.table"
context.table.require_column("category")
record_schema = context.log_record_row_schema
LogRecordTable.annotate_with_row_schema(context.table, record_schema)
step_comma... | Verifies that the command output does not contain log records from
the provided log categories (in any order).
.. code-block: gherkin
Given I define the log record schema:
| category | level | message |
| root | ERROR | __LOG_MESSAGE__ |
Then the command output should n... |
def get_assignments_by_sis_course_id(self, sis_course_id):
    """Return assignment analytics data for the given SIS course id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.course_assignments
    """
    sis_id = self._sis_id(sis_course_id, sis_field="course")
    url = "/api/v1/courses/%s/analytics/assignments.json" % sis_id
    return self._get_resource(url)
def change_bgcolor_enable(self, state):
    """Toggle background-color mode on the data model; the global
    min/max option is only enabled while bgcolor is on and the data
    is not a Series."""
    self.dataModel.bgcolor(state)
    enable_global = (state > 0) and not self.is_series
    self.bgcolor_global.setEnabled(enable_global)
def is_valid_callsign(self, callsign, timestamp=timestamp_now):
    """Check whether a callsign is valid.

    Args:
        callsign (str): Amateur Radio callsign
        timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)

    Returns:
        bool: True if a lookup entry exists for the callsign, False otherwise.

    Example:
        The following checks if "DH1TW" is a valid callsign.
    """
    try:
        if self.get_all(callsign, timestamp):
            return True
    except KeyError:
        pass
    # No data found, or the lookup raised KeyError -> invalid.
    # Previously this fell off the end and implicitly returned None,
    # violating the documented bool contract.
    return False
363,905 | def _ending_consonants_only(self, letters: List[str]) -> List[int]:
reversed_letters = list(reversed(letters))
length = len(letters)
for idx, letter in enumerate(reversed_letters):
if not self._contains_vowels(letter) and self._contains_consonants(letter):
re... | Return a list of positions for ending consonants. |
def is_active(self):
    """Return True if the logical volume is active, False otherwise.

    Opens the liblvm handle, queries the activation state, and always
    closes the handle again — even if the liblvm call raises.
    """
    self.open()
    try:
        active = lvm_lv_is_active(self.__lvh)
    finally:
        # Without the finally, an exception from lvm_lv_is_active would
        # leak the open handle.
        self.close()
    return bool(active)
363,907 | def fetch_items(self, category, **kwargs):
from_date = kwargs[]
logger.info("Looking for messages from since %s",
self.url, str(from_date))
mailing_list = HyperKittyList(self.url, self.dirpath)
mailing_list.fetch(from_date=from_date)
messages = se... | Fetch the messages
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items |
363,908 | def auto_delete_files_on_instance_change(
instance: Any,
fieldnames: Iterable[str],
model_class) -> None:
if not instance.pk:
return
try:
old_instance = model_class.objects.get(pk=instance.pk)
except model_class.DoesNotExist:
return
... | Deletes files from filesystem when object is changed.
model_class: ``Type[Model]``
... only the type checker in Py3.5 is broken; v.s. |
363,909 | def resync(self):
success = 0
for head in [True, False]:
for _ in range(30):
try:
self.status(head)
success += 1
break
except Exception as e:
self._log_error(e)
... | make sure we can ping the head and assigned node.
Possibly after an env.exit() |
363,910 | def _initialize(self):
payload = {
: self.session.cookies.get(),
: self.session.cookies.get()
}
if self.fm_user.logged_in:
payload[] = self.session.cookies.get()
payload.update(self.transfer_info)
method, url = get_URL()
... | Initialize transfer. |
def show(self):
    """Display the menus: attach the log-selection layout to the parent
    layout, record one more visible menu, and connect event signals."""
    layout = self._logSelectLayout
    self.parent.addLayout(layout)
    self.menuCount = self.menuCount + 1
    self._connectSlots()
def update_data_run(self, event_to_wait_on):
    """Thread body: wait for the comm process to signal, then run the
    ``update_data_func`` callback in the context of the main process.

    Returns when the kill event is set (checked after each wake-up) or
    when the wait itself yields a falsy value.
    """
    while True:
        if not event_to_wait_on.wait():
            return
        event_to_wait_on.clear()
        if self.update_data_callback_kill_event.is_set():
            return
        self.update_data_func()
363,913 | def parse_series(s):
if isinstance(s, Element):
s = s._format_element()
if not s or s == :
else:
b = int(b)
return (a, b) | Parses things like '1n+2', or 'an+b' generally, returning (a, b) |
363,914 | def _send_consumer_aware_request(self, group, payloads, encoder_fn, decoder_fn):
original_ordering = [(p.topic, p.partition) for p in payloads]
broker = self._get_coordinator_for_group(group)
responses = {}
requestId = self._next_id()
... | Send a list of requests to the consumer coordinator for the group
specified using the supplied encode/decode functions. As the payloads
that use consumer-aware requests do not contain the group (e.g.
OffsetFetchRequest), all payloads must be for a single group.
Arguments:
group... |
363,915 | def load_json(path, quiet=False, cli=False):
checkfor = [path+".json", path]
for inpath in checkfor:
inpath = inpath.replace("~", os.path.expanduser("~"))
try:
with open(inpath, ) as infile:
fullj = json.loads(infile.read(), object_hook=_tu... | Load a json serialized object and ensure it matches to the current
Assembly object format |
363,916 | def heatmap(adata, var_names, groups=None, groupby=None, annotations=None, use_raw=False, layers=[], color_map=None,
color_map_anno=None, colorbar=True, row_width=None, xlabel=None, title=None, figsize=None, dpi=None,
show=True, save=None, ax=None, **kwargs):
if not in adata.obs... | \
Plot pseudotimeseries for genes as heatmap.
Arguments
---------
adata: :class:`~anndata.AnnData`
Annotated data matrix.
var_names: `str`, list of `str`
Names of variables to use for the plot.
groups: `str`, list of `str` or `None` (default: `None`)
Groups selected to... |
363,917 | def run(
cmd,
env=None,
return_object=False,
block=True,
cwd=None,
verbose=False,
nospin=False,
spinner_name=None,
combine_stderr=True,
display_limit=200,
write_to_stdout=True,
):
_env = os.environ.copy()
if env:
_env.update(env)
if six.PY2:
... | Use `subprocess.Popen` to get the output of a command and decode it.
:param list cmd: A list representing the command you want to run.
:param dict env: Additional environment settings to pass through to the subprocess.
:param bool return_object: When True, returns the whole subprocess instance
:param b... |
363,918 | def linearize_data_types(self):
linearized_data_types = []
seen_data_types = set()
def add_data_type(data_type):
if data_type in seen_data_types:
return
elif data_type.namespace != self:
re... | Returns a list of all data types used in the namespace. Because the
inheritance of data types can be modeled as a DAG, the list will be a
linearization of the DAG. It's ideal to generate data types in this
order so that composite types that reference other composite types are
defined in ... |
363,919 | def _create(self):
from .tools import makedirs_safe
makedirs_safe(os.path.dirname(self._database))
Base.metadata.create_all(self._engine)
logger.debug("Created new empty database " % self._database) | Creates a new and empty database. |
363,920 | def no(self, text, count=None):
if count is None and self.persistent_count is not None:
count = self.persistent_count
if count is None:
count = 0
mo = re.search(r"\A(\s*)(.+?)(\s*)\Z", text)
pre = mo.group(1)
word = mo.group(2)
post = mo.... | If count is 0, no, zero or nil, return 'no' followed by the plural
of text.
If count is one of:
1, a, an, one, each, every, this, that
return count followed by text.
Otherwise return count follow by the plural of text.
In the return value count is always followed b... |
363,921 | def is_file(cls, file):
peeked_data = wpull.string.printable_bytes(
wpull.util.peek_file(file)).lower()
if b in peeked_data \
or b in peeked_data \
or b in peeked_data \
or b in peeked_data \
or b in peeked_data \
or b in peeke... | Return whether the file is likely to be HTML. |
def get_parent(self, level=1):
    """Return the parent directory (*level* steps up) as a
    `DirectoryInfo`, or None when self is already at the top."""
    try:
        ancestor_path = self.path.get_parent(level)
    except ValueError:
        # Walking above the top-level directory has no parent.
        return None
    assert ancestor_path
    return DirectoryInfo(ancestor_path)
363,923 | def list(self):
mask =
results = self.client.call(, , mask=mask)
return results | List Reserved Capacities |
363,924 | def get_arthur_params_from_url(cls, url):
params = {}
args = cls.get_perceval_params_from_url(url)
parser = GitLabCommand.setup_cmd_parser()
parsed_args = parser.parse(*args)
params[] = parsed_args.owner
params[] = parsed_args.repository
param... | Get the arthur params given a URL for the data source |
363,925 | def get_triplets_at_q(grid_point,
mesh,
point_group,
reciprocal_lattice,
is_time_reversal=True,
swappable=True,
stores_triplets_map=False):
map_triplets, map_q, grid_address ... | Parameters
----------
grid_point : int
A grid point
mesh : array_like
Mesh numbers
dtype='intc'
shape=(3,)
point_group : array_like
Rotation matrices in real space. Note that those in reciprocal space
mean these matrices transposed (local terminology).
... |
363,926 | def filter(self, chamber, congress=CURRENT_CONGRESS, **kwargs):
check_chamber(chamber)
kwargs.update(chamber=chamber, congress=congress)
if in kwargs and in kwargs:
path = ("members/{chamber}/{state}/{district}/"
"current.json").format(**kwargs)
... | Takes a chamber and Congress,
OR state and district, returning a list of members |
363,927 | def is_older_than_metadata(self):
try:
path = self.doc_file.path
except AttributeError:
path = self.doc_file
source_ref = self._doc.ref.path
try:
age_diff = getmtime(source_ref) - getmtime(path)
return age_diff > 0
exc... | Return True if the package save file is older than the metadata. If it is, it should be rebuilt. Returns
False if the time of either can't be determined
:param path: Optional extra save path, used in save_path() |
363,928 | def mcc(x, axis=0, autocorrect=False):
if axis is not 0:
x = x.T
n, c = x.shape
if c < 2:
raise Exception(
"Only " + str(c) + " variables provided. Min. 2 required.")
r = np.ones((c, c))
p = np.zeros((c, c))
for i in range(0, c):
... | Matthews correlation
Parameters
----------
x : ndarray
dataset of binary [0,1] values
axis : int, optional
Variables as columns is the default (axis=0). If variables
are in the rows use axis=1
autocorrect : bool, optional
If all predictions are True or all are Fals... |
def filter_device_by_class(vid, pid, device_class):
    """! @brief Decide whether to skip a device based on bDeviceClass.

    Devices whose class is in CMSIS_DAP_USB_CLASSES are kept, as is an
    ARM DAPLink probe reporting the USB communications class.

    @retval True Skip the device.
    @retval False The device may be a CMSIS-DAP device; keep it.
    """
    if device_class in CMSIS_DAP_USB_CLASSES:
        return False
    is_daplink = (vid, pid) == ARM_DAPLINK_ID
    if is_daplink and device_class == USB_CLASS_COMMUNICATIONS:
        return False
    return True
363,930 | def svg_data_uri(self, xmldecl=False, encode_minimal=False,
omit_charset=False, nl=False, **kw):
return writers.as_svg_data_uri(self.matrix, self._version,
xmldecl=xmldecl, nl=nl,
encode_minimal=encode_mi... | \
Converts the QR Code into a SVG data URI.
The XML declaration is omitted by default (set ``xmldecl`` to ``True``
to enable it), further the newline is omitted by default (set ``nl`` to
``True`` to enable it).
Aside from the missing ``out`` parameter and the different ``xmldec... |
def load(cls, sc, path):
    """Load a PowerIterationClusteringModel from the given path."""
    java_model = cls._load_java(sc, path)
    java_wrapper = sc._jvm.org.apache.spark.mllib.api.python.PowerIterationClusteringModelWrapper(java_model)
    return PowerIterationClusteringModel(java_wrapper)
363,932 | def _parse_table_name(self, table_id):
attributes = table_id.split()
year_month = "-".join(attributes[:2])
app_id = "-".join(attributes[2:])
if year_month.count("-") == 1 and all(
[num.isdigit() for num in year_month.split()]):
ret... | Parse a table name in the form of appid_YYYY_MM or
YYYY_MM_appid and return a tuple consisting of YYYY-MM and the app id.
Returns (None, None) in the event of a name like <desc>_YYYYMMDD_<int>
Parameters
----------
table_id : str
The table id as listed by BigQuery
... |
def newline(self):
    """Emit the end-of-line sequence and start a fresh line."""
    self.write_str(self.eol)
    # A new line has the full width available again.
    self.room = self.maxlinelen
363,934 | def iter_followers(self, login=None, number=-1, etag=None):
if login:
return self.user(login).iter_followers()
return self._iter_follow(, int(number), etag=etag) | If login is provided, iterate over a generator of followers of that
login name; otherwise return a generator of followers of the
authenticated user.
:param str login: (optional), login of the user to check
:param int number: (optional), number of followers to return. Default:
... |
363,935 | def get_oauth_access_token(url, client_id, client_secret, token_type=, grant_type=,
refresh_token=None):
now = datetime.datetime.utcnow()
data = {
: grant_type,
: client_id,
: client_secret,
: token_type,
}
if refresh_token:
data[] ... | Retrieves OAuth 2.0 access token using the given grant type.
Args:
url (str): Oauth2 access token endpoint
client_id (str): client ID
client_secret (str): client secret
Kwargs:
token_type (str): Type of token to return. Options include bearer and jwt.
grant_type (str): O... |
def commit(self, message=None, amend=False, stage=True):
    """Commit any changes in this repo, optionally staging all changes
    beforehand.  Thin wrapper around git_commit for self.repo_dir."""
    return git_commit(
        self.repo_dir,
        message=message,
        amend=amend,
        stage=stage,
    )
363,937 | def cli(env, keyword, package_type):
manager = ordering.OrderingManager(env.client)
table = formatting.Table(COLUMNS)
_filter = {: {: {: }}}
if keyword:
_filter[] = {: % keyword}
if package_type:
_filter[] = {: {: package_type}}
packages = manager.list_packages(filter=_fi... | List packages that can be ordered via the placeOrder API.
::
# List out all packages for ordering
slcli order package-list
# List out all packages with "server" in the name
slcli order package-list --keyword server
# Select only specifict package types
slcli orde... |
def objects(self, cls=None):
    """Iterate over the objects in this directory, optionally restricted
    to instances of *cls* (default: every object)."""
    everything = (asrootpy(key.ReadObj(), warn=False)
                  for key in self.GetListOfKeys())
    if cls is None:
        return everything
    return (obj for obj in everything if isinstance(obj, cls))
def _keys_to_camel_case(self, obj):
    """Return a copy of a dictionary with every key converted to camel
    case via to_camel_case; values are kept untouched.

    :param obj: Dictionary whose keys should be converted to camel case.
    :return: New dictionary with the input values and camel-case keys.
    """
    # Dict comprehension instead of dict(<generator>) — same result,
    # clearer and faster.
    return {to_camel_case(key): value for key, value in obj.items()}
363,940 | def dispatch(self, method, url, auth=None, params=None, **kwargs):
r = Request(
method=method,
url=url,
auth=auth,
params=params,
data=kwargs)
s = Session()
resp = s.send(r.prepare())
status = resp.status_code
t... | Send HTTP request, with given method,
credentials and data to the given URL,
and return the success and the result on success. |
363,941 | def ppo_atari_base():
hparams = ppo_discrete_action_base()
hparams.learning_rate_constant = 1e-4
hparams.epoch_length = 200
hparams.gae_gamma = 0.985
hparams.gae_lambda = 0.985
hparams.entropy_loss_coef = 0.003
hparams.value_loss_coef = 1
hparams.optimization_epochs = 3
hparams.epochs_num = 1000
... | Pong base parameters. |
363,942 | def textbetween(variable,
firstnum=None,
secondnum=None,
locationoftext=):
if locationoftext == :
return variable[firstnum:secondnum]
elif locationoftext == :
return variable[firstnum:]
elif locationoftext == :
return variable[:sec... | Get The Text Between Two Parts |
def _kernel(kernel_spec):
    """Expand *kernel_spec* into a length-2 list.

    Args:
        kernel_spec: An integer, or a length-1 or length-2 sequence.

    Returns:
        A length-2 list (a length-2 input is returned as-is).
    """
    if isinstance(kernel_spec, tf.compat.integral_types):
        # A bare integer means a square kernel.
        return [kernel_spec] * 2
    if len(kernel_spec) == 1:
        return [kernel_spec[0]] * 2
    assert len(kernel_spec) == 2
    return kernel_spec
def create_question(self, question, type=None, **kwargs):
    """Return a Question of the specified type.

    Known types are "choice" and "confirmation"; a falsy *type* gives a
    plain Question, and an unrecognized type yields None.
    """
    if not type:
        return Question(question, **kwargs)
    dispatch = {
        "choice": ChoiceQuestion,
        "confirmation": ConfirmationQuestion,
    }
    question_cls = dispatch.get(type)
    if question_cls is not None:
        return question_cls(question, **kwargs)
363,945 | def moveToXY(self, vehID, edgeID, lane, x, y, angle=tc.INVALID_DOUBLE_VALUE, keepRoute=1):
s angle to
the given value (for drawing).
If the angle is set to INVALID_DOUBLE_VALUE, the vehicle assumes the
natural angle of the edge on which it is driving.
If keepRoute is set to 1, th... | Place vehicle at the given x,y coordinates and force its angle to
the given value (for drawing).
If the angle is set to INVALID_DOUBLE_VALUE, the vehicle assumes the
natural angle of the edge on which it is driving.
If keepRoute is set to 1, the closest position
within the exist... |
363,946 | def write(*args):
s = ""
for a in args:
a = to_data(a)
if isinstance(a, np.ndarray):
s += ("\t" if s else "") + "Tensor {} {} min: {:.3f} max: {:.3f}".format(
a.dtype, a.shape, a.min(), a.max())
print(s)
s = ""
... | Like print(), but recognizes tensors and arrays and show
more details about them.
Example:
hl.write("My Tensor", my_tensor)
Prints:
My Tensor float32 (10, 3, 224, 224) min: 0.0 max: 1.0 |
363,947 | def clear(self, asset_manager_id, book_ids=None):
self.logger.info(, asset_manager_id)
url = % (self.endpoint, asset_manager_id)
params = {: .join(book_ids)} if book_ids else {}
response = self.session.delete(url, params=params)
if response.ok:
tran_count = ... | This method deletes all the data for an asset_manager_id
and option book_ids.
It should be used with extreme caution. In production it
is almost always better to Inactivate rather than delete. |
363,948 | def pick_key(keys, use, alg=, key_type=, kid=):
res = []
if not key_type:
if use == :
key_type = jws_alg2keytype(alg)
else:
key_type = jwe_alg2keytype(alg)
for key in keys:
if key.use and key.use != use:
continue
if key.kty == key_ty... | Based on given criteria pick out the keys that fulfill them from a
given set of keys.
:param keys: List of keys. These are :py:class:`cryptojwt.jwk.JWK`
instances.
:param use: What the key is going to be used for 'sig'/'enc'
:param alg: crypto algorithm
:param key_type: Type of key 'rsa'/'e... |
363,949 | def select(self, Class, set=None, recursive=True, ignore=True, node=None):
if self.include:
return self.subdoc.data[0].select(Class,set,recursive, ignore, node)
else:
return iter([]) | See :meth:`AbstractElement.select` |
def dict_filter_nones(dict_):
    r"""
    Return a copy of ``dict_`` with all None values removed.

    Args:
        dict_ (dict): a dictionary

    Returns:
        dict: new dictionary containing only the non-None entries
    """
    # .items() replaces six.iteritems — identical behavior on dicts.
    return {
        key: val
        for key, val in dict_.items()
        if val is not None
    }
363,951 | def realtime_observations(cls, buoy, data_type=):
endpoint = cls()
parsers = {: endpoint._parse_met,
: endpoint._parse_drift,
: endpoint._parse_cwind,
: endpoint._parse_spec,
: endpoint._parse_ocean,
... | Retrieve the realtime buoy data from NDBC.
Parameters
----------
buoy : str
Name of buoy
data_type : str
Type of data requested, must be one of
'txt' standard meteorological data
'drift' meteorological data from drifting buoys and limited ... |
363,952 | def get_ips(self, instance_id):
if not instance_id:
raise InstanceError("could not retrieve the ip address for node: "
"no associated instance id")
gce = self._connect()
instances = gce.instances()
try:
request = instances.get(... | Retrieves the ip addresses (public) from the cloud
provider by the given instance id.
:param str instance_id: id of the instance
:return: list (ips)
:raises: InstanceError if the ip could not be retrieved. |
def isRef(self, doc, attr):
    """Determine whether an attribute is of type Ref.  With a DTD this
    is exact; otherwise libxml2 falls back to a name-based heuristic
    (name "ref", upper or lowercase)."""
    doc__o = doc._o if doc is not None else None
    attr__o = attr._o if attr is not None else None
    return libxml2mod.xmlIsRef(doc__o, self._o, attr__o)
363,954 | def _ReadStructureFromFileObject(
self, file_object, file_offset, data_type_map):
context = None
data = b
last_data_size = 0
data_size = data_type_map.GetByteSize()
if not data_size:
data_size = data_type_map.GetSizeHint()
while data_size != last_data_size:
read_offset =... | Reads a structure from a file-like object.
If the data type map has a fixed size this method will read the predefined
number of bytes from the file-like object. If the data type map has a
variable size, depending on values in the byte stream, this method will
continue to read from the file-like object ... |
363,955 | async def async_oauth_dance(consumer_key, consumer_secret, callback_uri="oob"):
token = await get_oauth_token(consumer_key, consumer_secret, callback_uri)
oauth_verifier = await get_oauth_verifier(token[])
token = await get_access_token(
consumer_key,
consumer_secret,
oauth_v... | OAuth dance to get the user's access token
Parameters
----------
consumer_key : str
Your consumer key
consumer_secret : str
Your consumer secret
callback_uri : str
Callback uri, defaults to 'oob'
Returns
-------
dict
Access tokens |
363,956 | def _make_pcaps(self):
self._pcaps = {}
for devname,intf in self._devinfo.items():
if intf.iftype == InterfaceType.Loopback:
senddev = _RawSocket(devname, protocol=IPProtocol.UDP)
self._localsend[devname] = senddev
pdev = PcapLiveDevice(de... | Internal method. Create libpcap devices
for every network interface we care about and
set them in non-blocking mode. |
363,957 | def get_buffer(self, format, output=None):
return self.io.get_buffer(self._get_data(), self.get_header(),
format, output=output) | Get image as a buffer in (format).
Format should be 'jpeg', 'png', etc. |
363,958 | def save_existing(self, form, instance, commit=True):
self._prepare_multilingual_object(instance, form)
return forms.save_instance(form, instance, exclude=[self._pk_field.name], commit=commit) | NOTE: save_new method is completely overridden here, there's no
other way to prevent double save otherwise. Just assign translated data
to object |
363,959 | def check_response(self, resp):
if resp.ok:
json = resp.json()
self.response = ResponseHolder()
self.response.response = json
if not in json:
raise InvalidResponse()
self.response.status = self.field_t... | Checks response after request was made.
Checks status of the response, mainly
:param resp:
:return: |
363,960 | def _configure_formatting(self):
self.format_strings(self.DEFAULT_FORMAT, self.DEFAULT_FORMAT)
if self.fit_to_screen:
try:
import fcntl
import struct
import termios
hw = struct.unpack(, fcntl.ioctl(1,... | Configures output formatting, and fitting output to the current terminal width.
Returns None. |
363,961 | def graph(self, fnm=None, size=None, fntsz=None, fntfm=None, clrgen=None,
rmsz=False, prog=):
if clrgen is None:
clrgen = lambda n: self._clrgen(n, 0.330, 0.825)
clrlst = clrgen(len(self.group))
g = pgv.AGraph(strict=False, directe... | Construct call graph
Parameters
----------
fnm : None or string, optional (default None)
Filename of graph file to be written. File type is determined by
the file extentions (e.g. dot for 'graph.dot' and SVG for
'graph.svg'). If None, a file is not written.
... |
363,962 | def process(self, document):
content = json.dumps(document)
versions = {}
versions.update({: Version(VERSION)})
versions.update(self.get_version("Bash", self.BASH_VERSION))
if content.find() >= 0 or content.find() >= 0:
versions.update(VersionsCheck.get_ver... | Logging versions of required tools. |
363,963 | def _dbdir():
global dbdir
from os import mkdir, path, getcwd, chdir
if dbdir is None:
from acorn.config import settings
config = settings("acorn")
if (config.has_section("database") and
config.has_option("database", "folder")):
dbdir = config.get("d... | Returns the path to the directory where acorn DBs are stored. |
363,964 | def open(self, file, mode=, perm=0o0644):
args = {
: file,
: mode,
: perm,
}
return self._client.json(, args) | Opens a file on the node
:param file: file path to open
:param mode: open mode
:param perm: file permission in octet form
mode:
'r' read only
'w' write only (truncate)
'+' read/write
'x' create if not exist
'a' append
:return: a... |
363,965 | def _post_process_yaml_data(self,
fixture_data: Dict[str, Dict[str, Any]],
relationship_columns: Set[str],
) -> Tuple[Dict[str, Dict[str, Any]], List[str]]:
rv = {}
relationships = set()
if n... | Convert and normalize identifier strings to Identifiers, as well as determine
class relationships. |
363,966 | def _readsie(self, pos):
codenum, pos = self._readuie(pos)
if not codenum:
return 0, pos
try:
if self[pos]:
return -codenum, pos + 1
else:
return codenum, pos + 1
except IndexError:
raise ReadError("... | Return interpretation of next bits as a signed interleaved exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code. |
363,967 | def _bond_percolation(network, tmask):
r
csr = network.create_adjacency_matrix(weights=tmask, fmt=,
drop_zeros=True)
clusters = sprs.csgraph.connected_components(csgraph=csr,
directed=False)[1]
Ps ... | r"""
This private method is called by 'find_clusters' |
363,968 | def _subdivide_nodes(nodes):
_, num_nodes = np.shape(nodes)
if num_nodes == 2:
left_nodes = _helpers.matrix_product(nodes, _LINEAR_SUBDIVIDE_LEFT)
right_nodes = _helpers.matrix_product(nodes, _LINEAR_SUBDIVIDE_RIGHT)
elif num_nodes == 3:
left_nodes = _helpers.matrix_product(node... | Subdivide a curve into two sub-curves.
Does so by taking the unit interval (i.e. the domain of the surface) and
splitting it into two sub-intervals by splitting down the middle.
.. note::
There is also a Fortran implementation of this function, which
will be used if it can be built.
Ar... |
363,969 | def magic_memit(self, line=):
opts, stmt = self.parse_options(line, , posix=False, strict=False)
repeat = int(getattr(opts, , 1))
if repeat < 1:
repeat == 1
timeout = int(getattr(opts, , 0))
if timeout <= 0:
timeout = None
run_in_place = hasattr(opts, )
mem_usage = memo... | Measure memory usage of a Python statement
Usage, in line mode:
%memit [-ir<R>t<T>] statement
Options:
-r<R>: repeat the loop iteration <R> times and take the best result.
Default: 1
-i: run the code in the current environment, without forking a new process.
This is required on some Mac... |
363,970 | def get_values():
default_initial = ((name, options[0])
for name, options in settings.CONFIG.items())
initial = dict(default_initial, **dict(config._backend.mget(settings.CONFIG)))
return initial | Get dictionary of values from the backend
:return: |
def disable_digital_reporting(self, pin):
    """Disable digital input reporting for *pin*.  Reporting is turned
    off for all 8 bits of the port that contains the pin.

    :param pin: Pin (and therefore all pins of its port)
    :return: No return value
    """
    port_number = pin // 8
    self._command_handler.send_command(
        [self._command_handler.REPORT_DIGITAL + port_number,
         self.REPORTING_DISABLE])
363,972 | def infixNotation( baseExpr, opList, lpar=Suppress(), rpar=Suppress() ):
ret = Forward()
lastExpr = baseExpr | ( lpar + ret + rpar )
for i,operDef in enumerate(opList):
opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
termName = "%s term" % opExpr if arity < 3 else "%s%s term" %... | Helper method for constructing grammars of expressions made up of
operators working in a precedence hierarchy. Operators may be unary or
binary, left- or right-associative. Parse actions can also be attached
to operator expressions.
Parameters:
- baseExpr - expression representing... |
363,973 | def inner(self, x1, x2):
if x1 not in self:
raise LinearSpaceTypeError(
.format(x1, self))
if x2 not in self:
raise LinearSpaceTypeError(
.format(x2, self))
inner = self._inner(x1, x2)
... | Return the inner product of ``x1`` and ``x2``.
Parameters
----------
x1, x2 : `LinearSpaceElement`
Elements whose inner product to compute.
Returns
-------
inner : `LinearSpace.field` element
Inner product of ``x1`` and ``x2``. |
363,974 | def delete(self, removealien=True):
if self.status() is None:
parent = self.get_parent()
if parent:
parent.remove_child(self)
self._treeitem.parent().remove_child(self._treeitem)
self.get_root().remove_reftrack(self)
return
... | Delete the current entity.
This will also call :meth:`RefobjInterface.get_children_to_delete` and
delete these children first by calling :meth:`Reftrack.delete`.
To delete the content it will call :meth:`RefobjInterface.delete`.
Then the refobject will be set to None. If the :class:`Ref... |
363,975 | def reduce_tree(node, parent=None):
new_node = None
if node.type == syms.Matcher:
node = node.children[0]
if node.type == syms.Alternatives :
if len(node.children) <= 2:
new_node = reduce_tree(node.children[0], parent)
else:
... | Internal function. Reduces a compiled pattern tree to an
intermediate representation suitable for feeding the
automaton. This also trims off any optional pattern elements(like
[a], a*). |
def add(self, resource, replace=False):
    """Add a single resource to the set, keyed by its URI.

    Raises ResourceSetDupeError when the URI is already present and
    *replace* is False.
    """
    uri = resource.uri
    if not replace and uri in self:
        raise ResourceSetDupeError(
            "Attempt to add resource already in this set")
    self[uri] = resource
363,977 | def difference(self, second_iterable, selector=identity):
if self.closed():
raise ValueError("Attempt to call difference() on a "
"closed Queryable.")
if not is_iterable(second_iterable):
raise TypeError("Cannot compute difference() with sec... | Returns those elements which are in the source sequence which are not
in the second_iterable.
This method is equivalent to the Except() LINQ operator, renamed to a
valid Python identifier.
Note: This method uses deferred execution, but as soon as execution
commences the ent... |
363,978 | def _register_update(self, fmt={}, replot=False, force=False,
todefault=False):
if self.disabled:
return
self.replot = self.replot or replot
self._todefault = self._todefault or todefault
if force is True:
force = list(fmt)
... | Register formatoptions for the update
Parameters
----------
fmt: dict
Keys can be any valid formatoptions with the corresponding values
(see the :attr:`formatoptions` attribute)
replot: bool
Boolean that determines whether the data specific formatopti... |
def listItem(node):
    """Build a docutils list item whose children are the
    MarkDown-converted children of *node*."""
    item = nodes.list_item()
    for child in MarkDown(node):
        item += child
    return item
363,980 | def chunks(self):
chunks = {}
for v in self.variables.values():
if v.chunks is not None:
for dim, c in zip(v.dims, v.chunks):
if dim in chunks and c != chunks[dim]:
raise ValueError()
chunks[dim] = c
... | Block dimensions for this dataset's data or None if it's not a dask
array. |
363,981 | def update(self, eid, data, token):
final_dict = {"data": {"id": eid, "type": "libraryEntries", "attributes": data}}
final_headers = self.header
final_headers[] = "Bearer {}".format(token)
r = requests.patch(self.apiurl + "/library-entries/{}".format(eid), json=final_dict, head... | Update a given Library Entry.
:param eid str: Entry ID
:param data dict: Attributes
:param token str: OAuth token
:return: True or ServerError
:rtype: Bool or Exception |
363,982 | def _setup_time_axis(self, t_start=None, t_stop=None):
ii_start, ii_stop = 0, self.n_ints_in_file
if t_start:
ii_start = t_start
if t_stop:
ii_stop = t_stop
n_ints = ii_stop - ii_start
t0 = self.header[b]
t_delt = self.... | Setup time axis. |
def _get_windows_console_width() -> int:
    """Return the current console window's width, in Windows.

    :return: The console width in character cells.
    """
    from ctypes import byref, windll
    import pyreadline
    # -11 is STD_OUTPUT_HANDLE.
    stdout_handle = windll.kernel32.GetStdHandle(-11)
    buffer_info = pyreadline.console.CONSOLE_SCREEN_BUFFER_INFO()
    windll.kernel32.GetConsoleScreenBufferInfo(stdout_handle, byref(buffer_info))
    return buffer_info.dwSize.X
363,984 | def get_instance(self, payload):
return UsageInstance(self._version, payload, account_sid=self._solution[], ) | Build an instance of UsageInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.usage.UsageInstance
:rtype: twilio.rest.api.v2010.account.usage.UsageInstance |
363,985 | def list_price(self):
price = self._safe_get_element_text()
currency = self._safe_get_element_text(
)
if price:
return float(price) / 100, currency
else:
return None, None | List Price.
:return:
A tuple containing:
1. Float representation of price.
2. ISO Currency code (string). |
363,986 | def get_host(self):
host = self.get_as_nullable_string("host")
host = host if host != None else self.get_as_nullable_string("ip")
return host | Gets the host name or IP address.
:return: the host name or IP address. |
363,987 | def ndim(n, *args, **kwargs):
thunk = kwargs.get("thunk", lambda: random.random())
if not args:
return [thunk() for i in range(n)]
A = []
for i in range(n):
A.append( ndim(*args, thunk=thunk) )
return A | Makes a multi-dimensional array of random floats. (Replaces RandomArray). |
363,988 | def open_resource(name):
name_parts = name.lstrip().split()
for part in name_parts:
if part == os.path.pardir or os.path.sep in part:
raise ValueError( % part)
filename = os.path.join(os.path.dirname(__file__),
, *name_parts)
if not os.path.exists(fil... | Open a resource from the zoneinfo subdir for reading.
Uses the pkg_resources module if available and no standard file
found at the calculated location. |
363,989 | def _init_data_with_tdms(self, tdms_filename):
tdms_file = TdmsFile(str(tdms_filename))
table = "Cell Track"
for arg in naming.tdms2dclab:
try:
data = tdms_file.object(table, arg).data
except KeyError:
pass
... | Initializes the current RT-DC dataset with a tdms file. |
363,990 | def _piped_bamprep_region_gatk(data, region, prep_params, out_file, tmp_dir):
broad_runner = broad.runner_from_config(data["config"])
cur_bam, cl = _piped_input_cl(data, region, tmp_dir, out_file, prep_params)
if not prep_params["realign"]:
prerecal_bam = None
elif prep_params["realign"] ==... | Perform semi-piped BAM preparation using Picard/GATK tools. |
363,991 | def cluster_uniform_time(data=None, k=None, stride=1, metric=,
n_jobs=None, chunksize=None, skip=0, **kwargs):
r
from pyemma.coordinates.clustering.uniform_time import UniformTimeClustering
res = UniformTimeClustering(k, metric=metric, n_jobs=n_jobs, skip=skip, stride=stride)
fr... | r"""Uniform time clustering
If given data, performs a clustering that selects data points uniformly in
time and then assigns the data using a Voronoi discretization. Returns a
:class:`UniformTimeClustering <pyemma.coordinates.clustering.UniformTimeClustering>` object
that can be used to extract the dis... |
363,992 | def except_keyword(source, start, keyword):
start = pass_white(source, start)
kl = len(keyword)
if kl + start > len(source):
return None
if source[start:start + kl] != keyword:
return None
if kl + start < len(source) and source[start + kl] in IDENTIFIER_PART:
return No... | Returns position after keyword if found else None
Note: skips white space |
363,993 | def eval_objfn(self):
fval = self.obfn_f()
gval = self.obfn_g(self.obfn_gvar())
obj = fval + gval
return (obj, fval, gval) | Compute components of objective function as well as total
contribution to objective function. |
363,994 | def ecdsa_private_key(privkey_str=None, compressed=None):
if compressed is None:
compressed = False
if privkey_str is not None:
if len(privkey_str) == 66 and privkey_str[-2:] == :
compressed = True
return _ECPrivateKey(privkey_str, compressed=compressed) | Make a private key, but enforce the following rule:
* unless the key's hex encoding specifically ends in '01', treat it as uncompressed. |
363,995 | def generate_one_of(self):
self.l()
for definition_item in self._definition[]:
| Means that value have to be valid by only one of those definitions. It can't be valid
by two or more of them.
.. code-block:: python
{
'oneOf': [
{'type': 'number', 'multipleOf': 3},
{'type': 'number', 'multipleOf': 5},
... |
363,996 | def zone_compare(timezone):
*America/Denver
if timezone.lower() in mapper.win_to_unix:
check_zone = timezone
elif timezone.lower() in mapper.unix_to_win:
raise CommandExecutionError(
.format(timezone))
return get_zone() == mapper.get_un... | Compares the given timezone with the machine timezone. Mostly useful for
running state checks.
Args:
timezone (str):
The timezone to compare. This can be in Windows or Unix format. Can
be any of the values returned by the ``timezone.list`` function
Returns:
bool: ``... |
363,997 | def _run_ext_wsgiutils(app, config, mode):
from wsgidav.server import ext_wsgiutils_server
_logger.info(
"Running WsgiDAV {} on wsgidav.ext_wsgiutils_server...".format(__version__)
)
_logger.warning(
"WARNING: This single threaded server (ext-wsgiutils) is not meant for production.... | Run WsgiDAV using ext_wsgiutils_server from the wsgidav package. |
363,998 | def apply_corrections(self):
for error in self.errors:
for solution in error.scheduler_adapter_solutions:
if self.scheduler_adapter is not None:
if self.scheduler_adapter.__getattribut__(solution[0].__name__)(solution[1]):
return T... | Method to directly apply the corrections. |
363,999 | def endpoint_from_name(endpoint_name):
if endpoint_name is None:
return None
factory = relation_factory(endpoint_name)
if factory:
return factory.from_name(endpoint_name) | The object used for interacting with the named relations, or None. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.