Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
1,500 | def handle_put_user(self, req):
account = req.path_info_pop()
user = req.path_info_pop()
key = unquote(req.headers.get(, ))
key_hash = unquote(req.headers.get(, ))
admin = req.headers.get() ==
reseller_admin = \
req.headers.get() ==
... | Handles the PUT v2/<account>/<user> call for adding a user to an
account.
X-Auth-User-Key represents the user's key (url encoded),
- OR -
X-Auth-User-Key-Hash represents the user's hashed key (url encoded),
X-Auth-User-Admin may be set to `true` to create an account .admin, and
... |
1,501 | def asr_breaking(self, tol_eigendisplacements=1e-5):
for i in range(self.nb_qpoints):
if np.allclose(self.qpoints[i].frac_coords, (0, 0, 0)):
if self.has_eigendisplacements:
acoustic_modes_index = []
for j in range(self.nb_bands):
... | Returns the breaking of the acoustic sum rule for the three acoustic modes,
if Gamma is present. None otherwise.
If eigendisplacements are available they are used to determine the acoustic
modes: selects the bands corresponding to the eigendisplacements that
represent to a translation w... |
1,502 | def rename(self, old_fieldname, new_fieldname):
if old_fieldname not in self:
raise Exception("DataTable does not have field `%s`" %
old_fieldname)
if not isinstance(new_fieldname, basestring):
raise ValueError("DataTable fields must be strin... | Renames a specific field, and preserves the underlying order. |
1,503 | def _post_fork_init(self):
d after the fork has completed
The easiest example is that one of these module types creates a thread
in the parent process, then once the fork happens you
ropts = dict(self.opts)
ropts[] = True
runner_client = salt.runner.RunnerClient(... | Some things need to be init'd after the fork has completed
The easiest example is that one of these module types creates a thread
in the parent process, then once the fork happens you'll start getting
errors like "WARNING: Mixing fork() and threads detected; memory leaked." |
1,504 | def price_diff(self):
res = self.price.groupby(level=1).apply(lambda x: x.diff(1))
res.name =
return res | 返回DataStruct.price的一阶差分 |
1,505 | def _offset_setup(self,sigangle,leading,deltaAngleTrack):
self._sigjr= (self._progenitor.rap()-self._progenitor.rperi())/numpy.pi*self._sigv
self._siglz= self._progenitor.rperi()*self._sigv
self._sigjz= 2.*self._progenitor.zmax()/numpy.pi*self._sigv
self._sigjm... | The part of the setup related to calculating the stream/progenitor offset |
1,506 | def download(url, filename=None, print_progress=0, delete_fail=True,
**kwargs):
blocksize = 1024 * 1024
downloaded = 0
progress = None
log.info(, url)
response = open_url(url, **kwargs)
if not filename:
filename = os.path.basename(url)
output = None
try:
... | Download a file, optionally printing a simple progress bar
url: The URL to download
filename: The filename to save to, default is to use the URL basename
print_progress: The length of the progress bar, use 0 to disable
delete_fail: If True delete the file if the download was not successful,
defaul... |
1,507 | def gpg_decrypt( fd_in, path_out, sender_key_info, my_key_info, passphrase=None, config_dir=None ):
if config_dir is None:
config_dir = get_config_dir()
tmpdir = make_gpg_tmphome( prefix="decrypt", config_dir=config_dir )
res = gpg_stash_key( "decrypt", sender_key_info[], config_dir=conf... | Decrypt a stream of data using key info
for a private key we own.
@my_key_info and @sender_key_info should be data returned by gpg_app_get_key
{
'key_id': ...
'key_data': ...
'app_name': ...
}
Return {'status': True, 'sig': ...} on success
Return {'status': True} on succ... |
1,508 | def on_connect(client):
print "++ Opened connection to %s" % client.addrport()
broadcast( % client.addrport() )
CLIENT_LIST.append(client)
client.send("Welcome to the Chat Server, %s.\n" % client.addrport() ) | Sample on_connect function.
Handles new connections. |
1,509 | def shrank(self, block=None, percent_diff=0, abs_diff=1):
if block is None:
block = self.block
cur_nets = len(block.logic)
net_goal = self.prev_nets * (1 - percent_diff) - abs_diff
less_nets = (cur_nets <= net_goal)
self.prev_nets = cur_nets
return le... | Returns whether a block has less nets than before
:param Block block: block to check (if changed)
:param Number percent_diff: percentage difference threshold
:param int abs_diff: absolute difference threshold
:return: boolean
This function checks whether the change in the numbe... |
1,510 | def dictToFile(dictionary,replicateKey,outFileName):
replicateToFile=h5py.File(outFileName,"w")
for i in range(len(dictionary[replicateKey])):
replicateToFile.create_dataset("{}".format(dictionary[replicateKey].keys()[i])\
,data=dictionary[replicateKey].values()[... | Function to write dictionary data, from subsampleReplicates, to file an hdf5 file.
:param dictionary: nested dictionary returned by subsampleReplicates
:param replicateKey: string designating the replicate written to file
:param outFileName: string defining the hdf5 filename |
1,511 | def delete_model(self, meta: dict):
bucket = self.connect()
if bucket is None:
raise BackendRequiredError
blob_name = "models/%s/%s.asdf" % (meta["model"], meta["uuid"])
self._log.info(blob_name)
try:
self._log.info("Deleting model ...")
... | Delete the model from GCS. |
1,512 | def diffmap(adata, n_comps=15, copy=False):
if not in adata.uns:
raise ValueError(
)
if n_comps <= 2:
raise ValueError(
)
adata = adata.copy() if copy else adata
_diffmap(adata, n_comps=n_comps)
return adata if copy else None | Diffusion Maps [Coifman05]_ [Haghverdi15]_ [Wolf18]_.
Diffusion maps [Coifman05]_ has been proposed for visualizing single-cell
data by [Haghverdi15]_. The tool uses the adapted Gaussian kernel suggested
by [Haghverdi16]_ in the implementation of [Wolf18]_.
The width ("sigma") of the connectivity kern... |
1,513 | def _wait_until(obj, att, desired, callback, interval, attempts, verbose,
verbose_atts):
if not isinstance(desired, (list, tuple)):
desired = [desired]
if verbose_atts is None:
verbose_atts = []
if not isinstance(verbose_atts, (list, tuple)):
verbose_atts = [verbose_atts... | Loops until either the desired value of the attribute is reached, or the
number of attempts is exceeded. |
1,514 | def get_user_for_membersuite_entity(membersuite_entity):
user = None
user_created = False
user_username = generate_username(membersuite_entity)
try:
user = User.objects.get(username=user_username)
except User.DoesNotExist:
pass
if not user:
try:
... | Returns a User for `membersuite_entity`.
membersuite_entity is any MemberSuite object that has the fields
membersuite_id, email_address, first_name, and last_name, e.g.,
PortalUser or Individual. |
1,515 | def _raise_error_if_not_of_type(arg, expected_type, arg_name=None):
display_name = "%s " % arg_name if arg_name is not None else "Argument "
lst_expected_type = [expected_type] if \
type(expected_type) == type else expected_type
err_msg = "%smust be of type %s " % (display_nam... | Check if the input is of expected type.
Parameters
----------
arg : Input argument.
expected_type : A type OR a list of types that the argument is expected
to be.
arg_name : The name of the variable in the function being used. No
name is a... |
1,516 | def _add_new_items(self, config, seen):
for (key, value) in self.items():
if key not in seen:
self._set_value(config, key, value) | Add new (unseen) items to the config. |
1,517 | def _check_series_localize_timestamps(s, timezone):
from pyspark.sql.utils import require_minimum_pandas_version
require_minimum_pandas_version()
from pandas.api.types import is_datetime64tz_dtype
tz = timezone or _get_local_timezone()
if is_datetime64tz_dtype(s.dtype):
return s.d... | Convert timezone aware timestamps to timezone-naive in the specified timezone or local timezone.
If the input series is not a timestamp series, then the same series is returned. If the input
series is a timestamp series, then a converted series is returned.
:param s: pandas.Series
:param timezone: the... |
1,518 | def getStats(self):
recordStoreStats = self._recordStore.getStats()
streamStats = dict()
for (key, values) in recordStoreStats.items():
fieldStats = dict(zip(self._recordStoreFieldNames, values))
streamValues = []
for name in self._streamFieldNames... | TODO: This method needs to be enhanced to get the stats on the *aggregated*
records.
:returns: stats (like min and max values of the fields). |
1,519 | def _process_state_embryo(self, job_record):
start_timeperiod = self.compute_start_timeperiod(job_record.process_name, job_record.timeperiod)
end_timeperiod = self.compute_end_timeperiod(job_record.process_name, job_record.timeperiod)
self._compute_and_transfer_to_progress(job_record.pr... | method that takes care of processing job records in STATE_EMBRYO state |
1,520 | def channels(self):
try:
return self._channels
except AttributeError:
logger.debug("initialize output channels ...")
channels = self.args.channels
config_channels = [sec.rpartition()[0] for sec in self.config.sections(suffix=)]
unknown = set(chan... | Output channels |
1,521 | def _set_auth_type(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=auth_type.auth_type, is_container=, presence=False, yang_name="auth-type", rest_name="auth-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_path... | Setter method for auth_type, mapped from YANG variable /routing_system/interface/ve/ipv6/ipv6_vrrp_extended/auth_type (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_auth_type is considered as a private
method. Backends looking to populate this variable should
... |
1,522 | def _parallel_exec(self, hosts):
if not hosts:
return
p = multiprocessing.Pool(self.forks)
results = []
results = p.map_async(multiprocessing_runner, hosts).get(9999999)
p.close()
p.join()
return results | handles mulitprocessing when more than 1 fork is required |
1,523 | def _proc_gnusparse_01(self, next, pax_headers):
sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
next.sparse = list(zip(sparse[::2], sparse[1::2])) | Process a GNU tar extended sparse header, version 0.1. |
1,524 | def pin_ls(self, type="all", **kwargs):
kwargs.setdefault("opts", {"type": type})
return self._client.request(, decoder=, **kwargs) | Lists objects pinned to local storage.
By default, all pinned objects are returned, but the ``type`` flag or
arguments can restrict that to a specific pin type or to some specific
objects respectively.
.. code-block:: python
>>> c.pin_ls()
{'Keys': {
... |
1,525 | def configure_logging(args):
log_format = logging.Formatter()
log_level = logging.INFO if args.verbose else logging.WARN
log_level = logging.DEBUG if args.debug else log_level
console = logging.StreamHandler()
console.setFormatter(log_format)
console.setLevel(log_level)
root_logger = lo... | Logging to console |
1,526 | def copy(self):
"Return a clone of this hash object."
other = _ChainedHashAlgorithm(self._algorithms)
other._hobj = deepcopy(self._hobj)
other._fobj = deepcopy(self._fobj)
return other | Return a clone of this hash object. |
1,527 | def token(self):
" Get token when needed."
if hasattr(self, ):
return getattr(self, )
data = json.dumps({: self.customer,
: self.username,
: self.password})
response = requests.post(
, data... | Get token when needed. |
1,528 | def get_grade_systems_by_genus_type(self, grade_system_genus_type):
collection = JSONClientValidated(,
collection=,
runtime=self._runtime)
result = collection.find(
dict({: s... | Gets a ``GradeSystemList`` corresponding to the given grade system genus ``Type`` which does not include systems of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known systems or
an error results. Otherwise, the returned list may contain only
t... |
1,529 | def list_logs(args, container_name=None):
from sregistry.main import Client as cli
if len(args.commands) > 0:
container_name = args.commands.pop(0)
cli.logs(container_name)
sys.exit(0) | list a specific log for a builder, or the latest log if none provided
Parameters
==========
args: the argparse object to look for a container name
container_name: a default container name set to be None (show latest log) |
1,530 | def print_commands(self, out=sys.stdout):
cmds = self.list_commands()
for ck in cmds:
if ck.printable:
out.write( % ck) | utility method to print commands
and descriptions for @BotFather |
1,531 | def build_common_all_meta_df(common_meta_dfs, fields_to_remove, remove_all_metadata_fields):
if remove_all_metadata_fields:
trimmed_common_meta_dfs = [pd.DataFrame(index=df.index) for df in common_meta_dfs]
else:
shared_column_headers = sorted(set.intersection(*[set(df.columns) for df in c... | concatenate the entries in common_meta_dfs, removing columns selectively (fields_to_remove) or entirely (
remove_all_metadata_fields=True; in this case, effectively just merges all the indexes in common_meta_dfs).
Returns 2 dataframes (in a tuple): the first has duplicates removed, the second does not... |
1,532 | def _ls_print_summary(all_trainings: List[Tuple[str, dict, TrainingTrace]]) -> None:
counts_by_name = defaultdict(int)
counts_by_classes = defaultdict(int)
for _, config, _ in all_trainings:
counts_by_name[get_model_name(config)] += 1
counts_by_classes[get_classes(config)] += 1
pri... | Print trainings summary.
In particular print tables summarizing the number of trainings with
- particular model names
- particular combinations of models and datasets
:param all_trainings: a list of training tuples (train_dir, configuration dict, trace) |
1,533 | def collect(nested_nodes, transform=None):
items = []
if transform is None:
transform = lambda node, parents, nodes, *args: node
def __collect__(node, parents, nodes, first, last, depth):
items.append(transform(node, parents, nodes, first, last, depth))
apply_depth_first(nested_n... | Return list containing the result of the `transform` function applied to
each item in the supplied list of nested nodes.
A custom transform function may be applied to each entry during the
flattening by specifying a function through the `transform` keyword
argument. The `transform` function will be pa... |
1,534 | def task_done(self, **kw):
def validate(task):
if not Status.is_pending(task[]):
raise ValueError("Task is not pending.")
return self._task_change_status(Status.COMPLETED, validate, **kw) | Marks a pending task as done, optionally specifying a completion
date with the 'end' argument. |
1,535 | def comment(self, text):
url = self._imgur._base_url + "/3/comment"
payload = {: self.id, : text}
resp = self._imgur._send_request(url, params=payload, needs_auth=True,
method=)
return Comment(resp, imgur=self._imgur, has_fetched=False) | Make a top-level comment to this.
:param text: The comment text. |
1,536 | def get_imports(self, option):
if option:
if len(option) == 1 and option[0].isupper() and len(option[0]) > 3:
return getattr(settings, option[0])
else:
codes = [e for e in option if e.isupper() and len(e) == 3]
if len(codes) != len... | See if we have been passed a set of currencies or a setting variable
or look for settings CURRENCIES or SHOP_CURRENCIES. |
1,537 | def attach_volume(self, xml_bytes):
root = XML(xml_bytes)
status = root.findtext("status")
attach_time = root.findtext("attachTime")
attach_time = datetime.strptime(
attach_time[:19], "%Y-%m-%dT%H:%M:%S")
return {"status": status, "attach_time": attach_time} | Parse the XML returned by the C{AttachVolume} function.
@param xml_bytes: XML bytes with a C{AttachVolumeResponse} root
element.
@return: a C{dict} with status and attach_time keys.
TODO: volumeId, instanceId, device |
1,538 | def to_xml(self):
if self.alias_hosted_zone_id != None and self.alias_dns_name != None:
body = self.AliasBody % (self.alias_hosted_zone_id, self.alias_dns_name)
else:
records = ""
for r in self.resource_records:
recor... | Spit this resource record set out as XML |
1,539 | def register_dimensions(self, dims):
if isinstance(dims, collections.Mapping):
dims = dims.itervalues()
for dim in dims:
self.register_dimension(dim.name, dim) | Register multiple dimensions on the cube.
.. code-block:: python
cube.register_dimensions([
{'name' : 'ntime', 'global_size' : 10,
'lower_extent' : 2, 'upper_extent' : 7 },
{'name' : 'na', 'global_size' : 3,
'lower_extent' : 2, 'upper... |
1,540 | def from_unidiff(cls, diff: str) -> :
lines = diff.split()
file_patches = []
while lines:
if lines[0] == or lines[0].isspace():
lines.pop(0)
continue
file_patches.append(FilePatch._read_next(lines))
return Patch(file_patc... | Constructs a Patch from a provided unified format diff. |
1,541 | def restore_review_history_for_affected_objects(portal):
logger.info("Restoring review_history ...")
query = dict(portal_type=NEW_SENAITE_WORKFLOW_BINDINGS)
brains = api.search(query, UID_CATALOG)
total = len(brains)
done = 0
for num, brain in enumerate(brains):
if num % 100 == 0:
... | Applies the review history for objects that are bound to new senaite_*
workflows |
1,542 | def LOS_CrossProj(VType, Ds, us, kPIns, kPOuts, kRMins,
Lplot=, proj=, multi=False):
assert type(VType) is str and VType.lower() in [,]
assert Lplot.lower() in [,]
assert type(proj) is str
proj = proj.lower()
assert proj in [,,,]
assert Ds.ndim==2 and Ds.shape==us.shape
... | Compute the parameters to plot the poloidal projection of the LOS |
1,543 | def get_cfgdict_list_subset(cfgdict_list, keys):
r
import utool as ut
cfgdict_sublist_ = [ut.dict_subset(cfgdict, keys) for cfgdict in cfgdict_list]
cfgtups_sublist_ = [tuple(ut.dict_to_keyvals(cfgdict)) for cfgdict in cfgdict_sublist_]
cfgtups_sublist = ut.unique_ordered(cfgtups_sublist_)
cfgdi... | r"""
returns list of unique dictionaries only with keys specified in keys
Args:
cfgdict_list (list):
keys (list):
Returns:
list: cfglbl_list
CommandLine:
python -m utool.util_gridsearch --test-get_cfgdict_list_subset
Example:
>>> # ENABLE_DOCTEST
>... |
1,544 | def diropenbox(msg=None
, title=None
, default=None
):
if sys.platform == :
_bring_to_front()
title=getFileDialogTitle(msg,title)
localRoot = Tk()
localRoot.withdraw()
if not default: default = None
f = tk_FileDialog.askdirectory(
parent=localRoot
, tit... | A dialog to get a directory name.
Note that the msg argument, if specified, is ignored.
Returns the name of a directory, or None if user chose to cancel.
If the "default" argument specifies a directory name, and that
directory exists, then the dialog box will start with that directory. |
1,545 | def dumps(obj, big_endian=True):
geom_type = obj[]
meta = obj.get(, {})
exporter = _dumps_registry.get(geom_type)
if exporter is None:
_unsupported_geom_type(geom_type)
)
return exporter(obj, big_endian, meta) | Dump a GeoJSON-like `dict` to a WKB string.
.. note::
The dimensions of the generated WKB will be inferred from the first
vertex in the GeoJSON `coordinates`. It will be assumed that all
vertices are uniform. There are 4 types:
- 2D (X, Y): 2-dimensional geometry
- Z (X, Y,... |
1,546 | def download(url, path, kind=,
progressbar=True, replace=False, timeout=10., verbose=True):
if kind not in ALLOWED_KINDS:
raise ValueError(.format(
ALLOWED_KINDS, kind))
path = op.expanduser(path)
if len(path) == 0:
raise ValueError()
download_url = ... | Download a URL.
This will download a file and store it in a '~/data/` folder,
creating directories if need be. It will also work for zip
files, in which case it will unzip all of the files to the
desired location.
Parameters
----------
url : string
The url of the file to download. ... |
1,547 | def find_by_id(self, team, params={}, **options):
path = "/teams/%s" % (team)
return self.client.get(path, params, **options) | Returns the full record for a single team.
Parameters
----------
team : {Id} Globally unique identifier for the team.
[params] : {Object} Parameters for the request |
1,548 | def size(self):
return len(self._query_compiler.index) * len(self._query_compiler.columns) | Get the number of elements in the DataFrame.
Returns:
The number of elements in the DataFrame. |
1,549 | def __value_compare(self, target):
if self.expectation == "__ANY__":
return True
elif self.expectation == "__DEFINED__":
return True if target is not None else False
elif self.expectation == "__TYPE__":
return True if type(target) == self.target_type ... | Comparing result based on expectation if arg_type is "VALUE"
Args: Anything
Return: Boolean |
1,550 | def generate(organization, package, destination):
gen = ResourceGenerator(organization, package)
tmp = tempfile.NamedTemporaryFile(mode=, delete=False)
try:
tmp.write(gen.conf())
finally:
tmp.close()
shutil.copy(tmp.name, os.path.join(destination, ))
tmp = tempfile.NamedT... | Generates the Sphinx configuration and Makefile.
Args:
organization (str): the organization name.
package (str): the package to be documented.
destination (str): the destination directory. |
1,551 | def get_random_service(
service_registry: ServiceRegistry,
block_identifier: BlockSpecification,
) -> Tuple[Optional[str], Optional[str]]:
count = service_registry.service_count(block_identifier=block_identifier)
if count == 0:
return None, None
index = random.SystemRandom().ran... | Selects a random PFS from service_registry.
Returns a tuple of the chosen services url and eth address.
If there are no PFS in the given registry, it returns (None, None). |
1,552 | def sm_dict2lha(d):
blocks = OrderedDict([
(, {: [[1, d[].real], [2, d[].real], [3, d[].real]]}),
(, {: [[1, d[].real], [2, d[].real]]}),
(, {: matrix2lha(d[].real)}),
(, {: matrix2lha(d[].imag)}),
(, {: matrix2lha(d[].real)}),
(, {: matrix2lha(d[].imag)}),
... | Convert a a dictionary of SM parameters into
a dictionary that pylha can convert into a DSixTools SM output file. |
1,553 | def flush(self, timeout=60):
if timeout <= 0:
raise ErrBadTimeout
if self.is_closed:
raise ErrConnectionClosed
future = asyncio.Future(loop=self._loop)
try:
yield from self._send_ping(future)
yield from asyncio.wait_for(future, t... | Sends a ping to the server expecting a pong back ensuring
what we have written so far has made it to the server and
also enabling measuring of roundtrip time.
In case a pong is not returned within the allowed timeout,
then it will raise ErrTimeout. |
1,554 | def coerce(cls, key, value):
if not isinstance(value, MutationDict):
if isinstance(value, dict):
return MutationDict(value)
return Mutable.coerce(key, value)
else:
return value | Convert plain dictionaries to MutationDict. |
1,555 | def open(self):
if self._table_exists():
self.mode = "open"
self._get_table_info()
return self
else:
raise IOError,"Table %s doesn't exist" %self.name | Open an existing database |
1,556 | def check_all_permissions(sender, **kwargs):
if not is_permissions_app(sender):
return
config = getattr(settings, , dict())
for natural_key, permissions in config.items():
if natural_key == :
for content_type in ContentType.objects.all():
... | This syncdb checks our PERMISSIONS setting in settings.py and makes sure all those permissions
actually exit. |
1,557 | def _run_operation_with_response(self, operation, unpack_res,
exhaust=False, address=None):
if operation.exhaust_mgr:
server = self._select_server(
operation.read_preference, operation.session, address=address)
with self._res... | Run a _Query/_GetMore operation and return a Response.
:Parameters:
- `operation`: a _Query or _GetMore object.
- `unpack_res`: A callable that decodes the wire protocol response.
- `exhaust` (optional): If True, the socket used stays checked out.
It is returned along ... |
1,558 | def mach2cas(M, h):
tas = mach2tas(M, h)
cas = tas2cas(tas, h)
return cas | Mach to CAS conversion |
1,559 | def reset_db():
logger = get_logger(PROCESS_SCHEDULER)
logger.info()
ds = ds_manager.ds_factory(logger)
ds._db_client.drop_database(settings.settings[])
logger.info()
connection = ds.connection(COLLECTION_MANAGED_PROCESS)
connection.create_index([(PROCESS_NAME, pymongo.ASCENDING)], un... | drops the *scheduler* database, resets schema |
1,560 | def _set_exp_traffic_class(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("exp_traffic_class_map_name",exp_traffic_class.exp_traffic_class, yang_name="exp-traffic-class", rest_name="exp-traffic-class", parent=self, is_container=, user_or... | Setter method for exp_traffic_class, mapped from YANG variable /qos_mpls/map/exp_traffic_class (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_exp_traffic_class is considered as a private
method. Backends looking to populate this variable should
do so via calling ... |
1,561 | def resource_type_type(loader):
def impl(string):
t_resources = loader.get_models()
if set(string) - set("sco"):
raise ValueError
return t_resources(_str=.join(set(string)))
return impl | Returns a function which validates that resource types string contains only a combination of service,
container, and object. Their shorthand representations are s, c, and o. |
1,562 | def textContent(self, text: str) -> None:
if self._inner_element:
self._inner_element.textContent = text
else:
super().textContent = text | Set text content to inner node. |
1,563 | def select_window(pymux, variables):
window_id = variables[]
def invalid_window():
raise CommandException( % window_id)
if window_id.startswith():
try:
number = int(window_id[1:])
except ValueError:
invalid_window()
else:
w = pymux.a... | Select a window. E.g: select-window -t :3 |
1,564 | def save(self, filename, clobber=True, **kwargs):
if os.path.exists(filename) and not clobber:
raise IOError("filename exists and we have been asked not to"\
" clobber it".format(filename))
if not filename.endswith():
data ... | Save the `Spectrum1D` object to the specified filename.
:param filename:
The filename to save the Spectrum1D object to.
:type filename:
str
:param clobber: [optional]
Whether to overwrite the `filename` if it already exists.
:type clobber:
... |
1,565 | def SETB(cpu, dest):
dest.write(Operators.ITEBV(dest.size, cpu.CF, 1, 0)) | Sets byte if below.
:param cpu: current CPU.
:param dest: destination operand. |
1,566 | def clear_threads(self):
for aThread in compat.itervalues(self.__threadDict):
aThread.clear()
self.__threadDict = dict() | Clears the threads snapshot. |
1,567 | def prt_txt(prt, data_nts, prtfmt=None, nt_fields=None, **kws):
lines = get_lines(data_nts, prtfmt, nt_fields, **kws)
if lines:
for line in lines:
prt.write(line)
else:
sys.stdout.write(" 0 items. NOT WRITING\n") | Print list of namedtuples into a table using prtfmt. |
1,568 | def cmServiceRequest(PriorityLevel_presence=0):
a = TpPd(pd=0x5)
b = MessageType(mesType=0x24)
c = CmServiceTypeAndCiphKeySeqNr()
e = MobileStationClassmark2()
f = MobileId()
packet = a / b / c / e / f
if PriorityLevel_presence is 1:
g = PriorityLevelHdr(ieiPL=0x8, eightBitPL=... | CM SERVICE REQUEST Section 9.2.9 |
1,569 | def validNormalizeAttributeValue(self, doc, name, value):
if doc is None: doc__o = None
else: doc__o = doc._o
ret = libxml2mod.xmlValidNormalizeAttributeValue(doc__o, self._o, name, value)
return ret | Does the validation related extra step of the normalization
of attribute values: If the declared value is not CDATA,
then the XML processor must further process the normalized
attribute value by discarding any leading and trailing
space (#x20) characters, and by replacing sequen... |
1,570 | def has_equal_value(state, ordered=False, ndigits=None, incorrect_msg=None):
if not hasattr(state, "parent"):
raise ValueError(
"You can only use has_equal_value() on the state resulting from check_column, check_row or check_result."
)
if incorrect_msg is None:
incorre... | Verify if a student and solution query result match up.
This function must always be used after 'zooming' in on certain columns or records (check_column, check_row or check_result).
``has_equal_value`` then goes over all columns that are still left in the solution query result, and compares each column with th... |
1,571 | def string_to_tokentype(s):
if isinstance(s, _TokenType):
return s
if not s:
return Token
node = Token
for item in s.split():
node = getattr(node, item)
return node | Convert a string into a token type::
>>> string_to_token('String.Double')
Token.Literal.String.Double
>>> string_to_token('Token.Literal.Number')
Token.Literal.Number
>>> string_to_token('')
Token
Tokens that are already tokens are returned unchanged:
>>> s... |
1,572 | def get_nested_val(key_tuple, dict_obj):
if len(key_tuple) == 1:
return dict_obj[key_tuple[0]]
return get_nested_val(key_tuple[1:], dict_obj[key_tuple[0]]) | Return a value from nested dicts by the order of the given keys tuple.
Parameters
----------
key_tuple : tuple
The keys to use for extraction, in order.
dict_obj : dict
The outer-most dict to extract from.
Returns
-------
value : object
The extracted value, if exist... |
1,573 | def validate(self, instance, value):
try:
floatval = float(value)
if not self.cast and abs(value - floatval) > TOL:
self.error(
instance=instance,
value=value,
extra=.format(TOL),
)
... | Checks that value is a float and in min/max bounds
Non-float numbers are coerced to floats |
1,574 | def list_controller_revision_for_all_namespaces(self, **kwargs):
kwargs[] = True
if kwargs.get():
return self.list_controller_revision_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_controller_revision_for_all_namespaces_with_http_info(*... | list_controller_revision_for_all_namespaces # noqa: E501
list or watch objects of kind ControllerRevision # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_controller_revision_fo... |
1,575 | def hog(image, orientations=8, ksize=(5, 5)):
s0, s1 = image.shape[:2]
try:
k = hog.kernels[str(ksize) + str(orientations)]
except KeyError:
k = _mkConvKernel(ksize, orientations)
hog.kernels[str(ksize) + str(orientations)] = k
out = np.empty(shape=(s0, s1, ... | returns the Histogram of Oriented Gradients
:param ksize: convolution kernel size as (y,x) - needs to be odd
:param orientations: number of orientations in between rad=0 and rad=pi
similar to http://scikit-image.org/docs/dev/auto_examples/plot_hog.html
but faster and with less options |
1,576 | def subprocess_check_output(*args, cwd=None, env=None, stderr=False):
if stderr:
proc = yield from asyncio.create_subprocess_exec(*args, stderr=asyncio.subprocess.PIPE, cwd=cwd, env=env)
output = yield from proc.stderr.read()
else:
proc = yield from asyncio.create_subprocess_exec(*... | Run a command and capture output
:param *args: List of command arguments
:param cwd: Current working directory
:param env: Command environment
:param stderr: Read on stderr
:returns: Command output |
def cleanup(self, force=False):
    """Clean up Yadis-related services in the session and return
    the most-recently-attempted service from the manager, if one
    exists.

    @param force: True if the manager should be deleted regardless
        of whether it's a manager for self.url.

    @return: current service endpoint object, or None if no manager
        was available
    """
    manager = self.getManager(force=force)
    # No manager for this session -> nothing to clean up, no service.
    if manager is None:
        return None
    # Grab the current service before tearing the manager down.
    service = manager.current()
    self.destroyManager(force=force)
    return service
1,578 | def tocimxml(value):
if isinstance(value, (tuple, list)):
array_xml = []
for v in value:
if v is None:
if SEND_VALUE_NULL:
array_xml.append(cim_xml.VALUE_NULL())
else:
array_xml.append(cim_xml.VALUE(None... | Return the CIM-XML representation of the input object,
as an object of an appropriate subclass of :term:`Element`.
The returned CIM-XML representation is consistent with :term:`DSP0201`.
Parameters:
value (:term:`CIM object`, :term:`CIM data type`, :term:`number`, :class:`py:datetime.datetime`, or ... |
1,579 | def create_geometry(self, input_geometry, dip, upper_depth, lower_depth,
mesh_spacing=1.0):
assert((dip > 0.) and (dip <= 90.))
self.dip = dip
self._check_seismogenic_depths(upper_depth, lower_depth)
if not isinstance(input_geometry, Line):
i... | If geometry is defined as a numpy array then create instance of
nhlib.geo.line.Line class, otherwise if already instance of class
accept class
:param input_geometry:
Trace (line) of the fault source as either
i) instance of nhlib.geo.line.Line class
ii) numpy... |
1,580 | def create_widget(self):
    """Create the underlying widget."""
    # NOTE(review): the string literals on the 'style = ...' and
    # 'ArrayAdapter(...)' lines were stripped during extraction -- as
    # written this block does not parse. Restore the default style
    # resource and the adapter layout argument from the project source.
    context = self.get_context()
    d = self.declaration
    style = d.style or
    self.widget = AutoCompleteTextView(context, None, style)
    self.adapter = ArrayAdapter(context, ) | Create the underlying widget. |
def channels_rename(self, *, channel: str, name: str, **kwargs) -> SlackResponse:
    """Renames a channel.

    Args:
        channel (str): The channel id. e.g. 'C1234567890'
        name (str): The new channel name. e.g. 'newchannel'
    """
    self._validate_xoxp_token()
    # Merge the required fields into whatever extra arguments were given.
    payload = dict(kwargs, channel=channel, name=name)
    return self.api_call("channels.rename", json=payload)
1,582 | def hierarchyLookup(self, record):
def _get_lookup(cls):
if cls in self._hierarchyLookup:
return self._hierarchyLookup[cls]
for base in cls.__bases__:
results = _get_lookup(base)
if results:
... | Looks up additional hierarchy information for the inputed record.
:param record | <orb.Table>
:return (<subclass of orb.Table> || None, <str> column) |
1,583 | def _certificate_required(cls, hostname, port=XCLI_DEFAULT_PORT,
ca_certs=None, validate=None):
if not ca_certs:
return False
xlog.debug("CONNECT SSL %s:%s, cert_file=%s",
hostname, port, ca_certs)
certificate = ssl.get_serve... | returns true if connection should verify certificate |
1,584 | def enqueue_conversion_path(url_string, to_type, enqueue_convert):
target_ts = TypeString(to_type)
foreign_res = ForeignResource(url_string)
typed_foreign_res = foreign_res.guess_typed()
if not typed_foreign_res.cache_exists():
typed_foreign_res.symlink_from(foreign_res)
... | Given a URL string that has already been downloaded, enqueue
necessary conversion to get to target type |
1,585 | def _update_key(self, mask, key):
mask = np.asanyarray(mask)
if key in self._data:
self._data[key] = self._data[key][mask] | Mask the value contained in the DataStore at a specified key.
Parameters
-----------
mask: (n,) int
(n,) bool
key: hashable object, in self._data |
1,586 | def rows2skip(self, decdel):
if decdel == :
ms = self.matches_p
elif decdel == :
ms = self.matches_c
cnt = row = 0
for val1, val2 in zip(ms, ms[1:]):
row += 1
if val2 == val1 != 0:
pass
... | Return the number of rows to skip based on the decimal delimiter
decdel.
When each record start to have the same number of matches, this
is where the data starts. This is the idea. And the number of
consecutive records to have the same number of matches is to be
EQUAL_CNT_REQ. |
1,587 | def json_item(model, target=None, theme=FromCurdoc):
/plotpetal_widthpetal_length/plot
with OutputDocumentFor([model], apply_theme=theme) as doc:
doc.title = ""
docs_json = standalone_docs_json([model])
doc = list(docs_json.values())[0]
root_id = doc[][][0]
return {
: targ... | Return a JSON block that can be used to embed standalone Bokeh content.
Args:
model (Model) :
The Bokeh object to embed
target (string, optional)
A div id to embed the model into. If None, the target id must
be supplied in the JavaScript call.
theme (Th... |
def normalize(self, decl_string, arg_separator=None):
    """Recursively normalize a declaration string's template arguments.

    Splits `decl_string` into its name and argument list, normalizes
    each argument, and re-joins them with `arg_separator`.

    :param decl_string: declaration string to normalize
    :param arg_separator: separator passed to `self.join`; None means
        the join's default separator
    :return: the normalized string, or `decl_string` unchanged when it
        contains no template pattern
    """
    if not self.has_pattern(decl_string):
        return decl_string
    name, args = self.split(decl_string)
    # Bug fix: propagate arg_separator into the recursive calls so nested
    # argument lists are joined with the same separator as the outer level
    # (previously nested levels silently fell back to the default).
    args = [self.normalize(arg, arg_separator=arg_separator) for arg in args]
    return self.join(name, args, arg_separator)
1,589 | def execute(self):
from vsgen.util.logger import VSGLogger
VSGLogger.info(self._logname, self._message)
start = time.clock()
VSGWriter.write(self._writables, self._parallel)
end = time.clock()
VSGLogger.info(self._logname, "Wrote %s files in %s seconds:", len(se... | Executes the command. |
1,590 | def hacking_docstring_start_space(physical_line, previous_logical, tokens):
r
docstring = is_docstring(tokens, previous_logical)
if docstring:
start, start_triple = _find_first_of(docstring, START_DOCSTRING_TRIPLE)
if docstring[len(start_triple)] == :
re... | r"""Check for docstring not starting with space.
OpenStack HACKING guide recommendation for docstring:
Docstring should not start with space
Okay: def foo():\n '''This is good.'''
Okay: def foo():\n r'''This is good.'''
Okay: def foo():\n a = ''' This is not a docstring.'''
Okay: def ... |
1,591 | def dot_product_batched_head(q, k, v, gates_q, gates_k, mask_right=False):
nb_buckets = common_layers.shape_list(gates_q)[-1]
@expert_utils.add_name_scope()
def get_dispatcher(gates):
length = common_layers.shape_list(gates)[1]
nb_elems_to_dispatch = tf.reduce_sum(gates, axis=[1, 2])
nb_... | Perform a dot product attention on a single sequence on a single head.
This function dispatch the q, k, v and loop over the buckets to compute the
attention dot product on each subsequences.
Args:
q (tf.Tensor): [batch*heads, length_q, depth_q]
k (tf.Tensor): [batch*heads, length_k, depth_q]
v (tf.T... |
1,592 | def visit_FormattedValue(self, node: AST,
dfltChaining: bool = True) -> str:
format_spec = node.format_spec
return f"{{{self.visit(node.value)}" \
f"{self.CONV_MAP.get(node.conversion, )}" \
f"{+self._nested_str(format_spec) if format_s... | Return `node`s value formatted according to its format spec. |
1,593 | def _recurse(self, matrix, m_list, indices, output_m_list=[]):
while m_list[-1][1] == 0:
m_list = copy(m_list)
m_list.pop()
if not m_list:
matrix_sum = np.sum(matrix)
if matrix_sum < self._current_minimum:
... | This method recursively finds the minimal permutations using a binary
tree search strategy.
Args:
matrix: The current matrix (with some permutations already
performed).
m_list: The list of permutations still to be performed
indices: Set of indices whi... |
1,594 | def setImagePlotAutoRangeOn(self, axisNumber):
    """Sets the image plot's auto-range on for the axis with number axisNumber.

    :param axisNumber: 0 (X-axis), 1 (Y-axis), 2 (both X and Y axes).
    """
    # Delegates to the module-level helper, handing it this plot's X and Y
    # axis-range config-tree items.
    setXYAxesAutoRangeOn(self, self.xAxisRangeCti, self.yAxisRangeCti, axisNumber) | Sets the image plot's auto-range on for the axis with number axisNumber.
:param axisNumber: 0 (X-axis), 1 (Y-axis), 2, (Both X and Y axes). |
def isPositiveStrand(self):
    """Check if this genomic region is on the positive strand.

    :return: True if this element is on the positive strand
    """
    # An unset strand counts as positive only when the class default is
    # the positive strand.
    default_is_positive = self.DEFAULT_STRAND == self.POSITIVE_STRAND
    if self.strand is None and default_is_positive:
        return True
    return self.strand == self.POSITIVE_STRAND
1,596 | def to_text_diagram_drawer(
self,
*,
use_unicode_characters: bool = True,
qubit_namer: Optional[Callable[[ops.Qid], str]] = None,
transpose: bool = False,
precision: Optional[int] = 3,
qubit_order: ops.QubitOrderOrList = ops.QubitOrder.... | Returns a TextDiagramDrawer with the circuit drawn into it.
Args:
use_unicode_characters: Determines if unicode characters are
allowed (as opposed to ascii-only diagrams).
qubit_namer: Names qubits in diagram. Defaults to str.
transpose: Arranges qubit wires ... |
1,597 | def _generate_corpus_table(self, labels, ngrams):
html = []
for label in labels:
html.append(self._render_corpus_row(label, ngrams))
return .join(html) | Returns an HTML table containing data on each corpus' n-grams. |
1,598 | def _prefix_from_ip_int(self, ip_int):
trailing_zeroes = _count_righthand_zero_bits(ip_int,
self._max_prefixlen)
prefixlen = self._max_prefixlen - trailing_zeroes
leading_ones = ip_int >> trailing_zeroes
all_ones = (1 << prefi... | Return prefix length from the bitwise netmask.
Args:
ip_int: An integer, the netmask in expanded bitwise format
Returns:
An integer, the prefix length.
Raises:
ValueError: If the input intermingles zeroes & ones |
def pattern_for_view(self, view, action):
    """Returns the URL pattern for the passed in action.

    If the view supplies its own ``derive_url_pattern`` hook, delegate to
    it; otherwise build a default ``^<path>/<action>/$`` regex pattern.
    """
    # NOTE(review): the original string literals were stripped by extraction
    # (the file did not parse). 'derive_url_pattern' is grounded by the call
    # in the branch below; the fallback regex is a reconstruction -- confirm
    # against the project source.
    if getattr(view, 'derive_url_pattern', None):
        return view.derive_url_pattern(self.path, action)
    else:
        return r'^%s/%s/$' % (self.path, action)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.