Unnamed: 0 (int64, 0–389k) | code (string, 26–79.6k chars) | docstring (string, 1–46.9k chars) |
|---|---|---|
387,100 | def _make_entities_from_ids(entity_cls, entity_objs_and_ids, server_config):
return [
_make_entity_from_id(entity_cls, entity_or_id, server_config)
for entity_or_id
in entity_objs_and_ids
] | Given an iterable of entities and/or IDs, return a list of entities.
:param entity_cls: An :class:`Entity` subclass.
:param entity_obj_or_id: An iterable of
:class:`nailgun.entity_mixins.Entity` objects and/or entity IDs. All of
the entities in this iterable should be of type ``entity_cls``.
... |
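For illustration, a minimal sketch of the single-entity helper this function delegates to. `_make_entity_from_id` does exist in NailGun, but the body below is an assumption reconstructed from the docstring, not the library's actual code:

```python
def _make_entity_from_id(entity_cls, entity_or_id, server_config):
    # Pass through objects that are already entities; wrap bare IDs.
    if isinstance(entity_or_id, entity_cls):
        return entity_or_id
    return entity_cls(server_config, id=entity_or_id)
```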
387,101 | def find(self, name):
if not isinstance(name, basestring):
raise TypeError("name can only be an instance of type basestring")
return_data = self._call("find",
in_p=[name])
return_data = IExtPack(return_data)
return return_data | Returns the extension pack with the specified name if found.
in name of type str
The name of the extension pack to locate.
return return_data of type :class:`IExtPack`
The extension pack if found.
raises :class:`VBoxErrorObjectNotFound`
No extension pack ma... |
387,102 | def _compute_mean(self, C, mag, rhypo, hypo_depth, mean, idx):
mean[idx] = (C[] + C[] * mag + C[] * np.log(rhypo[idx] +
C[] * np.exp(C[] * mag)) + C[] * hypo_depth) | Compute mean value according to equations 10 and 11 page 226. |
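The coefficient keys were stripped during extraction, so the line above is not runnable as shown. A hedged reconstruction of the functional form, with hypothetical keys `c1`–`c6` standing in for the lost coefficient names:

```python
import numpy as np

def _compute_mean(C, mag, rhypo, hypo_depth, mean, idx):
    # 'c1'..'c6' are hypothetical placeholders for the stripped keys.
    mean[idx] = (C['c1'] + C['c2'] * mag
                 + C['c3'] * np.log(rhypo[idx] + C['c4'] * np.exp(C['c5'] * mag))
                 + C['c6'] * hypo_depth)
```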
387,103 | def delete(self, client=None):
return self.taskqueue.delete_task(self.id, client=client) | Deletes a task from Task Queue.
:type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the task's taskqueue.
:rtype: :class:`Task`
:returns: The task... |
387,104 | def RegisterProtoDescriptors(db, *additional_descriptors):
db.RegisterFileDescriptor(artifact_pb2.DESCRIPTOR)
db.RegisterFileDescriptor(client_pb2.DESCRIPTOR)
db.RegisterFileDescriptor(config_pb2.DESCRIPTOR)
db.RegisterFileDescriptor(cron_pb2.DESCRIPTOR)
db.RegisterFileDescriptor(flow_pb2.DESCRIPTOR)
db.... | Registers all API-related descriptors in a given symbol DB. |
387,105 | def get_condarc_channels(self,
normalize=False,
conda_url=,
channels=None):
default_channels = self.load_rc(system=True).get(,
self.DEFAULT_C... | Return all the channel urls defined in .condarc.
If no condarc file is found, use the default channels.
The `default_channel_alias` key is ignored and only the anaconda client
`url` key is used. |
387,106 | def layout(self, dimensions=None, **kwargs):
dimensions = self._valid_dimensions(dimensions)
if len(dimensions) == self.ndims:
with item_check(False):
return NdLayout(self, **kwargs).reindex(dimensions)
return self.groupby(dimensions, container_type=NdLayout,... | Group by supplied dimension(s) and lay out groups
Groups data by supplied dimension(s) laying the groups along
the dimension(s) out in a NdLayout.
Args:
dimensions: Dimension(s) to group by
Returns:
NdLayout with supplied dimensions |
387,107 | def cli(env, package_keyname):
manager = ordering.OrderingManager(env.client)
table = formatting.Table(COLUMNS)
locations = manager.package_locations(package_keyname)
for region in locations:
for datacenter in region[]:
table.add_row([
datacenter[][],
... | List Datacenters a package can be ordered in.
Use the location Key Name to place orders |
387,108 | def _check_cargs(self, cargs):
if not all(isinstance(i, tuple) and
isinstance(i[0], ClassicalRegister) and
isinstance(i[1], int) for i in cargs):
raise QiskitError("carg not (ClassicalRegister, int) tuple")
if not all(self.has_register(i[0]) for... | Raise exception if clbit is not in this circuit or bad format. |
387,109 | def combobox_set_model_from_list(cb, items):
cb.clear()
model = gtk.ListStore(str)
for i in items:
model.append([i])
cb.set_model(model)
if type(cb) == gtk.ComboBoxEntry:
cb.set_text_column(0)
elif type(cb) == gtk.ComboBox:
cell = gtk.CellRendererText()
cb.pa... | Setup a ComboBox or ComboBoxEntry based on a list of strings. |
387,110 | def fastaIterator(fn, useMutableString=False, verbose=False):
fh = fn
if type(fh).__name__ == "str":
fh = open(fh)
if verbose:
try:
pind = __build_progress_indicator(fh)
except ProgressIndicatorError as e:
sys.stderr.write("Warning: unable to show progress for stream. " +
... | A generator function which yields fastaSequence objects from a fasta-format
file or stream.
:param fn: a file-like stream or a string; if this is a string, it's
treated as a filename, else it's treated it as a file-like
object, which must have a readline() method.
:param useMu... |
387,111 | def substitute_variables(cls, configuration, value, ref):
if isinstance(value, str):
while True:
match = cls.REF_PATTERN.search(value)
if match is None:
break
path = os.path.join(os.path.dirname(ref), match.gro... | Substitute variables in `value` from `configuration` where any path reference is relative to
`ref`.
Parameters
----------
configuration : dict
configuration (required to resolve intra-document references)
value :
value to resolve substitutions for
... |
387,112 | def plot_best_worst_fits(assignments_df, data, modality_col=,
score=):
ncols = 2
nrows = len(assignments_df.groupby(modality_col).groups.keys())
fig, axes = plt.subplots(nrows=nrows, ncols=ncols,
figsize=(nrows*4, ncols*6))
axes_iter = axes.fl... | Violinplots of the highest and lowest scoring of each modality |
387,113 | def build_opener(self):
http_handler = urllib2.HTTPHandler()
if util.empty(self.transport.proxy_url):
return urllib2.build_opener(http_handler)
proxy_handler = urllib2.ProxyHandler(
{self.transport.proxy_url[:4]: self.transport.proxy_url})
return urll... | Builds url opener, initializing proxy.
@return: OpenerDirector |
387,114 | def int_filter(text):
res = list()
for char in text:
if char.isdigit():
res.append(char)
return int("".join(res)) | Extract integer from text.
**Chinese docs**
Extracts the integers contained in the text. |
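A quick usage check of `int_filter`, with behavior inferred directly from the code above (note that it concatenates digits rather than summing them):

```python
assert int_filter("a1b2c3") == 123
assert int_filter("order #2024-77") == 202477  # digits concatenated; signs/grouping ignored
```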
387,115 | def regularrun(
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
loop_again = True
command_string = regulartype(prompt_template)
if command_string == TAB:
loop_again = False
... | Allow user to run their own live commands until CTRL-Z is pressed again. |
387,116 | def delete_device(name, safety_on=True):
config = _get_vistara_configuration()
if not config:
return False
access_token = _get_oath2_access_token(config[], config[])
if not access_token:
return
query_string = .format(name)
... | Deletes a device from Vistara based on DNS name or partial name. By default,
delete_device will only perform the delete if a single host is returned. Set
safety_on=False to delete all matches (up to default API search page size)
CLI Example:
.. code-block:: bash
salt-run vistara.delete_device... |
387,117 | def get_folder(service_instance, datacenter, placement, base_vm_name=None):
log.trace()
if base_vm_name:
vm_object = get_vm_by_property(service_instance, base_vm_name, vm_properties=[])
vm_props = salt.utils.vmware.get_properties_of_managed_object(vm_object, properties=[])
if in vm... | Returns a Folder Object
service_instance
Service instance object
datacenter
Name of the datacenter
placement
Placement dictionary
base_vm_name
Existing virtual machine name (for cloning) |
387,118 | def random_jpath(depth = 3):
chunks = []
while depth > 0:
length = random.randint(5, 15)
ident = .join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(length))
if random.choice((True, False)):
index = random.randint(0, 10)
iden... | Generate random JPath with given node depth. |
387,119 | def reinit(self):
log.debug("Reinitializing socket connection for %s:%d" % (self.host, self.port))
if self._sock:
self.close()
try:
self._sock = socket.create_connection((self.host, self.port), self.timeout)
except socket.error:
log.exceptio... | Re-initialize the socket connection
close current socket (if open)
and start a fresh connection
raise ConnectionError on error |
387,120 | def _setup_freqs(self):
if self.header[b] > 0:
self.f_start = self.f_begin + self.chan_start_idx*abs(self.header[b])
self.f_stop = self.f_begin + self.chan_stop_idx*abs(self.header[b])
else:
self.f_start = self.f_end - self.chan_stop_idx*abs(self.header[b])
... | Updating frequency borders from channel values |
387,121 | def make_optimize_tensor(self, model, session=None, var_list=None, **kwargs):
session = model.enquire_session(session)
objective = model.objective
full_var_list = self._gen_var_list(model, var_list)
with session.as_default():
minimize = self.optimizer.minimi... | Make Tensorflow optimization tensor.
This method builds optimization tensor and initializes all necessary variables
created by optimizer.
:param model: GPflow model.
:param session: Tensorflow session.
:param var_list: List of variables for training.
:par... |
387,122 | def get_service_definitions(self, service_type=None):
route_values = {}
if service_type is not None:
route_values[] = self._serialize.url(, service_type, )
response = self._send(http_method=,
location_id=,
version=,... | GetServiceDefinitions.
[Preview API]
:param str service_type:
:rtype: [ServiceDefinition] |
387,123 | def parse_line(self, line, lineno):
if line.startswith():
self.is_taskcluster = True
if self.is_taskcluster:
... | Check a single line for an error. Keeps track of the linenumber |
387,124 | def parse_commandline(argv):
ap = ArgumentParser(
prog=,
description=DESCRIPTION,
epilog=EPILOG,
)
ap.add_argument(
, action=, version=.format(version),
help="shows version and exits"
)
ap.add_argument(
, metavar=,
help="original file"
)
ap.add_argument(
, metavar=,
... | Returns the arguments parsed from *argv* as a namespace. |
387,125 | def on_post(self):
request = self.environ[]
try:
return self.process_request(request)
except ClientError as exc:
return self.on_client_error(exc)
except BadGateway as exc:
return self.on_bad_gateway(exc)
except InvalidConfig:
... | Extracts the request, feeds the module, and returns the response. |
387,126 | def random_word(length,dictionary = False):
if dictionary:
try:
with open() as fp:
words = [word.lower()[:-1] for word in fp.readlines() if re.match(.format(+str(length)+),word)]
return random.choice(words)
except FileNotFoundError:
pass
v... | Creates random lowercase words from dictionary or by alternating vowels and consonants
The second method chooses from 85**length words.
The dictionary method chooses from 3000--12000 words for 3<=length<=12
(though this of course depends on the available dictionary)
:param length: word length
... |
387,127 | def mkdir_command(endpoint_plus_path):
endpoint_id, path = endpoint_plus_path
client = get_client()
autoactivate(client, endpoint_id, if_expires_in=60)
res = client.operation_mkdir(endpoint_id, path=path)
formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message") | Executor for `globus mkdir` |
387,128 | def info(self, *msg):
label = colors.blue("INFO")
self._msg(label, *msg) | Prints a message with an info prefix |
387,129 | def optimize(population, toolbox, ngen, archive=None, stats=None, verbose=False, history=None):
start = time.time()
if history is not None:
history.update(population)
logbook = tools.Logbook()
logbook.header = [, , ] + (stats.fields if stats else [])
render_fitness(population, toolbox, ... | Optimize a population of individuals.
:param population:
:param toolbox:
:param mut_prob:
:param ngen:
:param archive:
:param stats:
:param verbose:
:param history:
:return: |
387,130 | def unimapping(arg, level):
if not isinstance(arg, collections.Mapping):
raise TypeError(
.format(type(arg).__name__)
)
result = []
for i in arg.items():
result.append(
pretty_spaces(level) + u.join(map(functools.partial(convert, level=level), i))
... | Mapping object to unicode string.
:type arg: collections.Mapping
:param arg: mapping object
:type level: int
:param level: deep level
:rtype: unicode
:return: mapping object as unicode string |
387,131 | def store(self, text, tier):
store = self._stores.get(tier, None)
if not store:
store = AutoSplittingFile(self._dir, self._lines_per_store, self._file_name, tier)
self._stores[tier] = store
store.write(text) | Writes text to the underlying Store mapped at tier. If the store doesn't exist yet, it creates it
:param text: the text to write
:param tier: the tier used to identify the store
:return: |
387,132 | def check_file_for_tabs(cls, filename, verbose=True):
filename = path_expand(filename)
file_contains_tabs = False
with open(filename, ) as f:
lines = f.read().split("\n")
line_no = 1
for line in lines:
if "\t" in line:
file_conta... | identifies if the file contains tabs and returns True if it
does. It also prints the location of the lines and columns. If
verbose is set to False, the location is not printed.
:param verbose: if true prints issues
:param filename: the filename
:type filename: str
:rtype... |
387,133 | def remove_repositories(repositories, default_repositories):
repos = []
for repo in repositories:
if repo in default_repositories:
repos.append(repo)
return repos | Remove non-default repositories |
387,134 | def combine_mv_and_lv(mv, lv):
combined = {
c: pd.concat([mv[c], lv[c]], axis=0) for c in list(lv.keys())
}
combined[] = mv[]
return combined | Combine MV and LV grid topology in PyPSA format |
387,135 | def is_deletion(self):
return (len(self.ref) > len(self.alt)) and self.ref.startswith(self.alt) | Does this variant represent the deletion of nucleotides from the
reference genome? |
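A standalone restatement of the predicate for illustration (the ref/alt strings are hypothetical, not taken from the source):

```python
def is_deletion(ref, alt):
    # Deletion: alt is a strict prefix of ref, i.e. trailing bases were removed.
    return len(ref) > len(alt) and ref.startswith(alt)

assert is_deletion("ATG", "A") is True    # "TG" deleted
assert is_deletion("A", "ATG") is False   # insertion, not deletion
assert is_deletion("AT", "AC") is False   # substitution
```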
387,136 | def continue_abort(self,
root_pipeline_key,
cursor=None,
max_to_notify=_MAX_ABORTS_TO_BEGIN):
if not isinstance(root_pipeline_key, db.Key):
root_pipeline_key = db.Key(root_pipeline_key)
url=se... | Sends the abort signal to all children for a root pipeline.
Args:
root_pipeline_key: db.Key of the root pipeline to abort.
cursor: The query cursor for enumerating _PipelineRecords when inserting
tasks to cause child pipelines to terminate.
max_to_notify: Used for testing. |
387,137 | def from_array(array):
if array is None or not array:
return None
assert_type_or_raise(array, dict, parameter_name="array")
data = {}
data[] = u(array.get())
data[] = int(array.get())
instance = LabeledPrice(**data)
instance... | Deserialize a new LabeledPrice from a given dictionary.
:return: new LabeledPrice instance.
:rtype: LabeledPrice |
387,138 | def guinierplot(*args, **kwargs):
ret=plotsascurve(*args, **kwargs)
plt.xscale(,exponent=2)
plt.yscale()
return ret | Make a Guinier plot. This is simply a wrapper around plotsascurve(). |
387,139 | def handle_exists(self, spec, checkable):
if not isinstance(spec, bool):
msg =
raise InvalidQuery(msg)
return spec | The implementation of this one is weird. By the time
the {'$exists': True} spec gets to the dispatched
handler, the key presumably exists.
So we just parrot the assertion the spec makes. If it
asserts the key exists, we return True. If it asserts
the key doesn't exist, we return... |
387,140 | def find(self, which, param):
for i, layer in enumerate(self.layers):
if which == i or which == layer.name:
return layer.find(param)
raise KeyError(which) | Get a parameter from a layer in the network.
Parameters
----------
which : int or str
The layer that owns the parameter to return.
If this is an integer, then 0 refers to the input layer, 1 refers
to the first hidden layer, 2 to the second, and so on.
... |
387,141 | def user_filter(config, message, fasnick=None, *args, **kw):
fasnick = kw.get('fasnick', fasnick)
if fasnick:
return fasnick in fmn.rules.utils.msg2usernames(message, **config) | A particular user
Use this rule to include messages that are associated with a
specific user. |
387,142 | def remove_core_element(self, model):
gv_name = model
if self.global_variable_is_editable(gv_name, "Deletion"):
try:
self.model.global_variable_manager.delete_variable(gv_name)
except AttributeError as e:
logger.warning("The respective glo... | Remove respective core element of handed global variable name
:param str model: String that is the key/gv_name of core element which should be removed
:return: |
387,143 | def get_tree(cls, *condition, **kwargs):
parent_field = kwargs.pop(, )
parent = kwargs.pop(, None)
parent_order_by = kwargs.pop(, None)
current = kwargs.pop(, None)
order_by = kwargs.pop(, None)
id_field = kwargs.pop(, )
mode = kwargs.pop(, )
if m... | parent is root parent value, default is None
current is current value
condition is extra condition for select root records
mode is search method, value is 'wide' or 'deep' |
387,144 | def get_is_value(tag):
if tag.VR == or tag.VR == :
value = int(tag.value.decode("ascii").replace(" ", ""))
return value
return int(tag.value) | Getters for data that also work with implicit transfersyntax
:param tag: the tag to read |
387,145 | def get_obs_route(value):
obs_route = ObsRoute()
while value and (value[0]== or value[0] in CFWS_LEADER):
if value[0] in CFWS_LEADER:
token, value = get_cfws(value)
obs_route.append(token)
elif value[0] == :
obs_route.append(ListSeparator)
val... | obs-route = obs-domain-list ":"
obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain])
Returns an obs-route token with the appropriate sub-tokens (that is,
there is no obs-domain-list in the parse tree). |
387,146 | def _recv(self):
prefix = self._read(self.prefix_len)
msg = self._read(self._extract_len(prefix))
return prefix + msg | Receives and returns a message from Scratch |
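The snippet implies a fixed-size length prefix framing each message. A sketch of what `_extract_len` might do, assuming the 4-byte big-endian length prefix of Scratch's remote-sensor protocol:

```python
import struct

def _extract_len(prefix):
    # Assumption: Scratch frames each message with a 4-byte big-endian length.
    return struct.unpack('>I', prefix)[0]
```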
387,147 | def extract_notebook_metatab(nb_path: Path):
from metatab.rowgenerators import TextRowGenerator
import nbformat
with nb_path.open() as f:
nb = nbformat.read(f, as_version=4)
lines = .join([] + [get_cell_source(nb, tag) for tag in [, ,
... | Extract the metatab lines from a notebook and return a Metapack doc |
387,148 | def read_from_cache(self, domains=None):
logger.info(f)
if domains is not None and isinstance(domains, list):
dfs = {domain: self.read_entry(domain) for domain in domains}
else:
dfs = {name: self.read_entry(name)
for name in os.listdir(self.EXT... | Returns:
dict: Dict[str, DataFrame] |
387,149 | def _python_rpath(self):
| The relative path (from environment root) to python. |
387,150 | def compute_checksum(self, payload_offset: Optional[int]=None):
if not self.block_file:
self.fields[] =
return
block_hasher = hashlib.sha1()
payload_hasher = hashlib.sha1()
with wpull.util.reset_file_offset(self.block_file):
if payload_offs... | Compute and add the checksum data to the record fields.
This function also sets the content length. |
387,151 | def _recipients_from_cloud(self, recipients, field=None):
recipients_data = []
for recipient in recipients:
recipients_data.append(
self._recipient_from_cloud(recipient, field=field))
return Recipients(recipients_data, parent=self, field=field) | Transform a recipient from cloud data to object data |
387,152 | def _get_event_id(object_type: str) -> str:
key = _keys.event_counter(object_type)
DB.watch(key, pipeline=True)
count = DB.get_value(key)
DB.increment(key)
DB.execute()
if count is None:
count = 0
return .format(object_type, int(count)) | Return an event key for the event on the object type.
This must be a unique event id for the object.
Args:
object_type (str): Type of object
Returns:
str, event id |
387,153 | def ParseOptions(cls, options, configuration_object):
if not isinstance(configuration_object, tools.CLITool):
raise errors.BadConfigObject(
)
hashers = cls._ParseStringOption(
options, , default_value=cls._DEFAULT_HASHER_STRING)
hasher_file_size_limit = cls._ParseNumericOption... | Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
BadConfigOption: when a configuration... |
387,154 | def get_parser(self):
parser = self.parser_cls(prog=self.prog_name, usage=self.get_usage(),
stream=self.stderr)
subparsers = parser.add_subparsers(
title=,
)
for name, command in self.registry.items():
cmdparser = subparsers.add_parser(name, h... | Returns :class:`monolith.cli.Parser` instance for this
*ExecutionManager*. |
387,155 | def from_conversation_event(conversation, conv_event, prev_conv_event,
datetimefmt, watermark_users=None):
user = conversation.get_user(conv_event.user_id)
if prev_conv_event is not None:
is_new_day = (conv_event.timestamp.astimezone... | Return MessageWidget representing a ConversationEvent.
Returns None if the ConversationEvent does not have a widget
representation. |
387,156 | def create_ip_arp_reply(srchw, dsthw, srcip, targetip):
pkt = create_ip_arp_request(srchw, srcip, targetip)
pkt[0].dst = dsthw
pkt[1].operation = ArpOperation.Reply
pkt[1].targethwaddr = dsthw
return pkt | Create an ARP reply (just change what needs to be changed
from a request) |
387,157 | def layers(self):
try:
response = self.d.history(self.image_id)
except docker.errors.NotFound:
raise NotAvailableAnymore()
layers = []
for l in response:
... | Similar to parent images, except that it uses the /history API endpoint
:return: |
387,158 | def parse_relations(
belstr: str, char_locs: CharLocs, parsed: Parsed, errors: Errors
) -> Tuple[Parsed, Errors]:
quotes = char_locs["quotes"]
quoted_range = set([i for start, end in quotes.items() for i in range(start, end)])
for match in relations_pattern_middle.finditer(belstr):
(start,... | Parse relations from BEL string
Args:
belstr: BEL string as one single string (not list of chars)
char_locs: paren, comma and quote char locations
parsed: data structure for parsed functions, relations, nested
errors: error messages
Returns:
(parsed, errors): |
387,159 | def set_channel_created(self, channel_link, channel_id):
self.channel_link = channel_link
self.channel_id = channel_id
self.__record_progress(Status.PUBLISH_CHANNEL if config.PUBLISH else Status.DONE) | set_channel_created: records progress after creating channel on Kolibri Studio
Args:
channel_link (str): link to uploaded channel
channel_id (str): id of channel that has been uploaded
Returns: None |
387,160 | def apply_sfr_seg_parameters(seg_pars=True, reach_pars=False):
if not seg_pars and not reach_pars:
raise Exception("gw_utils.apply_sfr_pars() error: both seg_pars and reach_pars are False")
import flopy
bak_sfr_file,pars = None,None
... | apply the SFR segment multiplier parameters. Expected to be run in the same dir
as the model exists
Parameters
----------
reach_pars : bool
if reach parameters need to be applied
Returns
-------
sfr : flopy.modflow.ModflowSfr instance
Note
----
expects... |
387,161 | def resetAndRejoin(self, timeout):
print % self.port
print timeout
try:
self._sendline()
self.isPowerDown = True
time.sleep(timeout)
if self.deviceRole == Thread_Device_Role.SED:
self.setPollingRate(self.sedPollingRate)
... | reset and join back Thread Network with a given timeout delay
Args:
timeout: a timeout interval before rejoin Thread Network
Returns:
True: successful to reset and rejoin Thread Network
False: fail to reset and rejoin the Thread Network |
387,162 | def generateSplines(self):
_ = returnSplineList(self.dependentVar, self.independentVar,
subsetPercentage=self.splineSubsetPercentage,
cycles=self.splineCycles,
minKnotPoints=self.splineMinKnotPoins,
... | #TODO: docstring |
387,163 | def _equalizeHistogram(img):
intType = None
if not in img.dtype.str:
TO_FLOAT_TYPES = {np.dtype(): np.float16,
np.dtype(): np.float32,
np.dtype(): np.float64,
np.dtype(): np.float64}
intType = img.... | Histogram equalisation not bounded to int() or an image depth of 8 bit;
also works with negative numbers |
387,164 | def check_purge_status(self, purge_id):
content = self._fetch("/purge?id=%s" % purge_id)
return map(lambda x: FastlyPurgeStatus(self, x), content) | Get the status and times of a recently completed purge. |
387,165 | def iptag_clear(self, iptag, x, y):
self._send_scp(x, y, 0, SCPCommands.iptag,
int(consts.IPTagCommands.clear) << 16 | iptag) | Clear an IPTag.
Parameters
----------
iptag : int
Index of the IPTag to clear. |
387,166 | def mix(self, color1, color2, weight=50, *args):
if color1 and color2:
if isinstance(weight, string_types):
weight = float(weight.strip())
weight = ((weight / 100.0) * 2) - 1
rgb1 = self._hextorgb(color1)
rgb2 = self._hextorgb(color2)
... | This algorithm factors in both the user-provided weight
and the difference between the alpha values of the two colors
to decide how to perform the weighted average of the two RGB values.
It works by first normalizing both parameters to be within [-1, 1],
where 1 indicates "only use colo... |
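A worked example of the normalization step described above (the direction of the blend is assumed by analogy with Sass's `mix()`, where weight 100 returns the first color):

```python
# Map a 0-100 weight onto [-1, 1] as in the snippet above.
for weight in (0.0, 50.0, 100.0):
    w = ((weight / 100.0) * 2) - 1
    print(weight, "->", w)   # 0 -> -1.0, 50 -> 0.0 (even blend), 100 -> 1.0
```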
387,167 | def _create_archive_table(self, table_name):
if table_name in self._get_table_names():
raise KeyError(.format(table_name))
try:
table = self._resource.create_table(
TableName=table_name,
KeySchema=[{: , : }],
AttributeDefi... | Dynamo implementation of BaseDataManager create_archive_table
A waiter object is implemented to ensure table creation completes before moving on.
This will slow down table creation. However, since we only create the
table once, this should not impact users.
Parameters
----------
tab... |
387,168 | def serialize_gen(
obj_pyxb, encoding=, pretty=False, strip_prolog=False, xslt_url=None
):
assert d1_common.type_conversions.is_pyxb(obj_pyxb)
assert encoding in (None, , )
try:
obj_dom = obj_pyxb.toDOM()
except pyxb.ValidationError as e:
raise ValueError(
.format(e.... | Serialize PyXB object to XML.
Args:
obj_pyxb: PyXB object
PyXB object to serialize.
encoding: str
Encoding to use for XML doc bytes
pretty: bool
True: Use pretty print formatting for human readability.
strip_prolog:
True: remove any XML prolog (e.g., ``<?x... |
387,169 | def toLily(self):
lilystring = ""
if not self.autoBeam:
lilystring += "\\autoBeamOff"
children = self.SortedChildren()
if not hasattr(self, "transpose"):
self.transpose = None
for child in range(len(children)):
measureNode = self.Get... | Method which converts the object instance, its attributes and children to a string of lilypond code
:return: str of lilypond code |
387,170 | def basename_without_extension(self):
ret = self.basename.rsplit('.', 1)[0]
if ret.endswith('.tar'):
ret = ret[0:len(ret)-4]
return ret | Get the ``os.path.basename`` of the local file, if any, with extension removed. |
387,171 | def insert_paulis(self, indices=None, paulis=None, pauli_labels=None):
if pauli_labels is not None:
if paulis is not None:
raise QiskitError("Please only provide either `paulis` or `pauli_labels`")
if isinstance(pauli_labels, str):
pauli_labels = ... | Insert or append pauli to the targeted indices.
If indices is None, it means append at the end.
Args:
indices (list[int]): the qubit indices to be inserted
paulis (Pauli): the to-be-inserted or appended pauli
pauli_labels (list[str]): the to-be-inserted or appended ... |
387,172 | def sparse_to_unmasked_sparse(self):
return mapping_util.sparse_to_unmasked_sparse_from_mask_and_pixel_centres(
total_sparse_pixels=self.total_sparse_pixels, mask=self.regular_grid.mask,
unmasked_sparse_grid_pixel_centres=self.unmasked_sparse_grid_pixel_centres).astype() | The 1D index mappings between the masked sparse-grid and unmasked sparse grid. |
387,173 | def rotate(a, th):
return np.sum(a[..., np.newaxis] * R_rot(th), axis=-2) | Return cartesian vectors, after rotation by specified angles about
each degree of freedom.
Parameters
----------
a: array, shape (n, d)
Input d-dimensional cartesian vectors, left unchanged.
th: array, shape (n, m)
Angles by which to rotate about each m rotational degree of freedom
... |
387,174 | def find_converting_reactions(model, pair):
first = set(find_met_in_model(model, pair[0]))
second = set(find_met_in_model(model, pair[1]))
hits = list()
for rxn in model.reactions:
if len(first & set(rxn.reactants)) > 0 and len(
second & set(rxn.products)) > 0:
... | Find all reactions which convert a given metabolite pair.
Parameters
----------
model : cobra.Model
The metabolic model under investigation.
pair: tuple or list
A pair of metabolite identifiers without compartment suffix.
Returns
-------
frozenset
The set of reactio... |
387,175 | def combine_tax_scales(node):
combined_tax_scales = None
for child_name in node:
child = node[child_name]
if not isinstance(child, AbstractTaxScale):
log.info(.format(child_name, child))
continue
if combined_tax_scales is None:
combined_tax_scal... | Combine all the MarginalRateTaxScales in the node into a single MarginalRateTaxScale. |
387,176 | def _distort_color(image, color_ordering=0, scope=None):
with tf.name_scope(scope, "distort_color", [image]):
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(ima... | Distort the color of a Tensor image.
Each color distortion is non-commutative and thus ordering of the color ops
matters. Ideally we would randomly permute the ordering of the color ops.
Rather then adding that level of complication, we select a distinct ordering
of color ops for each preprocessing thread.
... |
387,177 | def from_text(cls, text, mapping=):
graphemes = Counter(grapheme_pattern.findall(text))
specs = [
OrderedDict([
(cls.GRAPHEME_COL, grapheme),
(, frequency),
(mapping, grapheme)])
for grapheme, frequency in graphemes.most_co... | Create a Profile instance from the Unicode graphemes found in `text`.
Parameters
----------
text
mapping
Returns
-------
A Profile instance. |
387,178 | def index_agreement(s, o):
ia = 1 - (np.sum((o-s)**2)) /\
(np.sum((np.abs(s-np.mean(o))+np.abs(o-np.mean(o)))**2))
return ia | index of agreement
input:
s: simulated
o: observed
output:
ia: index of agreement |
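A self-contained numeric check of the formula above on hypothetical data:

```python
import numpy as np

s = np.array([1.0, 2.0, 3.0])   # simulated (hypothetical)
o = np.array([1.1, 1.9, 3.2])   # observed (hypothetical)

ia = 1 - np.sum((o - s) ** 2) / np.sum(
    (np.abs(s - np.mean(o)) + np.abs(o - np.mean(o))) ** 2)
print(ia)  # ~0.99; equals 1.0 only for perfect agreement (s == o)
```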
387,179 | def random(cls, num_qubits, seed=None):
if seed is not None:
np.random.seed(seed)
z = np.random.randint(2, size=num_qubits).astype(np.bool)
x = np.random.randint(2, size=num_qubits).astype(np.bool)
return cls(z, x) | Return a random Pauli on number of qubits.
Args:
num_qubits (int): the number of qubits
seed (int): Optional. To set a random seed.
Returns:
Pauli: the random pauli |
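A usage sketch; the `z`/`x` attribute names are assumed from the constructor call in the snippet above:

```python
p1 = Pauli.random(4, seed=42)
p2 = Pauli.random(4, seed=42)
# Reseeding with the same value must reproduce the same operator.
assert (p1.z == p2.z).all() and (p1.x == p2.x).all()
```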
387,180 | def _get_directives_and_roles_from_sphinx():
if SPHINX_INSTALLED:
sphinx_directives = list(sphinx.domains.std.StandardDomain.directives)
sphinx_roles = list(sphinx.domains.std.StandardDomain.roles)
for domain in [sphinx.domains.c.CDomain,
sphinx.domains.cpp.CPPDo... | Return a tuple of Sphinx directive and roles. |
387,181 | def download_image(self, img_url):
img_request = None
try:
img_request = requests.request(
, img_url, stream=True, proxies=self.proxies)
if img_request.status_code != 200:
raise ImageDownloadError(img_request.status_code)
except:
... | Downloads a single image.
Downloads img_url using self.page_url as base.
Also, raises the appropriate exception if required. |
387,182 | def load_gffutils_db(f):
import gffutils
db = gffutils.FeatureDB(f, keep_order=True)
return db | Load database for gffutils.
Parameters
----------
f : str
Path to database.
Returns
-------
db : gffutils.FeatureDB
gffutils feature database. |
387,183 | def get_compatible_generator_action(self, filename):
for action in self.__generator_actions:
if action.act_on_file(filename):
return action
return None | Return the **first** compatible :class:`GeneratorAction` for a given filename or ``None`` if none is found.
Args:
filename (str): The filename of the template to process. |
387,184 | def rotate_content(day=None):
for main in Main.objects.all():
site = main.sites_rooted_here.all().first()
main_lang = Languages.for_site(site).languages.filter(
is_main_language=True).first()
index = SectionIndexPage.objects.live().child_of(main).first()
site_s... | this method gets the parameters that are needed for rotate_latest
and rotate_featured_in_homepage methods, and calls them both |
387,185 | def read_from_LSQ(self, LSQ_file):
cont = self.user_warning(
"LSQ import only works if all measurements are present and not averaged during import from magnetometer files to magic format. Do you wish to continue reading interpretations?")
if not cont:
return
self... | Clears all current interpretations and replaces them with
interpretations read from LSQ file.
Parameters
----------
LSQ_file : path to LSQ file to read in |
387,186 | def concatenate_not_none(l, axis=0):
mask = []
for i in range(len(l)):
if l[i] is not None:
mask.append(i)
l_stacked = np.concatenate([l[i] for i in mask], axis=axis)
return l_stacked | Construct a numpy array by stacking not-None arrays in a list
Parameters
----------
l : list of arrays
The list of arrays to be concatenated. Arrays have same shape in all
but one dimension or are None, in which case they are ignored.
axis : int, default = 0
Axis for the co... |
387,187 | def get_gtf_db(gtf, in_memory=False):
db_file = gtf + ".db"
if gtf.endswith(".gz"):
db_file = gtf[:-3] + ".db"
if file_exists(db_file):
return gffutils.FeatureDB(db_file)
db_file = ":memory:" if in_memory else db_file
if in_memory or not file_exists(db_file):
debug()
infer_extent = guess... | create a gffutils DB |
387,188 | def remove(self, flag, extra):
self.flag = flag
self.extra = extra
self.dep_path = self.meta.log_path + "dep/"
dependencies, rmv_list = [], []
self.removed = self._view_removed()
if not self.removed:
print("")
else:
msg = "packa... | Remove Slackware binary packages |
387,189 | def get_uuid(type=4):
import uuid
name = 'uuid' + str(type)
u = getattr(uuid, name)
return u().hex | Get uuid value |
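Usage sketch: the default returns a version-4 (random) UUID hex string, and the integer argument selects the `uuid` module function by name:

```python
print(get_uuid())   # uuid4().hex -> 32 lowercase hex characters
print(get_uuid(1))  # uuid1().hex (time-based)
```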
387,190 | def encode(self, uuid, pad_length=22):
return self._num_to_string(uuid.int, pad_to_length=pad_length) | Encodes a UUID into a string (LSB first) according to the alphabet
If the leftmost (MSB) bits are 0, the string might be shorter |
387,191 | def create_configuration(self, node, ports):
target_raid_config = node.get(, {}).copy()
return hpssa_manager.create_configuration(
raid_config=target_raid_config) | Create RAID configuration on the bare metal.
This method creates the desired RAID configuration as read from
node['target_raid_config'].
:param node: A dictionary of the node object
:param ports: A list of dictionaries containing information of ports
for the node
:r... |
387,192 | def get_energies(atoms_list):
if len(atoms_list) == 1:
return atoms_list[0].get_potential_energy()
elif len(atoms_list) > 1:
energies = []
for atoms in atoms_list:
energies.append(atoms.get_potential_energy())
return energies | Potential energy for a list of atoms objects |
387,193 | def get_thin_rect_vertices(ox, oy, dx, dy, r):
if ox < dx:
leftx = ox
rightx = dx
xco = 1
elif ox > dx:
leftx = ox * -1
rightx = dx * -1
xco = -1
else:
return [
ox - r, oy,
ox + r, oy,
ox + r, dy,
ox... | Given the starting point, ending point, and width, return a list of
vertex coordinates at the corners of the line segment
(really a thin rectangle). |
387,194 | def get_arguments(self):
args = loads(self.grid_arguments)[] if isinstance(self.grid_arguments, bytes) else loads(self.grid_arguments.encode())[]
retval = {}
if in args:
retval[] = args[]
if in args and args[] is not None:
retval[] = args[]
if in args and args[] is... | Returns the additional options for the grid (such as the queue, memory requirements, ...). |
387,195 | def set_common_fields(self, warc_type: str, content_type: str):
self.fields[self.WARC_TYPE] = warc_type
self.fields[self.CONTENT_TYPE] = content_type
self.fields[self.WARC_DATE] = wpull.util.datetime_str()
self.fields[self.WARC_RECORD_ID] = .format(uuid.uuid4().urn) | Set the required fields for the record. |
387,196 | def load_modes(node):
if isinstance(node, list):
values = [load_mode(child) for child in node]
keys = [mode.key for mode in values]
return dict(zip(keys,values))
elif isinstance(node, dict):
values = {key: load_mode(child) for key, child in node.items()}
return values
el... | Load all observing modes |
387,197 | def load_modules(self):
if self.INTERFACES_MODULE is None:
raise NotImplementedError("A module containing interfaces modules "
"should be setup in INTERFACES_MODULE !")
else:
for module, permission in self.modules.items():
... | Should instance interfaces and set them to interface, following `modules` |
387,198 | def update_asset(self, asset_form=None):
if asset_form is None:
raise NullArgument()
if not isinstance(asset_form, abc_repository_objects.AssetForm):
raise InvalidArgument()
if not asset_form.is_for_update():
raise InvalidArgument()
try:
... | Updates an existing asset.
:param asset_form: the form containing the elements to be updated
:type asset_form: ``osid.repository.AssetForm``
:raise: ``IllegalState`` -- ``asset_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
... |
387,199 | def downsample(self, factor):
if int(factor) != factor or factor < 1:
raise ValueError("Argument `factor` must be a positive integer greater than or equal to 1. Got: <{}>({})", type(factor), factor)
paths = self.interjoint_paths()
for i, path in enumerate(paths):
paths[i] = np.concatenate... | Compute a downsampled version of the skeleton by striding while
preserving endpoints.
factor: stride length for downsampling the saved skeleton paths.
Returns: downsampled PrecomputedSkeleton |