Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
369,700 | def _remove_wire_nets(block):
wire_src_dict = _ProducerList()
wire_removal_set = set()
for net in block.logic:
if net.op == :
wire_src_dict[net.dests[0]] = net.args[0]
if not isinstance(net.dests[0], Output):
wire_removal_set.add(net.dests[0... | Remove all wire nodes from the block. |
def blurred_image_1d_from_1d_unblurred_and_blurring_images(unblurred_image_1d, blurring_image_1d, convolver):
    """Compute the blurred 1D image inside the mask via PSF convolution.

    Combines the 1D masked image with the 1D blurring image (the region
    outside the mask whose light blurs into the mask after PSF convolution)
    using the image's convolver (*see ccd.convolution*).

    Parameters
    ----------
    unblurred_image_1d : 1D masked image values.
    blurring_image_1d : 1D image of the blurring region outside the mask.
    convolver : object exposing ``convolve_image(image_array, blurring_array)``.
    """
    blurred_image = convolver.convolve_image(
        image_array=unblurred_image_1d,
        blurring_array=blurring_image_1d,
    )
    return blurred_image
369,702 | def show_system_info_output_show_system_info_rbridge_id(self, **kwargs):
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
output = ET.SubElement(show_system_info, "output")
show_system_info = ET.SubElement(output, ... | Auto Generated Code |
def set_extend(self, extend):
    """Set the mode used for drawing outside the area of this pattern.

    See the cairo ``EXTEND`` documentation for the semantics of each extend
    strategy. The default extend mode is ``NONE`` for surface patterns and
    ``PAD`` for gradient patterns.

    :param extend: an EXTEND enumeration value.
    """
    # Forward to the underlying cairo C API, then surface any error status
    # as a Python exception.
    cairo.cairo_pattern_set_extend(self._pointer, extend)
    self._check_status()
369,704 | def get_lines_from_file(filename, lineno, context_lines):
def get_lines(start, end):
return [linecache.getline(filename, l).rstrip() for l in range(start, end)]
lower_bound = max(1, lineno - context_lines)
upper_bound = lineno + context_lines
linecache.checkcache(filename)
pre_context... | Returns context_lines before and after lineno from file.
Returns (pre_context_lineno, pre_context, context_line, post_context). |
369,705 | def _make_args_checker(self):
def _checker(*args, **kws):
nargs = len(args)
nnonvaargs = min(nargs, self._max_positional_args)
if nargs > self._max_positional_args and self._ivararg is None:
raise self._too_many_args_error(nargs)
... | Create a function that checks signature of the source function. |
369,706 | def run(itf):
if not itf:
return 1
options = SplitInput(itf)
error_check(options)
try:
molecules, ensemble_lookup = ReadFiles(options)
except:
return 1
if options.compare:
compare(molecules, ensemble_lookup, options)
else:
eval... | Run postanalyze functions. |
369,707 | def get_items(self, from_date, url, expand_fields=True):
start_at = 0
req = self.fetch(url, payload=self.__build_payload(start_at, from_date, expand_fields))
issues = req.text
data = req.json()
titems = data[]
nitems = data[]
start_at += min(nitems, ti... | Retrieve all the items from a given date.
:param url: endpoint API url
:param from_date: obtain items updated since this date
:param expand_fields: if True, it includes the expand fields in the payload |
def timeago(tz=None, *args, **kwargs):
    """Return a datetime that lies the given duration in the past.

    Accepts the same arguments as ``datetime.timedelta``; ``tz`` selects the
    timezone applied (via ``totz``) to the current time before subtracting.
    """
    offset = timedelta(*args, **kwargs)
    return totz(datetime.now(), tz) - offset
369,709 | def build_agg_vec(agg_vec, **source):
if type(agg_vec) is str:
agg_vec = [agg_vec]
agg_dict = dict()
for entry in agg_vec:
try:
agg_dict[entry] = source[entry]
except KeyError:
folder = source.get(, )
folder = os.path.join(PYMRIO_PATH[fo... | Builds an combined aggregation vector based on various classifications
This function build an aggregation vector based on the order in agg_vec.
The naming and actual mapping is given in source, either explicitly or by
pointing to a folder with the mapping.
>>> build_agg_vec(['EU', 'OECD'], path = 'tes... |
369,710 | def plot_neuron3d(ax, nrn, neurite_type=NeuriteType.all,
diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
color=None, alpha=_ALPHA):
plot_soma3d(ax, nrn.soma, color=color, alpha=alpha)
for neurite in iter_neurites(nrn, filt=tree_type_checker(neurite_type)):
... | Generates a figure of the neuron,
that contains a soma and a list of trees.
Args:
ax(matplotlib axes): on what to plot
nrn(neuron): neuron to be plotted
neurite_type(NeuriteType): an optional filter on the neurite type
diameter_scale(float): Scale factor multiplied with segment ... |
369,711 | def get_unawarded_user_ids(self, db_read=None):
db_read = db_read or self.db_read
already_awarded_ids = self.get_already_awarded_user_ids(db_read=db_read)
current_ids = self.get_current_user_ids(db_read=db_read)
unawarded_ids = list(set(current_ids) - set(already_awarded_ids))
... | Returns unawarded user ids (need to be saved) and the count. |
def _get_code_w_scope(data, position, obj_end, opts):
    """Decode a BSON code_w_scope into a ``bson.code.Code``.

    Returns a ``(Code, position)`` tuple where ``position`` points just past
    the decoded element.
    """
    # Skip the 4-byte total-length prefix, then read the code string and its
    # scope document back-to-back.
    code_str, position = _get_string(data, position + 4, obj_end, opts)
    scope_doc, position = _get_object(data, position, obj_end, opts)
    return Code(code_str, scope_doc), position
def remove_api_key(self):
    """Remove the user's existing API key, if present, and clear ``self.api_key``.

    Issues a PATCH to the record's ``remove_api_key`` endpoint and raises
    ``requests.HTTPError`` if the server reports a failure status.

    Returns:
        `NoneType`: None.
    """
    endpoint = self.record_url + "/remove_api_key"
    # NOTE(review): TLS verification is disabled (verify=False) — preserved
    # from the original; confirm this is intentional for this deployment.
    response = requests.patch(url=endpoint, headers=HEADERS, verify=False)
    response.raise_for_status()
    self.api_key = ""
def subscribe(self, tag, fun, description=None):
    """Register ``fun`` under ``tag`` and subscribe the socket to that tag.

    :param tag: Topic string used both as registry key and nanomsg filter.
    :param fun: Callable invoked for messages published under ``tag``.
    :param description: Optional human-readable description of the handler.
    """
    self.methods[tag] = fun
    self.descriptions[tag] = description
    # Ask nanomsg to start delivering messages published under this tag.
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
369,715 | def update_binary_stats(self, label, pred):
pred = pred.asnumpy()
label = label.asnumpy().astype()
pred_label = numpy.argmax(pred, axis=1)
check_label_shapes(label, pred)
if len(numpy.unique(label)) > 2:
raise ValueError("%s currently only supports binary cl... | Update various binary classification counts for a single (label, pred)
pair.
Parameters
----------
label : `NDArray`
The labels of the data.
pred : `NDArray`
Predicted values. |
369,716 | def solar_midnight(self, date=None, local=True):
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.tod... | Calculates the solar midnight (the time when the sun is at its lowest
point.)
:param date: The date for which to calculate the midnight time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time... |
369,717 | def display_hook(fn):
@wraps(fn)
def wrapped(element):
global FULL_TRACEBACK
if Store.current_backend is None:
return {}, {}
try:
max_frames = OutputSettings.options[]
mimebundle = fn(element, max_frames=max_frames)
if mimebundle is N... | A decorator to wrap display hooks that return a MIME bundle or None.
Additionally it handles adding output to the notebook archive, saves
files specified with the output magic and handles tracebacks. |
369,718 | def kelvin2rgb(temperature):
if temperature < 1000:
temperature = 1000
elif temperature > 40000:
temperature = 40000
tmp_internal = temperature / 100.0
if tmp_internal <= 66:
red = 255
else:
tmp_red = 329.698727446 * np.power(tmp_internal - 60, -0.133... | Converts from Kelvin temperature to an RGB color.
Algorithm credits: |tannerhelland|_ |
369,719 | def store_minions(opts, jid, minions, mminion=None, syndic_id=None):
if mminion is None:
mminion = salt.minion.MasterMinion(opts, states=False, rend=False)
job_cache = opts[]
minions_fstr = .format(job_cache)
try:
mminion.returners[minions_fstr](jid, minions, syndic_id=syndic_id)
... | Store additional minions matched on lower-level masters using the configured
master_job_cache |
369,720 | def init_app(self, app, **kwargs):
self.init_config(app)
self.limiter = Limiter(app, key_func=get_ipaddr)
if app.config[]:
self.talisman = Talisman(
app, **app.config.get(, {})
)
if app.config[]:
... | Initialize application object.
:param app: An instance of :class:`~flask.Flask`. |
369,721 | def _step(self,
model: TrainingModel,
batch: mx.io.DataBatch,
checkpoint_interval: int,
metric_train: mx.metric.EvalMetric,
metric_loss: Optional[mx.metric.EvalMetric] = None):
if model.monitor is not None:
model.monitor... | Performs an update to model given a batch and updates metrics. |
369,722 | def _get_style_of_faulting_term(self, C, rup):
SS, NS, RS = 0.0, 0.0, 0.0
if np.abs(rup.rake) <= 30.0 or (180.0 - np.abs(rup.rake)) <= 30.0:
SS = 1.0
elif rup.rake > 30.0 and rup.rake < 150.0:
RS = 1.0
else:
... | Returns the style-of-faulting term.
Fault type (Strike-slip, Normal, Thrust/reverse) is
derived from rake angle.
Rakes angles within 30 of horizontal are strike-slip,
angles from 30 to 150 are reverse, and angles from
-30 to -150 are normal.
Note that the 'Unspecified' ca... |
369,723 | def execute_python_script(self, script):
file_handle, filename = tempfile.mkstemp()
temp_file = os.fdopen(file_handle, "wt")
temp_file.write(script)
temp_file.close()
self.put(filename, "python_execute.py")
command = ["python", "python_execute.... | Execute a python script of the remote server
:param script: Inline script to convert to a file and execute remotely
:return: The output of the script execution |
369,724 | def __Restore_Geometry_On_Layout_Change_checkBox_set_ui(self):
restore_geometry_on_layout_change))
self.Restore_Geometry_On_Layout_Change_checkBox.setCheckState(restore_geometry_on_layout_change)
self.__engine.layouts_man... | Sets the **Restore_Geometry_On_Layout_Change_checkBox** Widget. |
def nth(iterable, n, default=None):
    """Return the *n*-th item (0-based) of *iterable*, or *default*.

    Arguments
    ---------
    iterable : iterable
    n : int
        Zero-based index of the item to return.
    default : optional
        Value returned when the iterable has fewer than ``n + 1`` items.

    Raises
    ------
    TypeError
        If ``n`` is not an integer.
    """
    # isinstance() is the idiomatic type check; the original
    # ``type(n) != int`` would also wrongly reject int subclasses.
    if not isinstance(n, int):
        raise TypeError("n is not an integer")
    return next(islice(iterable, n, None), default)
369,726 | def read_asc_grid(filename, footer=0):
ncols = None
nrows = None
xllcorner = None
xllcenter = None
yllcorner = None
yllcenter = None
cellsize = None
dx = None
dy = None
no_data = None
header_lines = 0
with io.open(filename, ) as f:
while True:... | Reads ASCII grid file (*.asc).
Parameters
----------
filename : str
Name of *.asc file.
footer : int, optional
Number of lines at bottom of *.asc file to skip.
Returns
-------
grid_array : numpy array, shape (M, N)
(M, N) array of grid values, where M is... |
def FormatAsHexString(num, width=None, prefix="0x"):
    """Format an integer as a hex string such as ``0xff``.

    Args:
        num: Integer to format.
        width: If given, zero-pad the hex digits to this many characters.
        prefix: String prepended to the digits (default ``"0x"``).
    """
    # Strip the leading "0x"; the "L" suffix only ever appears on
    # Python 2 longs and is removed for compatibility.
    digits = hex(num)[2:].replace("L", "")
    if width:
        digits = digits.rjust(width, "0")
    return "%s%s" % (prefix, digits)
369,728 | def marketShortInterestDF(date=None, token=, version=):
df = pd.DataFrame(marketShortInterest(date, token, version))
_toDatetime(df)
return df | The consolidated market short interest positions in all IEX-listed securities are included in the IEX Short Interest Report.
The report data will be published daily at 4:00pm ET.
https://iexcloud.io/docs/api/#listed-short-interest-list-in-dev
Args:
date (datetime); Effective Datetime
toke... |
369,729 | def import_command(dest, src, name, api=None, filter_symbol=None):
if not filter_symbol:
filter_symbol = _default_filter_symbol
cmd = src.commands[name]
for x in cmd.required_types:
if not filter_symbol(, x):
continue
import_type(dest, src, x, api, filter_symbol)
... | Import Command `name` and its dependencies from Registry `src`
to Registry `dest`
:param Registry dest: Destination Registry
:param Registry src: Source Registry
:param str name: Name of Command to import
:param str api: Prefer to import Types with api name `api`, or None to
imp... |
def _login(login_func, *args):
    """Shared login helper to avoid duplicating code across login functions.

    Calls ``login_func`` with ``args``, fails fast if the response contains
    errors, then wraps the returned JSON payload in a ``User``.
    """
    resp = login_func(*args)
    _fail_if_contains_errors(resp)
    return User(resp.json())
369,731 | def get_energy_management_properties(self):
result = self.manager.session.get(self.uri + )
em_list = result[]
if len(em_list) != 1:
uris = [em_obj[] for em_obj in em_list]
raise ParseError("Energy management data returned for no resource "
... | Return the energy management properties of the CPC.
The returned energy management properties are a subset of the
properties of the CPC resource, and are also available as normal
properties of the CPC resource. In so far, there is no new data
provided by this method. However, because on... |
369,732 | def uninstall_wic(self, wic_slot_number):
slot_number = 0
adapter = self._slots[slot_number]
if wic_slot_number > len(adapter.wics) - 1:
raise DynamipsError("WIC slot {wic_slot_number} doesnvm slot_remove_binding "{name}" {slot_number} {wic_slot... | Uninstalls a WIC adapter from this router.
:param wic_slot_number: WIC slot number |
369,733 | def compute_summary_statistic(iscs, summary_statistic=, axis=None):
if summary_statistic not in (, ):
raise ValueError("Summary statistic must be or ")
if summary_statistic == :
statistic = np.tanh(np.nanmean(np.arctanh(iscs), axis=axis))
elif summary_statistic == :
sta... | Computes summary statistics for ISCs
Computes either the 'mean' or 'median' across a set of ISCs. In the
case of the mean, ISC values are first Fisher Z transformed (arctanh),
averaged, then inverse Fisher Z transformed (tanh).
The implementation is based on the work in [SilverDunlap1987]_.
.. [S... |
def list_product_releases(page_size=200, page_index=0, sort="", q=""):
    """List all ProductReleases.

    Fetches a page of raw release data and returns it formatted as a JSON
    list. Returns ``None`` when the backend returns nothing (preserved
    behavior of the original implicit fall-through).
    """
    raw = list_product_releases_raw(page_size, page_index, sort, q)
    if not raw:
        return None
    return utils.format_json_list(raw)
def path_helper(self, path=None, operations=None, **kwargs):
    """Translate a Flask-style URL rule into an OpenAPI path.

    Works like an apispec plugin: converts ``<converter:name>`` segments to
    ``{name}`` and prefixes the resource name, e.g. ``/<int:id>`` becomes
    ``/resource/{id}``.

    :param str path: Path to the resource.
    :param dict operations: A dict mapping HTTP methods to operation
        objects (accepted for plugin compatibility; not mutated here).
    """
    flask_rule = re.compile(r"<(?:[^:<>]+:)?([^<>]+)>")
    openapi_path = flask_rule.sub(r"{\1}", path)
    return "/{0}{1}".format(self.resource_name, openapi_path)
def set_xylims(self, lims, axes=None, panel=None, **kws):
    """Set the x/y limits on one panel's plot.

    :param lims: Limits to apply (forwarded to the panel's ``set_xylims``).
    :param axes: Optional axes selector forwarded to the panel.
    :param panel: Panel key; defaults to ``self.current_panel``.
    :param kws: Extra keyword arguments forwarded to the panel. (Fixes a
        NameError in the original, which forwarded an undefined ``kw``.)
    """
    if panel is None:
        panel = self.current_panel
    self.panels[panel].set_xylims(lims, axes=axes, **kws)
def blockSignals(self, state):
    """Block or unblock the signals for this widget and its sub-parts.

    :param state: <bool> True to block signals, False to unblock.
    """
    # Propagate the blocking state to the composite child widgets as well.
    super(XLocationWidget, self).blockSignals(state)
    self._locationEdit.blockSignals(state)
    self._locationButton.blockSignals(state)
def add_alias(self, alias, source, clean=True):
    """Add an alias, optionally 'cleaning' the alias string.

    Cleaning calls the parent catalog's ``clean_entry_name`` — the same
    normalization applied to entry names themselves.

    Returns
    -------
    alias : str
        The stored version of the alias (cleaned or not).
    """
    stored = self.catalog.clean_entry_name(alias) if clean else alias
    self.add_quantity(self._KEYS.ALIAS, stored, source)
    return stored
369,739 | def get_unique_together_validators(self):
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents.keys())
)
field_names = {
field.source for field in self.fields.values()
if (field.source ... | Determine a default set of validators for any unique_together contraints. |
369,740 | def add_mandates(self, representative, rep_json):
if rep_json.get():
constituency, _ = Constituency.objects.get_or_create(
name=rep_json.get(), country=self.france)
group, _ = self.touch_model(model=Group,
abbrev... | Create mandates from rep data based on variant configuration |
def reduce_after(method):
    """Decorator: ``reduce()`` the result of *method* unless it returned self.

    If the wrapped method returns an object equal to ``self`` (already
    reduced / unchanged), the result is passed through untouched; otherwise
    ``result.reduce()`` is returned.
    """
    from functools import wraps

    @wraps(method)  # preserve the wrapped method's name and docstring
    def new_method(self, *args, **kwargs):
        result = method(self, *args, **kwargs)
        if result == self:
            return result
        return result.reduce()
    return new_method
369,742 | def get_first_and_last(year, month):
ym_first = make_aware(
datetime.datetime(year, month, 1),
get_default_timezone()
)
ym_last = make_aware(
datetime.datetime(year, month, monthrange(year, month)[1], 23, 59, 59, 1000000-1),
get_default_ti... | Returns two datetimes: first day and last day of given year&month |
369,743 | def with_json_path(self, path, field=None):
if field is None:
field = .join([] + json_path_components(path))
kwargs = {field: JsonGetPath(, path)}
return self.defer().annotate(**kwargs) | Annotate Storage objects with a specific JSON path.
:param path: Path to get inside the stored object, which can be
either a list of path components or a comma-separated
string
:param field: Optional output field name |
def resize(self, new_size):
    """Grow the backing array to *new_size* chunks, copying existing data over.

    :param new_size: New capacity in chunks; must exceed the current size.
    """
    assert new_size > self.size
    grown = self._allocate(new_size)
    # Copy the old contents into the front of the new buffer.
    used = self.size * self.chunk_size
    grown[0:used] = self.data
    self.size = new_size
    self.data = grown
369,745 | def members(self, is_manager=None):
if not is_manager:
return [member for member in self._json_data[] if member[]]
else:
return [member for member in self._json_data[] if
member.get(, False) and member.get(, False)] | Retrieve members of the scope.
:param is_manager: (optional) set to True to return only Scope members that are also managers.
:type is_manager: bool
:return: List of members (usernames)
Examples
--------
>>> members = project.members()
>>> managers = project.mem... |
369,746 | def add_notification_listener(self, notification_type, notification_callback):
if notification_type not in self.notifications:
self.notifications[notification_type] = [(self.notification_id, notification_callback)]
else:
if reduce(lambda a, b: a + 1,
filter(lambda tup: tup[1] =... | Add a notification callback to the notification center.
Args:
notification_type: A string representing the notification type from .helpers.enums.NotificationTypes
notification_callback: closure of function to call when event is triggered.
Returns:
Integer notification id used to remove the n... |
369,747 | def remove_user_from_group(self, username, groupname, raise_on_error=False):
response = self._delete(self.rest_url + "/group/user/direct",params={"username": username, "groupname": groupname})
if response.status_code == 204:
return True
if raise_on_error:
rais... | Remove a user from a group
Attempts to remove a user from a group
Args
username: The username to remove from the group.
groupname: The group name to be removed from the user.
Returns:
True: Succeeded
False: If unsuccessful |
369,748 | def expand(self, m):
if m is None:
raise ValueError("Match is None!")
sep = m.string[:0]
if isinstance(sep, bytes) != self._bytes:
raise TypeError()
text = []
for x in range(0, len(self.literals)):
index = x
l = ... | Using the template, expand the string. |
369,749 | def os_walk_pre_35(top, topdown=True, onerror=None, followlinks=False):
islink, join, isdir = os.path.islink, os.path.join, os.path.isdir
try:
names = os.listdir(top)
except OSError as err:
if onerror is not None:
onerror(err)
return
dirs, nondirs = [], []
... | Pre Python 3.5 implementation of os.walk() that doesn't use scandir. |
369,750 | def select(cls, dataset, selection_mask=None, **selection):
import iris
constraint = cls.select_to_constraint(dataset, selection)
pre_dim_coords = [c.name() for c in dataset.data.dim_coords]
indexed = cls.indexed(dataset, selection)
extracted = dataset.data.extract(const... | Apply a selection to the data. |
369,751 | def match_rows(rows1, rows2, key, sort_keys=True):
matched = OrderedDict()
for i, rows in enumerate([rows1, rows2]):
for row in rows:
val = row[key]
try:
data = matched[val]
except KeyError:
matched[val] = ([], [])
... | Yield triples of `(value, left_rows, right_rows)` where
`left_rows` and `right_rows` are lists of rows that share the same
column value for *key*. This means that both *rows1* and *rows2*
must have a column with the same name *key*.
.. warning::
Both *rows1* and *rows2* will exist in memory for... |
369,752 | def housecode_to_index(housecode):
match = re.search(r, housecode.upper())
if match:
house_index = int(match.group(2))
if 1 <= house_index <= 16:
return (ord(match.group(1)) - ord()) * 16 + house_index - 1
raise ValueError("Invalid X10 housecode: %s" % housecode) | Convert a X10 housecode to a zero-based index |
369,753 | def list_vrf(self):
try:
vrfs = VRF.list()
except NipapError, e:
return json.dumps({: 1, : e.args, : type(e).__name__})
return json.dumps(vrfs, cls=NipapJSONEncoder) | List VRFs and return JSON encoded result. |
def connect_mysql(host, port, user, password, database):
    """Open a MySQL connection via pymysql and return it.

    NOTE(review): the original docstring claimed "with retries", but no
    retry logic is implemented here — confirm whether retries belong at a
    caller's level.
    """
    return pymysql.connect(
        host=host,
        port=port,
        user=user,
        passwd=password,
        db=database,
    )
369,755 | def letras(song):
translate = {
: ,
URLESCAPE: ,
:
}
artist = song.artist.lower()
artist = normalize(artist, translate)
title = song.title.lower()
title = normalize(title, translate)
url = .format(artist, title)
soup = get_url(url)
if not soup:
... | Returns the lyrics found in letras.com for the specified mp3 file or an
empty string if not found. |
def foldl1(f: Callable[[T, T], T], xs: Iterable[T]) -> T:
    """Left-fold *xs* with the binary function *f*, seeded by the first item.

    .. math::
        foldl1(f, [x_0, x_1, x_2, x_3]) = f(f(f(x_0, x_1), x_2), x_3)

    Examples
    --------
    >>> foldl1(lambda x, y: x + y, range(5))
    10
    """
    # functools.reduce without an initializer is exactly foldl1 (raises
    # TypeError on an empty iterable).
    return reduce(f, xs)
369,757 | def append(args):
p = OptionParser(append.__doc__)
p.add_option("--prepend", help="Prepend string to read names")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
bamfile, = args
prepend = opts.prepend
icmd = "samtools view -h {0}".format(bamfil... | %prog append bamfile
Append /1 or /2 to read names. Useful for using the Tophat2 bam file for
training AUGUSTUS gene models. |
369,758 | def _is_numeric_data(self, data_type):
dt = DATA_TYPES[data_type]
if dt[] and dt[]:
if type(self.data) is dt[] and dt[] < self.data < dt[]:
self.type = data_type.upper()
self.len = len(str(self.data))
return True | Private method for testing text data types. |
369,759 | def missing_categories(context):
user = user_for_context(context)
categories_available = set(CategoryController.available_categories(user))
items = ItemController(user).items_pending_or_purchased()
categories_held = set()
for product, quantity in items:
categories_held.add(product.cat... | Adds the categories that the user does not currently have. |
369,760 | def load_from_rdf_file(self, rdf_file):
self.graph = rdflib.Graph()
self.graph.parse(os.path.abspath(rdf_file), format=)
self.initialize() | Initialize given an RDF input file representing the hierarchy."
Parameters
----------
rdf_file : str
Path to an RDF file. |
def rank(self, n, mu, sigma, crit=.5, upper=10000, xtol=1):
    """Delegate rank computation to ``_make_rank``.

    Thin wrapper forwarding this object and all tuning parameters
    (``crit``, ``upper``, ``xtol``) unchanged.
    """
    return _make_rank(self, n, mu, sigma,
                      crit=crit, upper=upper, xtol=xtol)
def save(self, session_file, verbose=False):
    """Save the current session to an existing file, which will be replaced.

    If this is a new session that has not been saved yet, use 'save as'
    instead.

    :param session_file: Path of the file to write the current session to.
    :param verbose: Print more output.
    """
    params = {"file": session_file}
    return api(url=self.__url + "/save", PARAMS=params, verbose=verbose)
369,763 | def authentication(self):
if self.session.digest:
authentication = self.session.generate_digest()
elif self.session.basic:
authentication = self.session.generate_basic()
else:
return
return "Authorization: " + authentication + | Generate authentication string. |
369,764 | def make_cache_keys(self, endpoint, kwargs):
keys = sorted(kwargs.keys())
i18n_cache_key = endpoint+"|"+"|".join([kwargs[k] for k in keys])
if "lang" in keys:
cache_key = endpoint+"|" + "|".join([kwargs[k] for k in keys if k != "lang"])
else:
cache_key = ... | This function is built to provide cache keys for templates
:param endpoint: Current endpoint
:param kwargs: Keyword Arguments
:return: tuple of i18n dependant cache key and i18n ignoring cache key
:rtype: tuple(str) |
def get_future_days(self):
    """Return only Day objects dated today or later."""
    cutoff = timezone.now().date()
    # date__gte keeps today's Day as well as strictly-future ones.
    return Day.objects.filter(date__gte=cutoff)
369,766 | def create_training_instances(x):
(input_files, out, tokenizer, max_seq_length, dupe_factor,
short_seq_prob, masked_lm_prob, max_predictions_per_seq, rng) = x
time_start = time.time()
logging.info(, input_files)
all_documents = [[]]
| Create `TrainingInstance`s from raw text. |
369,767 | def select_qadapter(self, pconfs):
policy, max_ncpus = self.policy, self.max_cores
pconfs = pconfs.get_ordered_with_policy(policy, max_ncpus)
if policy.precedence == "qadapter":
for qadpos, qad in enumerate(self.qads):
possible_pconfs ... | Given a list of parallel configurations, pconfs, this method select an `optimal` configuration
according to some criterion as well as the :class:`QueueAdapter` to use.
Args:
pconfs: :class:`ParalHints` object with the list of parallel configurations
Returns:
:class:`Par... |
369,768 | def upload(self, project_id, processor_name, **fields):
p = self.processors(processor_name=processor_name)
if len(p) == 1:
p = p[0]
else:
Exception(.format(processor_name))
for field_name, field_val in fields.items():
if field_name not in p[... | Upload files and data objects.
:param project_id: ObjectId of Genesis project
:type project_id: string
:param processor_name: Processor object name
:type processor_name: string
:param fields: Processor field-value pairs
:type fields: args
:rtype: HTTP Response ob... |
369,769 | def _executable_memory_regions(self, objects=None, force_segment=False):
if objects is None:
binaries = self.project.loader.all_objects
else:
binaries = objects
memory_regions = [ ]
for b in binaries:
if isinstance(b, ELF):
... | Get all executable memory regions from the binaries
:param objects: A collection of binary objects to collect regions from. If None, regions from all project
binary objects are used.
:param bool force_segment: Rely on binary segments instead of sections.
:return: A sorte... |
369,770 | def _parse_persons(self, datafield, subfield, roles=["aut"]):
parsed_persons = []
raw_persons = self.get_subfields(datafield, subfield)
for person in raw_persons:
other_subfields = person.other_subfields
if "4" in other_subfield... | Parse persons from given datafield.
Args:
datafield (str): code of datafield ("010", "730", etc..)
subfield (char): code of subfield ("a", "z", "4", etc..)
role (list of str): set to ["any"] for any role, ["aut"] for
authors, etc.. For details see
... |
369,771 | def getRaw(self, context, aslist=False, **kwargs):
value = StringField.get(self, context, **kwargs)
if not value:
return [] if self.multiValued else None
if self.multiValued:
ret = value
else:
ret = self.get_uid(context, value)
if ... | Grab the stored value, and return it directly as UIDs.
:param context: context is the object who's schema contains this field.
:type context: BaseContent
:param aslist: Forces a single-valued field to return a list type.
:type aslist: bool
:param kwargs: kwargs are passed direct... |
369,772 | def deserialize(self, to_deserialize: PrimitiveJsonType) \
-> Optional[Union[SerializableType, List[SerializableType]]]:
if to_deserialize is None:
return None
elif isinstance(to_deserialize, List):
deserialized = []
for item in to_de... | Deserializes the given representation of the serialized object.
:param to_deserialize: the serialized object as a dictionary
:return: the deserialized object or collection of deserialized objects |
369,773 | def unregister_directory(self, directory_node, raise_exception=False):
if raise_exception:
if not directory_node in self.list_directory_nodes():
raise foundations.exceptions.ProgrammingError(
"{0} | directory isn{0}DirectoryNode'.".format(directory_nod... | Unregisters given :class:`umbra.components.factory.script_editor.nodes.DirectoryNode` class Node from the Model.
:param directory_node: DirectoryNode to unregister.
:type directory_node: DirectoryNode
:param raise_exception: Raise the exception.
:type raise_exception: bool
:retu... |
def import_(module_name, name):
    """Import an object by a (possibly package-relative) module path.

    Example::

        Profiler = import_('profiling.profiler', 'Profiler')
    """
    mod = importlib.import_module(module_name, __package__)
    return getattr(mod, name)
369,775 | def score(self, X, eval_metric=, num_batch=None, batch_end_callback=None, reset=True):
if not isinstance(eval_metric, metric.EvalMetric):
eval_metric = metric.create(eval_metric)
X = self._init_iter(X, None, is_train=False)
if reset:
X.reset()
... | Run the model given an input and calculate the score
as assessed by an evaluation metric.
Parameters
----------
X : mxnet.DataIter
eval_metric : metric.metric
The metric for calculating score.
num_batch : int or None
The number of batches to run. ... |
369,776 | def lyric(id):
if id is None:
raise ParamsError()
r = NCloudBot()
r.method =
r.params = {: id}
r.send()
return r.response | 通过歌曲 ID 获取歌曲歌词地址
:param id: 歌曲ID |
def get_sample_value(self, name, labels=None):
    """Return the value of the sample matching *name* and *labels*, or None.

    Scans every sample of every collected metric. This is inefficient and
    intended only for use in unit tests.
    """
    wanted_labels = {} if labels is None else labels
    for metric in self.collect():
        for sample in metric.samples:
            if sample.name == name and sample.labels == wanted_labels:
                return sample.value
    return None
369,778 | def post_predictions(self, document_id: str, model_name: str) -> dict:
body = json.dumps({: document_id, : model_name}).encode()
uri, headers = self._create_signing_headers(, , body)
post_predictions_response = requests.post(
url=uri.geturl(),
headers=headers,
... | Run inference and create a prediction, calls the POST /predictions endpoint.
>>> from las import Client
>>> client = Client(endpoint='<api endpoint>')
>>> client.post_predictions(document_id='<document id>', model_name='invoice')
:param document_id: The document id to run inference and... |
369,779 | def get_consumed_read_units_percent(
table_name, lookback_window_start=15, lookback_period=5):
try:
metrics = __get_aws_metric(
table_name,
lookback_window_start,
lookback_period,
)
except BotoServerError:
raise
if metrics:
... | Returns the number of consumed read units in percent
:type table_name: str
:param table_name: Name of the DynamoDB table
:type lookback_window_start: int
:param lookback_window_start: Relative start time for the CloudWatch metric
:type lookback_period: int
:param lookback_period: Number of minu... |
369,780 | def document_agents(p):
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, )
p.comment(, ) | Document agents in AIKIF (purpose and intent) |
369,781 | def insert_cylinder(im, xyz0, xyz1, r):
r
if im.ndim != 3:
raise Exception()
xyz0, xyz1 = [sp.array(xyz).astype(int) for xyz in (xyz0, xyz1)]
r = int(r)
L = sp.absolute(xyz0 - xyz1).max() + 1
xyz_line = [sp.linspace(xyz0[i], xyz1[i], L).astype(int) for i in range(3)]
xyz_min = ... | r"""
Inserts a cylinder of given radius onto a given image
Parameters
----------
im : array_like
Original voxelated image
xyz0, xyz1 : 3-by-1 array_like
Voxel coordinates of the two end points of the cylinder
r : int
Radius of the cylinder
Returns
-------
im... |
369,782 | def update_pos(self, pos_id, name, pos_type, location=None):
arguments = {: name,
: pos_type,
: location}
return self.do_req(,
self.merchant_api_base_url + +
pos_id + , arguments) | Update POS resource. Returns the raw response object.
Arguments:
pos_id:
POS id as chosen on registration
name:
Human-readable name of the POS, used for displaying payment
request origin to end user
pos_type:
PO... |
369,783 | def _subtract_timedelta(self, delta):
if isinstance(delta, pendulum.Duration):
return self.subtract(
years=delta.years,
months=delta.months,
weeks=delta.weeks,
days=delta.remaining_days,
hours=delta.hours,
... | Remove timedelta duration from the instance.
:param delta: The timedelta instance
:type delta: pendulum.Duration or datetime.timedelta
:rtype: DateTime |
369,784 | def LMLgrad(self,params=None):
if params is not None:
self.setParams(params)
KV = self._update_cache()
W = KV[]
LMLgrad = SP.zeros(self.covar.n_params)
for i in range(self.covar.n_params):
Kd = self.covar.Kgrad_param(i)
LMLgrad[i] = 0.... | evaluates the gradient of the log marginal likelihood for the given hyperparameters |
369,785 | def grants(self):
from linode_api4.objects.account import UserGrants
resp = self._client.get()
grants = None
if resp is not None:
grants = UserGrants(self._client, self.username, resp)
return grants | Returns grants for the current user |
369,786 | def get_function_doc(function, config=default_config):
if config.exclude_function:
for ex in config.exclude_function:
if ex.match(function.__name__):
return None
return _doc_object(function, , config=config) | Return doc for a function. |
369,787 | def has_valid_dispatch_view_docs(endpoint):
klass = endpoint.__dict__.get(, None)
return klass and hasattr(klass, ) \
and hasattr(endpoint, ) \
and getattr(klass, ).__doc__ | Return True if dispatch_request is swaggable |
369,788 | def getRow(leftU, rightV, jVec):
jLeft = None
jRight = None
if len(leftU) > 0:
jLeft = jVec[:len(leftU)]
if len(rightV) > 0:
jRight = jVec[-len(rightV):]
multU = np.ones([1,1])
for k in xrange(len(leftU)):
multU = np.dot(multU, leftU[k][:, jLeft[k], :])
mult... | Compute X_{\geq \mu}^T \otimes X_{leq \mu}
X_{\geq \mu} = V_{\mu+1}(j_{\mu}) \ldots V_{d} (j_{d}) [left interface matrix]
X_{\leq \mu} = U_{1} (j_{1}) \ldots U_{\mu-1}(j_{\mu-1}) [right interface matrix]
Parameters:
:list of numpy.arrays: leftU
left-orthogonal cores from 1 to \mu-1
... |
369,789 | def export(self, input_stats=None):
input_stats = input_stats or {}
for e in self._exports:
logger.debug("Export stats using the %s module" % e)
thread = threading.Thread(target=self._exports[e].update,
args=(input_stats,))... | Export all the stats.
Each export module is ran in a dedicated thread. |
369,790 | def t_COMMA(self, t):
r
t.endlexpos = t.lexpos + len(t.value)
return t | r', |
369,791 | def client(self):
if self._client is None:
self._client = get_session(self.user_agent)
return self._client | Returns client session object |
369,792 | def from_xyz_string(xyz_string):
rot_matrix = np.zeros((3, 3))
trans = np.zeros(3)
toks = xyz_string.strip().replace(" ", "").lower().split(",")
re_rot = re.compile(r"([+-]?)([\d\.]*)/?([\d\.]*)([x-z])")
re_trans = re.compile(r"([+-]?)([\d\.]+)/?([\d\.]*)(?![x-z])")
... | Args:
xyz_string: string of the form 'x, y, z', '-x, -y, z',
'-2y+1/2, 3x+1/2, z-y+1/2', etc.
Returns:
SymmOp |
369,793 | def _validate_namespace(self, namespace):
if namespace not in self.namespaces:
raise CIMError(
CIM_ERR_INVALID_NAMESPACE,
_format("Namespace does not exist in mock repository: {0!A}",
namespace)) | Validate whether a CIM namespace exists in the mock repository.
Parameters:
namespace (:term:`string`):
The name of the CIM namespace in the mock repository. Must not be
`None`.
Raises:
:exc:`~pywbem.CIMError`: CIM_ERR_INVALID_NAMESPACE: Namespace does
... |
369,794 | def main():
(cred, providers) = config_read()
cmd_mode = True
conn_objs = cld.get_conns(cred, providers)
while cmd_mode:
nodes = cld.get_data(conn_objs, providers)
node_dict = make_node_dict(nodes, "name")
idx_tbl = table.indx_table(node_dict, True)
cmd_mode = ui.ui_... | Command-Mode: Retrieve and display data then process commands. |
369,795 | def result(self, value):
if self._process_result:
self._result = self._process_result(value)
self._raw_result = value | The result of the command. |
369,796 | def get_parameter(self, parameter):
"Return a dict for given parameter"
parameter = self._get_parameter_name(parameter)
return self._parameters[parameter] | Return a dict for given parameter |
369,797 | def __expect(self, exp=, timeout=None):
timeout_before = self._port.timeout
timeout = timeout or self._timeout
if SYSTEM != :
if self._port.timeout != MINIMAL_TIMEOUT:
self._port.timeout = MINIMAL_TIMEOUT
end = time.time() + tim... | will wait for exp to be returned from nodemcu or timeout |
369,798 | def set_fig_y_label(self, ylabel, **kwargs):
prop_default = {
: 0.45,
: 0.02,
: 20,
: ,
: ,
}
for prop, default in prop_default.items():
kwargs[prop] = kwargs.get(prop, default)
self._set_fig_label(, ylabe... | Set overall figure y.
Set label for y axis on overall figure. This is not for a specific plot.
It will place the label on the figure at the left with a call to ``fig.text``.
Args:
ylabel (str): ylabel for entire figure.
Keyword Arguments:
x/y (float, optional):... |
369,799 | def remove(self, document_id, namespace, timestamp):
database, coll = self._db_and_collection(namespace)
meta_collection = self._get_meta_collection(namespace)
doc2 = self.meta_database[meta_collection].find_one_and_delete(
{self.id_field: document_id}
)
if... | Removes document from Mongo
The input is a python dictionary that represents a mongo document.
The documents has ns and _ts fields. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.