Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
1,900 | def validate_request():
flask_request = request
request_data = flask_request.get_data()
if not request_data:
request_data = b
request_data = request_data.decode()
try:
json.loads(request_data)
except ValueError as json_error:
... | Validates the incoming request
The following are invalid
1. The Request data is not json serializable
2. Query Parameters are sent to the endpoint
3. The Request Content-Type is not application/json
4. 'X-Amz-Log-Type' header is not 'None'
5. 'X-Amz-I... |
1,901 | def _get_rate(self, mag):
mag_lo = mag - self.bin_width / 2.0
mag_hi = mag + self.bin_width / 2.0
if mag >= self.min_mag and mag < self.char_mag - DELTA_CHAR / 2:
return (10 ** (self.a_val - self.b_val * mag_lo)
- 10 ** (self.a_val - self.b_... | Calculate and return the annual occurrence rate for a specific bin.
:param mag:
Magnitude value corresponding to the center of the bin of interest.
:returns:
Float number, the annual occurrence rate for the :param mag value. |
1,902 | def delete_refund_transaction_by_id(cls, refund_transaction_id, **kwargs):
kwargs[] = True
if kwargs.get():
return cls._delete_refund_transaction_by_id_with_http_info(refund_transaction_id, **kwargs)
else:
(data) = cls._delete_refund_transaction_by_id_with_http_i... | Delete RefundTransaction
Delete an instance of RefundTransaction by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_refund_transaction_by_id(refund_transaction_id, async=True)
>... |
1,903 | def cores_orthogonalization_step(coresX, dim, left_to_right=True):
cc = coresX[dim]
r1, n, r2 = cc.shape
if left_to_right:
assert(0 <= dim < len(coresX) - 1)
cc, rr = np.linalg.qr(reshape(cc, (-1, r2)))
r2 = cc.shape[1]
coresX[dim] = reshape(cc, (r1, n, r2))
... | TT-Tensor X orthogonalization step.
The function can change the shape of some cores. |
1,904 | def add_object_to_scope(self, obj):
if isinstance(obj, Computer):
self.add_object_to_path(obj, "scope/computers")
elif isinstance(obj, ComputerGroup):
self.add_object_to_path(obj, "scope/computer_groups")
elif isinstance(obj, Building):
self.add_objec... | Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided. |
1,905 | def remove_breakpoint(self, py_db, filename, breakpoint_type, breakpoint_id):
python-linedjango-linejinja2-line
file_to_id_to_breakpoint = None
if breakpoint_type == :
breakpoints = py_db.breakpoints
file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint
... | :param str filename:
Note: must be already translated for the server.
:param str breakpoint_type:
One of: 'python-line', 'django-line', 'jinja2-line'.
:param int breakpoint_id: |
1,906 | def move_distance(self, distance_x_m, distance_y_m, distance_z_m,
velocity=VELOCITY):
distance = math.sqrt(distance_x_m * distance_x_m +
distance_y_m * distance_y_m +
distance_z_m * distance_z_m)
flight_time = dista... | Move in a straight line.
positive X is forward
positive Y is left
positive Z is up
:param distance_x_m: The distance to travel along the X-axis (meters)
:param distance_y_m: The distance to travel along the Y-axis (meters)
:param distance_z_m: The distance to travel alon... |
1,907 | def load_env(print_vars=False):
env_file = os.environ.get(, )
try:
variables = open(env_file).read().splitlines()
for v in variables:
if in v:
key, value = v.split(, 1)
if key.startswith():
continue
if key not ... | Load environment variables from a .env file, if present.
If an .env file is found in the working directory, and the listed
environment variables are not already set, they will be set according to
the values listed in the file. |
1,908 | def put(self, resource, **params):
return self._execute(self.session.put, , resource, **params) | Generic TeleSign REST API PUT handler.
:param resource: The partial resource URI to perform the request against, as a string.
:param params: Body params to perform the PUT request with, as a dictionary.
:return: The RestClient Response object. |
def update_ontology(ont_url, rdf_path):
    """Load an Eidos-style ontology YAML from a URL and save it as RDF.

    :param ont_url: URL of the ontology YAML file (e.g. hosted on GitHub).
    :param rdf_path: Path the serialized RDF hierarchy is written to.
    """
    # Fetch and parse the ontology, build an RDF graph, then persist it.
    yaml_root = load_yaml_from_url(ont_url)
    graph = rdf_graph_from_yaml(yaml_root)
    save_hierarchy(graph, rdf_path)
1,910 | def from_localhost(self) -> bool:
sock_family = self.socket.family
if sock_family == _socket.AF_UNIX:
return True
elif sock_family not in (_socket.AF_INET, _socket.AF_INET6):
return False
sock_address, *_ = self.peername
ip = ipaddress.ip_address(... | True if :attr:`.peername` is a connection from a ``localhost``
address. |
1,911 | def create_explicit(bounds):
safe_bounds = sorted(float(x) for x in bounds)
if len(safe_bounds) != len(set(safe_bounds)):
raise ValueError(u)
return sc_messages.Distribution(
bucketCounts=[0] * (len(safe_bounds) + 1),
explicitBuckets=sc_messages.ExplicitBuckets(bounds=safe_bound... | Creates a new instance of distribution with explicit buckets.
bounds is an iterable of ordered floats that define the explicit buckets
Args:
bounds (iterable[float]): initializes the bounds
Return:
:class:`endpoints_management.gen.servicecontrol_v1_messages.Distribution`
Raises:
... |
1,912 | def fit1d(samples, e, remove_zeros = False, **kw):
samples = samples[~np.isnan(samples)]
length = len(e)-1
hist,_ = np.histogramdd(samples, (e,))
hist = hist/sum(hist)
basis, knots = spline_base1d(length, marginal = hist, **kw)
non_zero = hist>0
model = linear_model.BayesianRidge()
... | Fits a 1D distribution with splines.
Input:
samples: Array
Array of samples from a probability distribution
e: Array
Edges that define the events in the probability
distribution. For example, e[0] < x <= e[1] is
the range of values that are associate... |
1,913 | def association_pivot(self, association_resource):
resource = self.copy()
resource._request_uri = .format(
association_resource.request_uri, resource._request_uri
)
return resource | Pivot point on association for this resource.
This method will return all *resources* (group, indicators, task, victims, etc) for this
resource that are associated with the provided resource.
**Example Endpoints URI's**
+---------+------------------------------------------------------... |
def needs_manager_helps():
    """Build the help message for the Batch Dialog.

    .. versionadded:: 3.2.1

    :returns: A message object containing helpful information.
    :rtype: messaging.message.Message
    """
    message = m.Message()
    # Assemble the standard help layout: brand banner, heading, then body.
    for component in (m.Brand(), heading(), content()):
        message.add(component)
    return message
1,915 | def key_absent(name, region=None, key=None, keyid=None, profile=None):
ret = {: name,
: True,
: ,
: {}
}
exists = __salt__[](name, region, key, keyid, profile)
if exists:
if __opts__[]:
ret[] = .format(name)
ret[] = None
... | Deletes a key pair |
1,916 | def titlecase(text, callback=None, small_first_last=True):
lines = re.split(, text)
processed = []
for line in lines:
all_caps = line.upper() == line
words = re.split(, line)
tc_line = []
for word in words:
if callback:
new_word = callback(wo... | Titlecases input text
This filter changes all words to Title Caps, and attempts to be clever
about *un*capitalizing SMALL words like a/an/the in the input.
The list of "SMALL words" which are not capped comes from
the New York Times Manual of Style, plus 'vs' and 'v'. |
1,917 | def p_namelist(self,t):
"namelist : namelist NAME \n | NAME"
if len(t)==2: t[0] = [t[1]]
elif len(t)==4: t[0] = t[1] + [t[3]]
else: raise NotImplementedError(,len(t)) | namelist : namelist ',' NAME \n | NAME |
1,918 | def upgrade(refresh=True, **kwargs):
<package>old<old-version>new<new-version>*
ret = {: {},
: True,
: ,
}
old = list_pkgs()
if salt.utils.data.is_true(refresh):
refresh_db()
result = _call_brew(, failhard=False)
__context__.pop(, None)
new = list_... | Upgrade outdated, unpinned brews.
refresh
Fetch the newest version of Homebrew and all formulae from GitHub before installing.
Returns a dictionary containing the changes:
.. code-block:: python
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
... |
1,919 | def create_tomodir(self, directory):
pwd = os.getcwd()
if not os.path.isdir(directory):
os.makedirs(directory)
os.chdir(directory)
directories = (
,
,
,
,
,
,
,
)
for... | Create a tomodir subdirectory structure in the given directory |
def _GetMountpoints(only_physical=True):
    """Fetch the set of filesystem mountpoints.

    Args:
      only_physical: If True, list only mountpoints for physical devices
        (e.g. hard disks). If False, mountpoints for things such as memory
        partitions or `/dev/shm` are returned as well.

    Returns:
      A set of mountpoint path strings.
    """
    # psutil's `all` flag is the inverse of our `only_physical` semantics.
    return {part.mountpoint for part in psutil.disk_partitions(all=not only_physical)}
1,921 | def getaddress(self):
self.commentlist = []
self.gotonext()
oldpos = self.pos
oldcl = self.commentlist
plist = self.getphraselist()
self.gotonext()
returnlist = []
if self.pos >= len(self.field):
if plist:
... | Parse the next address. |
1,922 | def count_lines_in_file(self, fname=):
i = 0
if fname == :
fname = self.fullname
try:
with codecs.open(fname, "r",encoding=, errors=) as f:
for i, _ in enumerate(f):
pass
return i + 1
ex... | Count the number of lines in a file (defaults to self.fullname) and return the count as an int. |
1,923 | def write_default_config(self, filename):
try:
with open(filename, ) as file:
file.write(DEFAULT_CONFIG)
return True
except (IOError, OSError) as e:
print( % (filename, e.strerror or e), file=sys.stderr)
return False | Write the default config file. |
1,924 | def report(self, score_map, type="valid", epoch=-1, new_best=False):
type_str = type
if len(type_str) < 5:
type_str += " " * (5 - len(type_str))
info = " ".join("%s=%.2f" % el for el in score_map.items())
current_epoch = epoch if epoch > 0 else self.current_epoch()
... | Report the scores and record them in the log. |
1,925 | def resize(self, width, height, **kwargs):
opts = Image._normalize_options(kwargs)
size = self._get_size(width, height)
if opts["mode"] == "adapt":
self._adapt(size, opts)
elif opts["mode"] == "clip":
self._clip(size, opts)
elif opts["mode"] == "f... | Resizes the image to the supplied width/height. Returns the
instance. Supports the following optional keyword arguments:
mode - The resizing mode to use, see Image.MODES
filter - The filter to use: see Image.FILTERS
background - The hexadecimal background fill color, RGB or ARGB
... |
def MobileDeviceApplication(self, data=None, subset=None):
    """Return a MobileDeviceApplication object via the object factory.

    :param data: Optional data forwarded to the factory lookup.
    :param subset: Optional subset of the object's data to request.
    :return: Whatever ``factory.get_object`` produces for this type.
    """
    obj_cls = jssobjects.MobileDeviceApplication
    return self.factory.get_object(obj_cls, data, subset)
1,927 | def get_string_resources(self, package_name, locale=):
self._analyse()
buff =
buff +=
try:
for i in self.values[package_name][locale]["string"]:
if any(map(i[1].__contains__, )):
value = % i[1]
else:
... | Get the XML (as string) of all resources of type 'string'.
Read more about string resources:
https://developer.android.com/guide/topics/resources/string-resource.html
:param package_name: the package name to get the resources for
:param locale: the locale to get the resources for (defa... |
1,928 | def _get_data_by_field(self, field_number):
if not self.is_data_loaded:
self._import_data()
if not 0 <= field_number < self._num_of_fields:
raise ValueError("Field number should be between 0-%d" % self._num_of_fields)
return self._data[field_number] | Return a data field by field number.
This is a useful method to get the values for fields that Ladybug
currently doesn't import by default. You can find list of fields by typing
EPWFields.fields
Args:
field_number: a value between 0 to 34 for different available epw fields.... |
def check_messages(*messages: str) -> Callable:
    """Decorator that records which messages a checker method handles.

    The message ids are attached to the function as ``checks_msgs``;
    the function itself is returned unmodified (no wrapping).
    """
    def _attach(func):
        func.checks_msgs = messages
        return func
    return _attach
1,930 | def create(self):
steps = [
(self.create_role, (), {}),
(self.create_vpc, (), {}),
(self.create_cluster, (), {}),
(self.create_node_group, (), {}),
(self.create_spot_nodes, (), {}),
(self.create_utilities, (), {}),
]
... | Deploy a cluster on Amazon's EKS Service configured
for Jupyterhub Deployments. |
1,931 | def _submit_metrics(self, metrics, metric_name_and_type_by_property):
for metric in metrics:
if (
metric.name not in metric_name_and_type_by_property
and metric.name.lower() not in metric_name_and_type_by_property
):
... | Resolve metric names and types and submit it. |
1,932 | def netconf_config_change_changed_by_server_or_user_server_server(self, **kwargs):
config = ET.Element("config")
netconf_config_change = ET.SubElement(config, "netconf-config-change", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications")
changed_by = ET.SubElement(netconf_conf... | Auto Generated Code |
1,933 | def hr_dp996(self, value=None):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
.format(value))
self._hr_dp996 = value | Corresponds to IDD Field `hr_dp996`
humidity ratio, calculated at standard atmospheric pressure
at elevation of station, corresponding to
Dew-point temperature corresponding to 99.6% annual cumulative
frequency of occurrence (cold conditions)
Args:
value (float): val... |
def items(self):
    """Return a version-appropriate view/iterator over the dict's items.

    On Python 2.7: a set-like view (``viewitems``).
    On Python 2.6: an iterator (``iteritems``).

    NOTE(review): on Python 3 this delegates to ``self.items`` — unless a
    subclass/base class overrides ``items`` this recurses infinitely;
    confirm the intended method-resolution order.
    """
    # `ver` is assumed to be a module-level (major, minor) version tuple
    # — TODO confirm where it is defined.
    if ver >= (3, 0):
        return self.items()
    if ver == (2, 7):
        return self.viewitems()
    if ver == (2, 6):
        return self.iteritems()
1,935 | def _read_image_slice(self, arg):
if not in self._info:
raise ValueError("Attempt to slice empty extension")
if isinstance(arg, slice):
return self._read_image_slice((arg,))
if not isinstance(arg, tuple):
raise ValueError("arguments mu... | workhorse to read a slice |
1,936 | def _push_condition(predicate):
global _depth
_check_under_condition()
_depth += 1
if predicate is not otherwise and len(predicate) > 1:
raise PyrtlError()
_conditions_list_stack[-1].append(predicate)
_conditions_list_stack.append([]) | As we enter new conditions, this pushes them on the predicate stack. |
1,937 | def validate(self, ticket=None):
qs = self.filter(validation__isnull=True).check_groupable()
if qs.count() == 0:
return []
qs.order_by(, )._assign_numbers()
return qs._validate(ticket) | Validates all receipts matching this queryset.
Note that, due to how AFIP implements its numbering, this method is not
thread-safe, or even multiprocess-safe.
Because of this, it is possible that not all instances matching this
queryset are validated properly. Obviously, only successfu... |
1,938 | def ssh_check_mic(self, mic_token, session_id, username=None):
self._session_id = session_id
self._username = username
if username is not None:
mic_field = self._ssh_build_mic(
self._session_id,
self._username,
sel... | Verify the MIC token for a SSH2 message.
:param str mic_token: The MIC token received from the client
:param str session_id: The SSH session ID
:param str username: The name of the user who attempts to login
:return: None if the MIC check was successful
:raises: ``sspi.error`` -... |
1,939 | def check_roles(self, account, aws_policies, aws_roles):
self.log.debug(.format(account.account_name))
max_session_duration = self.dbconfig.get(, self.ns, 8) * 60 * 60
sess = get_aws_session(account)
iam = sess.client()
account_roles = copy.deepcopy(self.cfg_ro... | Iterate through the roles of a specific account and create or update the roles if they're missing or
does not match the roles from Git.
Args:
account (:obj:`Account`): The account to check roles on
aws_policies (:obj:`dict` of `str`: `dict`): A dictionary containing all the poli... |
1,940 | def updateCurrentValue(self, value):
xsnap = None
ysnap = None
if value != self.endValue():
xsnap = self.targetObject().isXSnappedToGrid()
ysnap = self.targetObject().isYSnappedToGrid()
self.targetObject().setXSnapToGrid(False)
... | Disables snapping during the current value update to ensure a smooth
transition for node animations. Since this can only be called via
code, we don't need to worry about snapping to the grid for a user. |
1,941 | def min_rank(series, ascending=True):
ranks = series.rank(method=, ascending=ascending)
return ranks | Equivalent to `series.rank(method='min', ascending=ascending)`.
Args:
series: column to rank.
Kwargs:
ascending (bool): whether to rank in ascending order (default is `True`). |
1,942 | def parse_sidebar(self, user_page):
user_info = {}
except:
if not self.session.suppress_parse_exceptions:
raise
return user_info | Parses the DOM and returns user attributes in the sidebar.
:type user_page: :class:`bs4.BeautifulSoup`
:param user_page: MAL user page's DOM
:rtype: dict
:return: User attributes
:raises: :class:`.InvalidUserError`, :class:`.MalformedUserPageError` |
def process_request(self, request, client_address):
    """Serve a single request synchronously, then release its resources."""
    self.finish_request(request, client_address)
    self.shutdown_request(request)
1,944 | def get_start_and_end_time(self, ref=None):
now = time.localtime(ref)
if self.syear == 0:
self.syear = now.tm_year
day_start = find_day_by_weekday_offset(self.syear, self.smon, self.swday, self.swday_offset)
start_time = get_start_of_day(self.syear, self.smon, day_s... | Specific function to get start time and end time for MonthWeekDayDaterange
:param ref: time in seconds
:type ref: int | None
:return: tuple with start and end time
:rtype: tuple |
1,945 | def cmd(send, msg, args):
args[].query(Permissions).update({"registered": False})
args[].get_admins()
send("Verified admins reset.") | Clears the verified admin list
Syntax: {command} |
def get_context(self, url, expiration):
    """Fetch the feed and build the template context.

    The raw feed is cached on ``self._feed``; the returned context maps
    the configured context name to the formatted feed content.
    """
    feed = self.get(url, expiration)
    self._feed = feed
    return {self.feed_context_name: self.format_feed_content(feed)}
1,947 | def gdaldem_mem_ma(ma, ds=None, res=None, extent=None, srs=None, processing=, returnma=False, computeEdges=False):
if ds is None:
ds = mem_ds(res, extent, srs=None, dtype=gdal.GDT_Float32)
else:
ds = mem_ds_copy(ds)
b = ds.GetRasterBand(1)
b.WriteArray(ma)
out = gdaldem_mem_ds(d... | Wrapper to allow gdaldem calculations for arbitrary NumPy masked array input
Untested, work in progress placeholder
Should only need to specify res, can caluclate local gt, cartesian srs |
1,948 | def check_file(self, filename):
can_read = super(SecuredConfig, self).check_file(filename)
if not can_read:
return False
mode = get_stat(filename).st_mode
if (mode & stat.S_IRGRP) or (mode & stat.S_IROTH):
msg = "File %r is not secure enough. Ch... | Overrides :py:meth:`.Config.check_file` |
def get_subnets_count(context, filters=None):
    """Return the number of subnets visible to the requesting user.

    The result depends on the identity of the user making the request
    (as indicated by the context) as well as any filters.

    :param context: neutron api request context
    :param filters: optional dictionary with keys that are valid filter
        keys for a subnet
    :returns: integer subnet count
    """
    # Lazy %-style args: formatting only happens if the record is emitted.
    LOG.info("get_subnets_count for tenant %s with filters %s",
             context.tenant_id, filters)
    # Bug fix: `filters` defaults to None, and `**None` raises TypeError.
    return db_api.subnet_count_all(context, **(filters or {}))
def fix_repeat_dt(dt_list, offset_s=0.001):
    """Nudge duplicate consecutive timestamps forward so all are unique.

    Each duplicate (equal to its predecessor) is shifted by ``offset_s``
    seconds, repeatedly, until no consecutive pair is equal. Needed for
    xarray interp, which expects monotonically increasing times.

    :param dt_list: array of datetime objects (modified in place)
    :param offset_s: offset in seconds added to each duplicate
    :return: the same array, with duplicates offset
    """
    bump = timedelta(seconds=offset_s)
    while True:
        dup_mask = np.diff(dt_list) == timedelta(0)
        if not np.any(dup_mask):
            break
        # Shift the *second* element of every duplicate pair forward.
        dt_list[dup_mask.nonzero()[0] + 1] += bump
    return dt_list
1,951 | def run_timeit(self, stmt, setup):
_timer = timeit.Timer(stmt=stmt, setup=setup)
trials = _timer.repeat(self.timeit_repeat, self.timeit_number)
self.time_average_seconds = sum(trials) / len(trials) / self.timeit_number
time_avg = convert_time_units(self.time_average_sec... | Create the function call statement as a string used for timeit. |
1,952 | def password_get(username=None):
password = keyring.get_password(, username)
if password is None:
split_username = tuple(username.split())
msg = ("Couldnascii') | Retrieves a password from the keychain based on the environment and
configuration parameter pair.
If this fails, None is returned. |
1,953 | def status_line(self):
date = self.date_published
status = self.state.title()
if self.state == self.DRAFT:
status = "Draft saved"
date = self.last_save
if date and self.last_save == self.last_scheduled:
... | Returns a status line for an item.
Only really interesting when called for a draft
item as it can tell you if the draft is the same as
another version. |
1,954 | def autoparal_run(self):
policy = self.manager.policy
if policy.autoparal == 0:
logger.info("Nothing to do in autoparal, returning (None, None)")
return 0
if policy.autoparal != 1:
raise NotImplementedError("autoparal != 1")
... | Find an optimal set of parameters for the execution of the task
This method can change the ABINIT input variables and/or the
submission parameters e.g. the number of CPUs for MPI and OpenMp.
Set:
self.pconfs where pconfs is a :class:`ParalHints` object with the configuration reported... |
1,955 | def from_pubkey(cls: Type[CRCPubkeyType], pubkey: str) -> CRCPubkeyType:
hash_root = hashlib.sha256()
hash_root.update(base58.b58decode(pubkey))
hash_squared = hashlib.sha256()
hash_squared.update(hash_root.digest())
b58_checksum = ensure_str(base58.b58encode(hash_square... | Return CRCPubkey instance from public key string
:param pubkey: Public key
:return: |
1,956 | def get_output(src):
output =
lines = open(src.path, ).readlines()
for line in lines:
m = re.match(config.import_regex,line)
if m:
include_path = os.path.abspath(src.dir + + m.group());
if include_path not in config.sources:
script = Script(incl... | parse lines looking for commands |
def do_transition_for(brain_or_object, transition):
    """Perform a workflow transition for the passed-in object.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :param transition: id of the workflow transition to perform
    :returns: The object where the transition was performed
    """
    if not isinstance(transition, basestring):
        # Bug fix: the original format string had no placeholder, so the
        # `%` itself raised TypeError instead of reporting the bad type.
        fail("Transition type needs to be string, got %s" % type(transition))
    obj = get_object(brain_or_object)
    ploneapi.content.transition(obj, transition)
    return obj
1,958 | def as_string(value):
if six.PY2:
buffer_types = buffer, memoryview
else:
buffer_types = memoryview
if value is None:
return u
elif isinstance(value, buffer_types):
return bytes(value).decode(, )
elif isinstance(value, bytes):
return value.decode(, )
... | Convert a value to a Unicode object for matching with a query.
None becomes the empty string. Bytestrings are silently decoded. |
def expand_cause_repertoire(self, new_purview=None):
    """Expand this concept's cause repertoire over ``new_purview``.

    See |Subsystem.expand_repertoire()|.
    """
    repertoire = self.cause.repertoire
    return self.subsystem.expand_cause_repertoire(repertoire, new_purview)
def visibleCount(self):
    """Return the number of visible (non-hidden) items in this list.

    :return: <int>
    """
    total = 0
    for index in range(self.count()):
        if not self.item(index).isHidden():
            total += 1
    return total
def fit_predict(self, y, exogenous=None, n_periods=10, **fit_args):
    """Fit the estimator to ``y`` and forecast ``n_periods`` ahead.

    Parameters
    ----------
    y : array-like or iterable, shape=(n_samples,)
        The time-series to which to fit the estimator.
    exogenous : array-like, optional
        Exogenous variables, used both for fitting and prediction.
    n_periods : int, optional (default=10)
        Number of future periods to forecast.
    **fit_args
        Extra keyword arguments forwarded to ``fit``.
    """
    # Convenience wrapper: fit, then immediately forecast.
    self.fit(y, exogenous, **fit_args)
    return self.predict(n_periods=n_periods, exogenous=exogenous)
1,962 | def REV(self, params):
Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params)
self.check_arguments(low_registers=(Ra, Rb))
def REV_func():
self.register[Ra] = ((self.register[Rb] & 0xFF000000) >> 24) | \
((self.register... | REV Ra, Rb
Reverse the byte order in register Rb and store the result in Ra |
1,963 | def get_schema_input_format(self, **kwargs):
config = ET.Element("config")
get_schema = ET.Element("get_schema")
config = get_schema
input = ET.SubElement(get_schema, "input")
format = ET.SubElement(input, "format")
format.text = kwargs.pop()
callback = ... | Auto Generated Code |
1,964 | def execute(self, points, *args, **kwargs):
if isinstance(self.model, OrdinaryKriging) or \
isinstance(self.model, OrdinaryKriging3D):
prediction, variance = \
self.model.execute(,
n_closest_points=self.n_closest_po... | Parameters
----------
points: dict
Returns:
-------
Prediction array
Variance array |
1,965 | def call_ck(i):
import subprocess
import re
action=i.get(,)
if action==:
return {:1, :}
if not re.match(, action):
return {:1, :}
fd, fn=tempfile.mkstemp(suffix=, prefix=)
os.close(fd)
dc=i.get(,)
if dc==: i[]=
rr={:0}
rr[]=
... | Input: {
Input for CK
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
(stdout) - stdout, if available
(stderr) - std... |
1,966 | def instanceStarted(self, *args, **kwargs):
return self._makeApiCall(self.funcinfo["instanceStarted"], *args, **kwargs) | Report an instance starting
An instance will report in by giving its instance id as well
as its security token. The token is given and checked to ensure
that it matches a real token that exists to ensure that random
machines do not check in. We could generate a different token
... |
1,967 | def instancelist(obj_list, check=False, shared_attrs=None):
class InstanceList_(object):
def __init__(self, obj_list, shared_attrs=None):
self._obj_list = []
self._shared_public_attrs = []
self._example_type = None
if len(obj_list) > 0:
i... | Executes methods and attribute calls on a list of objects of the same type
Bundles a list of object of the same type into a single object.
The new object contains the same functions as each original object
but applies them to each element of the list independantly when called.
CommandLine:
pyt... |
1,968 | def _head(self, uri):
resp, resp_body = self.api.method_head(uri)
return resp | Handles the communication with the API when performing a HEAD request
on a specific resource managed by this class. Returns the headers
contained in the response. |
1,969 | def _get_and_assert_slice_param(url_dict, param_name, default_int):
param_str = url_dict[].get(param_name, default_int)
try:
n = int(param_str)
except ValueError:
raise d1_common.types.exceptions.InvalidRequest(
0,
.format(
param_name, param_str
... | Return ``param_str`` converted to an int.
If str cannot be converted to int or int is not zero or positive, raise
InvalidRequest. |
1,970 | def inputs(ctx, client, revision, paths):
r
from renku.models.provenance import ProcessRun
graph = Graph(client)
paths = set(paths)
nodes = graph.build(revision=revision)
commits = {node.commit for node in nodes}
candidates = {(node.commit, node.path)
for node in nodes if... | r"""Show inputs files in the repository.
<PATHS> Files to show. If no files are given all input files are shown. |
1,971 | def _expon_solve_lam_from_mu(mu, b):
def lam_eq(lam, mu, b):
lam, mu, b = Decimal(lam), Decimal(mu), Decimal(b)
return ( (1 - (lam*b + 1) * np.exp(-lam*b)) /
(lam - lam * np.exp(-lam*b) + Decimal(1e-32)) - mu )
return optim.brentq(lam_eq, -100, 100, args=(mu, b),... | For the expon_uptrunc, given mu and b, return lam.
Similar to geom_uptrunc |
1,972 | def GaussianLogDensity(x, mu, log_var, name=, EPSILON = 1e-6):
c = mx.sym.ones_like(log_var)*2.0 * 3.1416
c = mx.symbol.log(c)
var = mx.sym.exp(log_var)
x_mu2 = mx.symbol.square(x - mu)
x_mu2_over_var = mx.symbol.broadcast_div(x_mu2, var + EPSILON)
log_prob = -0.5 * (c + log_var + x_mu2_... | GaussianLogDensity loss calculation for layer wise loss |
1,973 | def size(self):
if self._size is None:
self._size = 0
for csv_file in self.files:
self._size += sum(1 if line else 0 for line in _util.open_local_or_gcs(csv_file, ))
return self._size | The size of the schema. If the underlying data source changes, it may be outdated. |
def reflectance(self, band):
    """Compute at-satellite (top-of-atmosphere) reflectance for a band.

    :param band: An optical band, i.e. 1-5, 7 (band 6 is rejected).
    :return: At-satellite reflectance, [-]
    :raises ValueError: if band 6 is requested.
    """
    if band == 6:
        raise ValueError()
    band_radiance = self.radiance(band)
    esun = self.ex_atm_irrad[band - 1]
    # TOA reflectance: pi * L * d^2 / (ESUN * cos(solar zenith))
    return (pi * band_radiance * self.earth_sun_dist ** 2) / (
        esun * cos(self.solar_zenith_rad))
1,975 | def _scrollView( self, value ):
if self._scrolling:
return
view_bar = self.uiGanttVIEW.verticalScrollBar()
self._scrolling = True
view_bar.setValue(value)
self._scrolling = False | Updates the gantt view scrolling to the inputed value.
:param value | <int> |
1,976 | def _init_db(self):
with self._get_db() as db:
with open(self.schemapath) as f:
db.cursor().executescript(f.read())
db.commit() | Creates the database tables. |
def timestampFormat(self, timestampFormat):
    """Setter for ``_timestampFormat``, the formatting string used when
    converting timestamps to ``QtCore.QDateTime``.

    Args:
        timestampFormat (str): format string assigned to ``_timestampFormat``.

    Raises:
        TypeError: if ``timestampFormat`` is not a str.  (The original
        docstring claimed AssertionError, but the code raises TypeError.)
    """
    if not isinstance(timestampFormat, str):
        # Improvement: include the offending type; the original raised a
        # bare TypeError() with no message, which is hard to debug.
        raise TypeError(
            "timestampFormat must be a str, got %r" % type(timestampFormat))
    self._timestampFormat = timestampFormat
1,978 | def WriteSignedBinaryBlobs(binary_urn,
blobs,
token = None):
if _ShouldUseLegacyDatastore():
aff4.FACTORY.Delete(binary_urn, token=token)
with data_store.DB.GetMutationPool() as mutation_pool:
with aff4.FACTORY.Create(
binary_urn,
... | Saves signed blobs to the datastore.
If a signed binary with the given URN already exists, its contents will get
overwritten.
Args:
binary_urn: RDFURN that should serve as a unique identifier for the binary.
blobs: An Iterable of signed blobs to write to the datastore.
token: ACL token to use with t... |
def list_to_string(input, delimiter):
    """Recursively join a (possibly nested) list of strings.

    Nested lists are flattened depth-first, joining every level with the
    same delimiter; a non-list value is returned unchanged.

    :param input: a list of strings and lists of strings (recursively)
    :type input: list
    :param delimiter: the delimiter to use when joining the items
    :type delimiter: str
    :returns: the recursively joined list
    :rtype: str
    """
    if not isinstance(input, list):
        # Base case: leave non-list values (typically strings) untouched.
        return input
    joined_parts = (list_to_string(element, delimiter) for element in input)
    return delimiter.join(joined_parts)
1,980 | def get_axes(process_or_domain):
if isinstance(process_or_domain, Process):
dom = process_or_domain.domains
else:
dom = process_or_domain
if isinstance(dom, _Domain):
return dom.axes
elif isinstance(dom, dict):
axes = {}
for thisdom in list(dom.values()):
... | Returns a dictionary of all Axis in a domain or dictionary of domains.
:param process_or_domain: a process or a domain object
:type process_or_domain: :class:`~climlab.process.process.Process` or
:class:`~climlab.domain.domain._Domain`
:raises: :exc: `TypeE... |
1,981 | def findall(obj, prs, forced_type=None,
cls=anyconfig.models.processor.Processor):
if (obj is None or not obj) and forced_type is None:
raise ValueError("The first argument or the second argument "
" must be something other than "
"None or ... | :param obj:
a file path, file, file-like object, pathlib.Path object or an
'anyconfig.globals.IOInfo` (namedtuple) object
:param prs: A list of :class:`anyconfig.models.processor.Processor` classes
:param forced_type:
Forced processor type of the data to process or ID of the processor
... |
1,982 | def find_file(folder, filename):
matches = []
if os.path.isabs(filename) and os.path.isfile(filename):
return filename
for root, _, filenames in os.walk(folder):
for fn in fnmatch.filter(filenames, filename):
matches.append(os.path.join(root, fn))
if not matches:
... | Find a file given folder and filename. If the filename can be
resolved directly returns otherwise walks the supplied folder. |
1,983 | def define_snowflake_config():
account = Field(
String,
description=,
is_optional=True,
)
user = Field(String, description=, is_optional=False)
password = Field(String, description=, is_optional=False)
database = Field(
String,
description=,
i... | Snowflake configuration.
See the Snowflake documentation for reference:
https://docs.snowflake.net/manuals/user-guide/python-connector-api.html |
def stack_decoders(self, *layers):
    """Stack decoding layers onto the network and record them.

    Layers go through the generic ``stack`` call and are also appended to
    ``decoding_layers`` so they can be referenced later.
    """
    self.stack(*layers)
    self.decoding_layers.extend(layers)
1,985 | def parse(text: str) -> Docstring:
ret = Docstring()
if not text:
return ret
text = inspect.cleandoc(text)
match = _titles_re.search(text)
if match:
desc_chunk = text[: match.start()]
meta_chunk = text[match.start() :]
else:
desc_chunk = text
... | Parse the Google-style docstring into its components.
:returns: parsed docstring |
1,986 | def from_pandas(df, value=, x=, y=, cellx=None, celly=None, xmin=None, ymax=None,
geot=None, nodata_value=None, projection=None, datatype=None):
if not cellx:
cellx = (df.sort_values(x)[x]-df.sort_values(x).shift(1)[x]).max()
if not celly:
celly = (df.sort_values(y, ascendin... | Creates a GeoRaster from a Pandas DataFrame. Useful to plot or export data to rasters.
Usage:
raster = from_pandas(df, value='value', x='x', y='y', cellx= cellx, celly=celly,
xmin=xmin, ymax=ymax, geot=geot, nodata_value=ndv,
projection=projection, d... |
def rget(d, key):
    """Recursively look up a dotted key in nested dicts.

    For example ``rget(d, 'a.b.c')`` returns ``d['a']['b']['c']``;
    ``None`` is returned when any level is missing or not a dict.

    :param d: the (possibly nested) dict to search
    :param key: a dotted string such as ``'a.b.c'``, or a list of keys
    :returns: the nested value, or None if it does not exist
    """
    if not isinstance(d, dict):
        return None
    assert isinstance(key, str) or isinstance(key, list)
    # Bug fix: split on '.' as the docstring requires — str.split() with
    # no argument splits on whitespace and would never match 'a.b.c'.
    keys = key.split('.') if isinstance(key, str) else key
    if not keys:
        # Guard: an empty key list has nothing left to look up.
        return None
    first, rest = keys[0], keys[1:]
    return rget(d.get(first), rest) if rest else d.get(first)
def make_app(config=None):
    """Factory function that creates a new ``CoolMagicApplication``.

    Wraps the application with static-file serving for ``/public`` and
    the local manager middleware. Optional WSGI middlewares should be
    applied here.
    """
    app = CoolMagicApplication(config or {})
    public_dir = path.join(path.dirname(__file__), "public")
    app = SharedDataMiddleware(app, {"/public": public_dir})
    return local_manager.make_middleware(app)
1,989 | def verify_tree_consistency(self, old_tree_size: int, new_tree_size: int,
old_root: bytes, new_root: bytes,
proof: Sequence[bytes]):
old_size = old_tree_size
new_size = new_tree_size
if old_size < 0 or new_size < 0:
... | Verify the consistency between two root hashes.
old_tree_size must be <= new_tree_size.
Args:
old_tree_size: size of the older tree.
new_tree_size: size of the newer_tree.
old_root: the root hash of the older tree.
new_root: the root hash of the newer tr... |
1,990 | def get_firewall_rules(self, server):
server_uuid, server_instance = uuid_and_instance(server)
url = .format(server_uuid)
res = self.get_request(url)
return [
FirewallRule(server=server_instance, **firewall_rule)
for firewall_rule in res[][]
] | Return all FirewallRule objects based on a server instance or uuid. |
def registerPolling(self, fd, options = POLLING_IN|POLLING_OUT, daemon = False):
    """Register a file descriptor for polling.

    :param fd: file descriptor or socket object
    :param options: bit mask flags; the polling object should ignore
        incompatible flags.
    :param daemon: daemon flag forwarded to the polling object
        (NOTE(review): exact semantics defined by the polling backend —
        confirm).
    """
    self.polling.register(fd, options, daemon)
1,992 | def _add_docstring(format_dict):
def add_docstring_context(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.__doc__ = func.__doc__.format(**format_dict)
return wrapper
return add_docstring_context | Format a doc-string on the fly.
@arg format_dict: A dictionary to format the doc-strings
Example:
@add_docstring({'context': __doc_string_context})
def predict(x):
'''
{context}
>> model.predict(data)
'''
return x |
1,993 | def _execute(job, f, o=None):
out = ctypes.create_string_buffer(RS_JOB_BLOCKSIZE)
while True:
block = f.read(RS_JOB_BLOCKSIZE)
buff = Buffer()
buff.next_in = ctypes.c_char_p(block)
buff.avail_in = ctypes.c_size_t(len(block))
buff.eof_in = ctypes.c_... | Executes a librsync "job" by reading bytes from `f` and writing results to
`o` if provided. If `o` is omitted, the output is ignored. |
1,994 | def _complete_exit(self, cmd, args, text):
if args:
return
return [ x for x in { , , } \
if x.startswith(text) ] | Find candidates for the 'exit' command. |
1,995 | def pointwise_free_energies(self, therm_state=None):
r
assert self.therm_energies is not None, \
d before pointwise free energies can be calculated.'
if therm_state is not None:
assert therm_state<=self.nthermo
mu = [_np.zeros(d.shape[0], dtype=_np.float64) for d ... | r"""
Computes the pointwise free energies :math:`-\log(\mu^k(x))` for all points x.
:math:`\mu^k(x)` is the optimal estimate of the Boltzmann distribution
of the k'th ensemble defined on the set of all samples.
Parameters
----------
therm_state : int or None, default=No... |
1,996 | def warning(f, *args, **kwargs):
kwargs.update({: logging.WARNING})
return _stump(f, *args, **kwargs) | Automatically log progress on function entry and exit. Default logging
value: warning.
*Logging with values contained in the parameters of the decorated function*
Message (args[0]) may be a string to be formatted with parameters passed to
the decorated function. Each '{varname}' will be replaced by the... |
1,997 | def get_mcu_definition(self, project_file):
project_file = join(getcwd(), project_file)
coproj_dic = xmltodict.parse(file(project_file), dict_constructor=dict)
mcu = MCU_TEMPLATE
IROM1_index = self._coproj_find_option(coproj_dic[][][][][][], , )
IROM2_index = self._cop... | Parse project file to get mcu definition |
1,998 | def close (self, force=True):
if not self.closed:
self.flush()
os.close (self.child_fd)
time.sleep(self.delayafterclose)
if self.isalive():
if not self.terminate(force):
raise ExceptionPexpect ()
self.... | This closes the connection with the child application. Note that
calling close() more than once is valid. This emulates standard Python
behavior with files. Set force to True if you want to make sure that
the child is terminated (SIGKILL is sent if the child ignores SIGHUP
and SIGINT). |
1,999 | def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
import traceback
traceback.print_stack()
log = file if hasattr(file, ) else sys.stderr
settings.write(warnings.formatwarning(message, category, filename, lineno, line)) | Get full tracebacks when warning is raised by setting
warnings.showwarning = warn_with_traceback
See also
--------
http://stackoverflow.com/questions/22373927/get-traceback-of-warnings |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.