Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
384,200 | def _validate_contains(self, expected_values, field, value):
if not isinstance(value, Iterable):
return
if not isinstance(expected_values, Iterable) or isinstance(
expected_values, _str_type
):
expected_values = set((expected_values,))
else:
... | {'empty': False } |
384,201 | def add_to_group(server_context, user_ids, group_id, container_path=None):
return __make_security_group_api_request(server_context, , user_ids, group_id, container_path) | Add user to group
:param server_context: A LabKey server context. See utils.create_server_context.
:param user_ids: users to add
:param group_id: to add to
:param container_path:
:return: |
def unsubscribe_from_data(self, subscriber: Callable[[bytes], bool]) -> None:
    """Deregister a previously added data subscriber.

    Not thread-safe; the caller must serialize access to the subscriber
    registry.  Raises ValueError if *subscriber* was never registered
    (assuming ``_data_subscribers`` is a list -- TODO confirm).
    """
    subscribers = self._data_subscribers
    subscribers.remove(subscriber)
384,203 | def import_string(import_name, silent=False):
import_name = str(import_name).replace(, )
try:
try:
__import__(import_name)
except ImportError:
if not in import_name:
raise
else:
return sys.modules[import_n... | Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If `silent` is True th... |
def create_new_file(help_string=NO_HELP, default=NO_DEFAULT, suffixes=None):
    """Create a parameter describing a new file to be created.

    :param help_string: human-readable help text for the parameter
    :param default: default value (``NO_DEFAULT`` means none supplied)
    :param suffixes: optional collection of allowed file suffixes
    :return: a ``ParamFilename`` with ``type_name`` set to ``"new_file"``
    """
    return ParamFilename(
        type_name="new_file",
        help_string=help_string,
        default=default,
        suffixes=suffixes,
    )
384,205 | def setup_auditlog_catalog(portal):
logger.info("*** Setup Audit Log Catalog ***")
catalog_id = auditlog_catalog.CATALOG_AUDITLOG
catalog = api.get_tool(catalog_id)
for name, meta_type in auditlog_catalog._indexes.iteritems():
indexes = catalog.indexes()
if name in indexes:
... | Setup auditlog catalog |
384,206 | def query_saved_guest_screen_info(self, screen_id):
if not isinstance(screen_id, baseinteger):
raise TypeError("screen_id can only be an instance of type baseinteger")
(origin_x, origin_y, width, height, enabled) = self._call("querySavedGuestScreenInfo",
in_p=[s... | Returns the guest dimensions from the saved state.
in screen_id of type int
Saved guest screen to query info from.
out origin_x of type int
The X position of the guest monitor top left corner.
out origin_y of type int
The Y position of the guest monitor top... |
384,207 | def validate_wavelengths(wavelengths):
if isinstance(wavelengths, u.Quantity):
units.validate_wave_unit(wavelengths.unit)
wave = wavelengths.value
else:
wave = wavelengths
if np.isscalar(wave):
wave = [wave]
wave = np.asarray(wave)
if np.any(wave <= 0):
... | Check wavelengths for ``synphot`` compatibility.
Wavelengths must satisfy these conditions:
* valid unit type, if given
* no zeroes
* monotonic ascending or descending
* no duplicate values
Parameters
----------
wavelengths : array-like or `~astropy.units.quantity.Quan... |
384,208 | def friends(self, delegate, params={}, extra_args=None):
return self.__get(, delegate, params,
txml.Statuses, extra_args=extra_args) | Get updates from friends.
Calls the delgate once for each status object received. |
def offset_data(data_section, offset, readable=False, wraparound=False):
    """Offset the whole data section.

    Applies ``offset_byte_in_data`` to each byte position; the position
    count is ``len(data_section) // 2`` (presumably two characters per
    byte in the hex-string representation -- TODO confirm).

    See ``offset_byte_in_data`` for parameter semantics.

    :return: the entire data section with the offset applied to each byte
    """
    # BUG FIX: the original used len(data_section) / 2, which yields a
    # float on Python 3 and makes range() raise TypeError.  Floor
    # division preserves the Python 2 semantics on both versions.
    for pos in range(0, len(data_section) // 2):
        data_section = offset_byte_in_data(
            data_section, offset, pos, readable, wraparound)
    return data_section
def map(self, func):
    """A lazy way to apply the given function to each element in the stream.

    Useful for type casting, e.g. ``count().map(float)`` turns an integer
    counter into a float counter.  Returns ``self`` so calls can be chained.
    """
    mapped = xmap(func, self._data)
    self._data = mapped
    return self
def class_variables(self):
    """Return all documented class variables in the class.

    A variable counts as documented when it is a ``Variable`` instance
    accepted by the owning module's ``_docfilter``.  The ``filter``
    object is returned as-is to keep the original return type.
    """
    def _is_documented(candidate):
        return (isinstance(candidate, Variable)
                and self.module._docfilter(candidate))
    return filter(_is_documented, self.doc.values())
384,212 | def GetSources(self, event):
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter(.format(
event.data_type))
source_long = getattr(event, , )
source_append = getattr(event, , None)
if source_append:
source_long = .format(source_long, source_append)
return se... | Determines the the short and long source for an event object.
Args:
event (EventObject): event.
Returns:
tuple(str, str): short and long source string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter. |
384,213 | def usearch(query, db, type, out, threads = , evalue = , alignment = , max_hits = 100, cluster = False):
if in os.environ:
usearch_loc = os.environ[]
else:
usearch_loc =
if os.path.exists(out) is False:
db = usearchdb(db, alignment, usearch_loc)
print( % (query, db), ... | run usearch |
384,214 | def _reregister_types(self):
for _type in self._register_types:
psycopg2.extensions.register_type(psycopg2.extensions.new_type(*_type)) | Registers existing types for a new connection |
384,215 | def unfreeze_extensions(self):
output_path = os.path.join(_registry_folder(), )
if not os.path.isfile(output_path):
raise ExternalError("There is no frozen extension list")
os.remove(output_path)
ComponentRegistry._frozen_extensions = None | Remove a previously frozen list of extensions. |
384,216 | async def run_asgi(self):
try:
result = await self.app(self.scope, self.asgi_receive, self.asgi_send)
except BaseException as exc:
self.closed_event.set()
msg = "Exception in ASGI application\n"
self.logger.error(msg, exc_info=exc)
if ... | Wrapper around the ASGI callable, handling exceptions and unexpected
termination states. |
384,217 | def read_config(args):
configfile = os.path.expanduser()
if os.path.isfile(configfile):
with open(configfile, ) as f:
config = toml.loads(f.read())
for key in config:
param = key.replace(, )
if not param in args or args[param] in [False, None]:
... | Read configuration options from ~/.shakedown (if exists)
:param args: a dict of arguments
:type args: dict
:return: a dict of arguments
:rtype: dict |
384,218 | def _register_client(self, client, region_name):
for item in client.meta.method_to_api_mapping:
method = getattr(client, item)
wrapped_method = functools.partial(self._wrap_client, region_name, method)
setattr(client, item, wrapped_method) | Uses functools.partial to wrap all methods on a client with the self._wrap_client method
:param botocore.client.BaseClient client: the client to proxy
:param str region_name: AWS Region ID (ex: us-east-1) |
384,219 | def _create_dmnd_database(self, unaligned_sequences_path, daa_output):
logging.debug("Building diamond database")
cmd = "diamond makedb --in -d " % (unaligned_sequences_path, daa_output)
extern.run(cmd) | Build a diamond database using diamond makedb
Parameters
----------
unaligned_sequences_path: str
path to a FASTA file containing unaligned sequences
daa_output: str
Name of output database. |
384,220 | def _ClientPathToString(client_path, prefix=""):
return os.path.join(prefix, client_path.client_id, client_path.vfs_path) | Returns a path-like String of client_path with optional prefix. |
384,221 | def get_composition_smart_repository_session(self, repository_id, proxy):
if repository_id is None:
raise NullArgument()
if not self.supports_composition_smart_repository():
raise Unimplemented()
try:
from . import sessions
except ImportError:... | Gets a composition smart repository session for the given
repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSmartRepositorySession) - a
CompositionSmartRepositorySession
... |
384,222 | def op_token(self, display_name, opt):
args = {
: opt.lease,
: display_name,
: token_meta(opt)
}
try:
token = self.create_token(**args)
except (hvac.exceptions.InvalidRequest,
hvac.exceptions.Forbidden) as vault_exc... | Return a properly annotated token for our use. This
token will be revoked at the end of the session. The token
will have some decent amounts of metadata tho. |
384,223 | def escape_dictionary(dictionary, datetime_format=):
for k, v in dictionary.iteritems():
if isinstance(v, datetime.datetime):
v = v.strftime(datetime_format)
if isinstance(v, basestring):
v = CoyoteDb.db_escape(str(v))
v = .format... | Escape dictionary values with keys as column names and values column values
@type dictionary: dict
@param dictionary: Key-values |
384,224 | def genargs() -> ArgumentParser:
parser = ArgumentParser()
parser.add_argument("infile", help="Input ShExC specification")
parser.add_argument("-nj", "--nojson", help="Do not produce json output", action="store_true")
parser.add_argument("-nr", "--nordf", help="Do not produce rdf output", action="s... | Create a command line parser
:return: parser |
def spam(self, msg, *args, **kw):
    """Log *msg* with level :data:`SPAM`.

    Arguments are interpreted as for :func:`logging.debug`.  Skipped
    entirely when the SPAM level is disabled for this logger.
    """
    if not self.isEnabledFor(SPAM):
        return
    self._log(SPAM, msg, args, **kw)
384,226 | def request_start(self):
self._queue.put(command_packet(CMD_START_STREAM))
_LOGGER.info()
self._source.run() | Indicate readiness to receive stream.
This is a blocking call. |
384,227 | def text(self, x, y, txt=):
"Output a string"
txt = self.normalize_text(txt)
if (self.unifontsubset):
txt2 = self._escape(UTF8ToUTF16BE(txt, False))
for uni in UTF8StringToArray(txt):
self.current_font[].append(uni)
else:
txt2 = self._e... | Output a string |
def parse_params(self, core_params):
    """Extract information about each parameter in *core_params*.

    :param core_params: a collection of ``botocore.parameters.Parameter``
        subclasses
    :returns: a list with ``self.parse_param``'s result for each
        parameter, in input order
    """
    return [self.parse_param(core_param) for core_param in core_params]
384,229 | def serialize_to_file(
root_processor,
value,
xml_file_path,
encoding=,
indent=None
):
serialized_value = serialize_to_string(root_processor, value, indent)
with open(xml_file_path, , encoding=encoding) as xml_file:
xml_file.write(serialized_v... | Serialize the value to an XML file using the root processor.
:param root_processor: Root processor of the XML document.
:param value: Value to serialize.
:param xml_file_path: Path to the XML file to which the serialized value will be written.
:param encoding: Encoding of the file.
:param indent: I... |
def step_a_new_working_directory(context):
    """Creates a new, empty working directory for the scenario.

    Ensures the ``workdir`` attribute exists on the behave context and
    that the working directory exists, then removes the directory tree.
    """
    command_util.ensure_context_attribute_exists(context, "workdir", None)
    command_util.ensure_workdir_exists(context)
    # NOTE(review): rmtree right after ensure_workdir_exists leaves no
    # directory behind -- presumably later steps recreate it on demand;
    # confirm against the step library's conventions.
    shutil.rmtree(context.workdir, ignore_errors=True)
def add_var_arg(self, arg):
    """Add a variable (or macro) argument to the condor job.

    The argument is added to the submit file and a different value can be
    set for each node in the DAG.

    @param arg: name of option to add.
    """
    index = self.__arg_index
    self.__args.append(arg)
    self.__job.add_var_arg(index)
    self.__arg_index = index + 1
384,232 | def Storage_clearDataForOrigin(self, origin, storageTypes):
assert isinstance(origin, (str,)
), "Argument must be of type str. Received type: " % type(
origin)
assert isinstance(storageTypes, (str,)
), "Argument must be of type str. Received type: " % type(
storageTypes)
subdom_funcs ... | Function path: Storage.clearDataForOrigin
Domain: Storage
Method name: clearDataForOrigin
Parameters:
Required arguments:
'origin' (type: string) -> Security origin.
'storageTypes' (type: string) -> Comma separated origin names.
No return value.
Description: Clears storage for origin. |
def are_forms_valid(self, forms):
    """Check whether every form defined in `form_classes` is valid.

    Short-circuits on the first invalid form, matching the original
    loop's behavior.
    """
    return all(form.is_valid() for form in six.itervalues(forms))
384,234 | def render_css_classes(self):
ret = []
if not self.enabled:
ret.append()
if self.draggable:
ret.append()
if self.collapsible:
ret.append()
if self.deletable:
ret.append()
ret += self.css_classes
return .join... | Return a string containing the css classes for the module.
>>> mod = DashboardModule(enabled=False, draggable=True,
... collapsible=True, deletable=True)
>>> mod.render_css_classes()
'dashboard-module disabled draggable collapsible deletable'
>>> mod.css_cl... |
def clearness_index_zenith_independent(clearness_index, airmass,
                                       max_clearness_index=2.0):
    """Calculate the zenith-angle-independent clearness index.

    Parameters
    ----------
    clearness_index : numeric
        Ratio of global to extraterrestrial irradiance on a horizontal
        plane.
    airmass : numeric
        Airmass.
    max_clearness_index : numeric, default 2.0
        Upper bound applied to the result.

    Returns
    -------
    numeric
        kt_prime, clamped to [0, max_clearness_index].
    """
    kt_prime = clearness_index / _kt_kt_prime_factor(airmass)
    # Equivalent to np.minimum(np.maximum(kt_prime, 0), max_clearness_index)
    return np.clip(kt_prime, 0, max_clearness_index)
384,236 | def remove(self, recursive=True, ignore_error=True):
try:
if recursive or self._cleanup == :
shutil.rmtree(self.path)
else:
os.rmdir(self.path)
except Exception as e:
if not ignore_error:
raise e | Remove the directory. |
384,237 | def network_create(provider, names, **kwargs):
salt192.168.100.0/24
client = _get_client()
return client.extra_action(provider=provider, names=names, action=, **kwargs) | Create private network
CLI Example:
.. code-block:: bash
salt minionname cloud.network_create my-nova names=['salt'] cidr='192.168.100.0/24' |
def get_points_within_r(center_points, target_points, r):
    r"""Get all target_points within a specified radius of a center point.

    All data must be in the same coordinate system, or you will get
    undetermined results.

    Parameters
    ----------
    center_points : (X, Y) ndarray
        Location(s) from which to grab surrounding points within r.
    target_points : (X, Y) ndarray
        Points from which to return if they are within r of center_points.
    r : numeric
        Search radius.

    Returns
    -------
    ndarray
        The matching target points, transposed.
    """
    kdtree = cKDTree(target_points)
    hit_indices = kdtree.query_ball_point(center_points, r)
    return kdtree.data[hit_indices].T
384,239 | def request(self, *args, **kwargs) -> XMLResponse:
r = super(XMLSession, self).request(*args, **kwargs)
return XMLResponse._from_response(r) | Makes an HTTP Request, with mocked User–Agent headers.
Returns a class:`HTTPResponse <HTTPResponse>`. |
384,240 | def _print_topics(self, header: str, cmds: List[str], verbose: bool) -> None:
import io
if cmds:
if not verbose:
self.print_topics(header, cmds, 15, 80)
else:
self.stdout.write(.format(str(header)))
widest = 0
... | Customized version of print_topics that can switch between verbose or traditional output |
384,241 | def getList(self, aspList):
objects = self._elements(self.SIG_OBJECTS, self.N, [0])
houses = self._elements(self.SIG_HOUSES, self.N, [0])
angles = self._elements(self.SIG_ANGLES, self.N, [0])
significators = objects + houses + angles
objects = ... | Returns a sorted list with all
primary directions. |
384,242 | def createPenStyleCti(nodeName, defaultData=0, includeNone=False):
displayValues=PEN_STYLE_DISPLAY_VALUES
configValues=PEN_STYLE_CONFIG_VALUES
if includeNone:
displayValues = [] + list(displayValues)
configValues = [None] + list(configValues)
return ChoiceCti(nodeName, defaultData,
... | Creates a ChoiceCti with Qt PenStyles.
If includeEmtpy is True, the first option will be None. |
384,243 | def identity(obj):
if hasattr(obj, ):
return mark_safe("{0},{1}".format(unlocalize(obj.pk),
get_revision_of_object(obj)))
else:
return mark_safe(unlocalize(obj.pk)) | returns a string representing "<pk>,<version>" of the passed object |
def getEyeToHeadTransform(self, eEye):
    """Return the transform from eye space to head space.

    Eye space is the per-eye flavor of head space that provides stereo
    disparity; instead of Model * View * Projection the sequence is
    Model * View * Eye^-1 * Projection.
    """
    get_transform = self.function_table.getEyeToHeadTransform
    return get_transform(eEye)
384,245 | def placebo_session(function):
@functools.wraps(function)
def wrapper(*args, **kwargs):
session_kwargs = {
: os.environ.get(, )
}
profile_name = os.environ.get(, None)
if profile_name:
session_kwargs[] = profile_name
session = boto3.Session(... | Decorator to help do testing with placebo.
Simply wrap the function you want to test and make sure to add
a "session" argument so the decorator can pass the placebo session.
Accepts the following environment variables to configure placebo:
PLACEBO_MODE: set to "record" to record AWS calls and save them
... |
384,246 | def get_instance(self, payload):
return CredentialListInstance(self._version, payload, account_sid=self._solution[], ) | Build an instance of CredentialListInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.sip.credential_list.CredentialListInstance
:rtype: twilio.rest.api.v2010.account.sip.credential_list.CredentialListInstance |
384,247 | def _bss_decomp_mtifilt(reference_sources, estimated_source, j, C, Cj):
filters_len = Cj.shape[-2]
s_true = _zeropad(reference_sources[j], filters_len - 1, axis=0)
e_spat = _project(reference_sources[j], Cj) - s_true
e_interf = _project(reference_sources, C) - s_true - e_spat
e_arti... | Decomposition of an estimated source image into four components
representing respectively the true source image, spatial (or filtering)
distortion, interference and artifacts, derived from the true source
images using multichannel time-invariant filters. |
384,248 | def tabulate_state_blocks(x, states, pos=None):
x = asarray_ndim(x, 1)
check_integer_dtype(x)
x = memoryview_safe(x)
switch_points, transitions, observations = state_transitions(x, states)
t = transitions[1:, 0]
o = observations[1:]
s1 = switch_points[:-1]
s2 = swi... | Construct a dataframe where each row provides information about continuous state blocks.
Parameters
----------
x : array_like, int
1-dimensional array of state values.
states : set
Set of states of interest. Any state value not in this set will be ignored.
pos : array_like, int, opt... |
384,249 | def rename(old_name, new_name):
with Session() as session:
try:
session.VFolder(old_name).rename(new_name)
print_done()
except Exception as e:
print_error(e)
sys.exit(1) | Rename the given virtual folder. This operation is irreversible!
You cannot change the vfolders that are shared by other users,
and the new name must be unique among all your accessible vfolders
including the shared ones.
OLD_NAME: The current name of a virtual folder.
NEW_NAME: The new name of a v... |
384,250 | def select_ipam_strategy(self, network_id, network_strategy, **kwargs):
LOG.info("Selecting IPAM strategy for network_id:%s "
"network_strategy:%s" % (network_id, network_strategy))
net_type = "tenant"
if STRATEGY.is_provider_network(network_id):
net_type =... | Return relevant IPAM strategy name.
:param network_id: neutron network id.
:param network_strategy: default strategy for the network.
NOTE(morgabra) This feels like a hack but I can't think of a better
idea. The root problem is we can now attach ports to networks with
a differe... |
384,251 | def inet_pton(address_family, ip_string):
global __inet_pton
if __inet_pton is None:
if hasattr(socket, ):
__inet_pton = socket.inet_pton
else:
from ospd import win_socket
__inet_pton = win_socket.inet_pton
return __inet_pton(address_family, ip_strin... | A platform independent version of inet_pton |
384,252 | def prepare_framework_container_def(model, instance_type, s3_operations):
deploy_image = model.image
if not deploy_image:
region_name = model.sagemaker_session.boto_session.region_name
deploy_image = fw_utils.create_image_uri(
region_name, model.__framework_name__, instance_type... | Prepare the framework model container information. Specify related S3 operations for Airflow to perform.
(Upload `source_dir`)
Args:
model (sagemaker.model.FrameworkModel): The framework model
instance_type (str): The EC2 instance type to deploy this Model to. For example, 'ml.p2.xlarge'.
... |
def post_info(self, name, message):
    """Asynchronously post a user-facing info message about a service.

    Args:
        name (string): The name of the service.
        message (string): The user-facing info message that will be
            stored for the service and can be queried later.
    """
    payload = _create_message(name, states.INFO_LEVEL, message)
    self.post_command(OPERATIONS.CMD_POST_MESSAGE, payload)
384,254 | def new_driver(browser_name, *args, **kwargs):
if browser_name == FIREFOX:
return webdriver.Firefox(*args, **kwargs)
elif browser_name == PHANTOMJS:
executable_path = os.path.join(os.path.dirname(_... | Instantiates a new WebDriver instance, determining class by environment variables |
def get_message_content(self):
    """Extract the payload from the Slap XML magic envelope.

    Reads the magic-envelope ``data`` element and base64url-decodes it.
    """
    data_text = self.doc.find(
        ".//{http://salmon-protocol.org/ns/magic-env}data").text
    payload = urlsafe_b64decode(data_text.encode("ascii"))
    logger.debug("diaspora.protocol.get_message_content: %s", payload)
    return payload
384,256 | def add_node(self, node):
new = ClusterNode.from_uri(node["addr"])
cluster_member = self.nodes[0]
check_new_nodes([new], [cluster_member])
new.meet(cluster_member.host, cluster_member.port)
self.nodes.append(new)
self.wait()
if node["role"] != "slave":... | Add a node to cluster.
:param node: should be formated like this
`{"addr": "", "role": "slave", "master": "master_node_id"} |
384,257 | def _cache_is_expired():
now = timezone.now()
timediff = TransCache.SINGLETON_CREATION_DATETIME - now
return (timediff.total_seconds() > TransCache.SINGLETON_EXPIRATION_MAX_SECONDS) | Indica si la caché está caducada |
384,258 | def set_features(self, filter_type):
elements_to_split = {: self.allpsms, : self.allpeps}
self.features = self.splitfunc(elements_to_split, self.ns, filter_type) | Calls splitter to split percolator output into target/decoy
elements.
Writes two new xml files with features. Currently only psms and
peptides. Proteins not here, since one cannot do protein inference
before having merged and remapped multifraction data anyway. |
384,259 | def _unescape_str(value):
if isinstance(value, int):
return "%d" % value
value = value.replace(r"\\", "\\")
for i, j in ts3_escape.items():
value = value.replace(j, i)
return value | Unescape a TS3 compatible string into a normal string
@param value: Value
@type value: string/int |
384,260 | def agent_version(self):
version = self.safe_data[][]
if version:
return client.Number.from_json(version)
else:
return None | Get the version of the Juju machine agent.
May return None if the agent is not yet available. |
384,261 | def status(self, build_record_id, **kwargs):
kwargs[] = True
if kwargs.get():
return self.status_with_http_info(build_record_id, **kwargs)
else:
(data) = self.status_with_http_info(build_record_id, **kwargs)
return data | Latest push result of BuildRecord.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
... |
384,262 | def build_defaults(self):
defaults = {}
for arg in self.args:
if not isinstance(arg, _BaseOpt):
raise errors.InvalidSchemeError()
if not isinstance(arg.default, NoDefault):
defaults[arg.name] = arg.default
... | Build a dictionary of default values from the `Scheme`.
Returns:
dict: The default configurations as set by the `Scheme`.
Raises:
errors.InvalidSchemeError: The `Scheme` does not contain
valid options. |
def guess_payload_class(self, payload):
    """Handle NTPv4 extensions and the MAC part (when authentication is used).

    A payload of exactly the MD5 tail size is the authenticator; anything
    longer carries NTP extensions; shorter payloads fall back to the
    default resolution.
    """
    tail_len = len(payload)
    if tail_len == _NTP_AUTH_MD5_TAIL_SIZE:
        return NTPAuthenticator
    if tail_len > _NTP_AUTH_MD5_TAIL_SIZE:
        return NTPExtensions
    return Packet.guess_payload_class(self, payload)
async def close(self):
    """Close the listening socket.

    This does not close any ServerSession objects created to handle
    incoming connections.
    """
    if not self.server:
        return
    self.server.close()
    await self.server.wait_closed()
    self.server = None
384,265 | def start(self, poll_period=None):
logger.info("Incoming ports bound")
if poll_period is None:
poll_period = self.poll_period
start = time.time()
count = 0
self._kill_event = threading.Event()
self._task_puller_thread = threading.Thread(target=self... | Start the NeedNameQeueu
Parameters:
----------
TODO: Move task receiving to a thread |
384,266 | def sensitivity(imgs, bg=None):
bg = getBackground(bg)
for n, i in enumerate(imgs):
i = imread(i, dtype=float)
i -= bg
smooth = fastMean(median_filter(i, 3))
i /= smooth
if n == 0:
out = i
else:
out += i
out /= (n + 1)
... | Extract pixel sensitivity from a set of homogeneously illuminated images
This method is detailed in Section 5 of:
---
K.Bedrich, M.Bokalic et al.:
ELECTROLUMINESCENCE IMAGING OF PV DEVICES:
ADVANCED FLAT FIELD CALIBRATION,2017
--- |
def block(self, mcs):
    """Block a (previously computed) MCS.

    The MCS should be given as an iterable of 1-based integers.  Not
    automatically invoked from :func:`enumerate` so that a user may block
    MCSes conditionally.
    """
    clause = [self.sels[cl_id - 1] for cl_id in mcs]
    self.oracle.add_clause(clause)
384,268 | def get_composition_repository_assignment_session(self, proxy):
if not self.supports_composition_repository_assignment():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise
proxy = self._convert_proxy(proxy)
... | Gets the session for assigning composition to repository
mappings.
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositoryAssignmentSession)
- a CompositionRepositoryAssignmentSession
raise: OperationFailed - unable to complete request
... |
384,269 | def listen(self, topic, timeout=1, limit=1):
if not self._subscribed:
logger.warn()
return []
if topic not in self._messages:
logger.warn( % topic)
return []
if limit != 0 and len(self._messages[topic]) >= limit:
mes... | Listen to a topic and return a list of message payloads received
within the specified time. Requires an async Subscribe to have been called previously.
`topic` topic to listen to
`timeout` duration to listen
`limit` the max number of payloads that will be returned. Specify 0
... |
384,270 | def profile(func):
def inner(*args, **kwargs):
pr = cProfile.Profile()
pr.enable()
res = func(*args, **kwargs)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats()
ps.print_stats()
print(s.getvalue())
return res
... | Decorator to profile functions with cProfile
Args:
func: python function
Returns:
profile report
References:
https://osf.io/upav8/ |
384,271 | def _at_dump_context(self, calculator, rule, scope, block):
sys.stderr.write("%s\n" % repr(rule.namespace._variables)) | Implements @dump_context |
def is_child_of_objective_bank(self, id_, objective_bank_id):
    """Tests if an objective bank is a direct child of another.

    Delegates to the catalog session when one is configured, otherwise
    falls back to the hierarchy session.

    arg: id (osid.id.Id): an ``Id``
    arg: objective_bank_id (osid.id.Id): the ``Id`` of an objective bank
    return: (boolean) - ``true`` if ``id`` is a child of
        ``objective_bank_id``, ``false`` otherwise
    """
    catalog_session = self._catalog_session
    if catalog_session is not None:
        return catalog_session.is_child_of_catalog(
            id_=id_, catalog_id=objective_bank_id)
    # NOTE(review): the hierarchy branch swaps the arguments relative to
    # the catalog branch (id_=objective_bank_id, child_id=id_);
    # preserved as-is -- confirm against the hierarchy session contract.
    return self._hierarchy_session.is_child(
        id_=objective_bank_id, child_id=id_)
384,273 | def _prfx_getattr_(obj, item):
if item.startswith() or item.startswith():
return getattr(obj, item[2:])
raise AttributeError( % (obj.__class__.__name__, item)) | Replacement of __getattr__ |
384,274 | def rename(idf, objkey, objname, newname):
refnames = getrefnames(idf, objkey)
for refname in refnames:
objlists = getallobjlists(idf, refname)
for refname in refnames:
for robjkey, refname, fieldindexlist in objlists:
idfobjects = idf.... | rename all the refrences to this objname |
384,275 | def _buildTime(self, source, quantity, modifier, units):
if _debug:
print % (quantity, modifier, units)
if source is None:
source = time.localtime()
if quantity is None:
quantity =
else:
quantity = quantity.strip()
... | Take C{quantity}, C{modifier} and C{unit} strings and convert them into values.
After converting, calcuate the time and return the adjusted sourceTime.
@type source: time
@param source: time to use as the base (or source)
@type quantity: string
@param quantity: quant... |
384,276 | def activate():
parent = lib.parent()
try:
cmd = lib.cmd(parent)
except SystemError as exc:
lib.echo(exc)
sys.exit(lib.PROGRAM_ERROR)
context = lib.context(root=_extern.cwd())
context["BE_SHELL"] = parent
if lib.platform() == "unix":
context["BE_TABC... | Enter into an environment with support for tab-completion
This command drops you into a subshell, similar to the one
generated via `be in ...`, except no topic is present and
instead it enables tab-completion for supported shells.
See documentation for further information.
https://github.com/motto... |
384,277 | def interface(iface):
iface_info, error = _get_iface_info(iface)
if error is False:
return iface_info.get(iface, {}).get(, )
else:
return error | Return the details of `iface` or an error if it does not exist |
384,278 | def parseArguments(argv=None):
store_opt = StoreOpt()
parser = argparse.ArgumentParser(
prog=,
usage=,
add_help=False,
description=dedent(
.rstrip()),
epilog=dedent(
.rstrip()),
formatter_class=argpars... | I parse arguments in sys.argv and return the args object. The parser
itself is available as args.parser.
Adds the following members to args:
parser = the parser object
store_opt = the StoreOpt object |
384,279 | def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
v... | Returns the model properties as a dict |
384,280 | def process_transport_command(self, header, message):
if not isinstance(message, dict):
return
relevant = False
if "host" in message:
if message["host"] != self.__hostid:
return
relevant = True
if "service" in message:
... | Parse a command coming in through the transport command subscription |
def on_tool_finish(self, tool):
    """Record that an individual tool completed execution.

    :param tool: the name of the tool that completed
    :type tool: str
    """
    with self._lock:
        running = self.current_tools
        if tool in running:
            running.remove(tool)
        self.completed_tools.append(tool)
384,282 | def _load_mapping(self, mapping):
mapping["oid_as_pk"] = bool(mapping.get("fields", {}).get("Id"))
job_id, local_ids_for_batch = self._create_job(mapping)
result = self._wait_for_job(job_id)
self._store_inserted_ids(mapping, job_id, local_ids_for_batch)
return r... | Load data for a single step. |
def append(self, name, data, start):
    """Forward *data* to the named sub-throttle of every throttle.

    :param name: name of throttle side to append to ("read" or "write")
    :type name: :py:class:`str`
    :param data: bytes of data for count
    :type data: :py:class:`bytes`
    :param start: start time of the read/write
    """
    for throttle in self.throttles.values():
        target = getattr(throttle, name)
        target.append(data, start)
384,284 | def find_tf_idf(file_names=[],prev_file_path=None, dump_path=None):
tf_idf = []
df = defaultdict(int)
if prev_file_path:
print(TAG,,prev_file_path)
df,tf_idf = pickle.load(open(prev_file_path,))
prev_doc_count = len(df)
prev_corpus_length = len(tf_idf)
for f i... | Function to create a TF-IDF list of dictionaries for a corpus of docs.
If you opt for dumping the data, you can provide a file_path with .tfidfpkl extension(standard made for better understanding)
and also re-generate a new tfidf list which overrides over an old one by mentioning its path.
@Args:
--
... |
384,285 | def add_dependency(self, name, obj):
if name in self._deps:
if self._deps[name] is obj:
return
raise ValueError(
"There exists a different dep with the same name : %r" % name)
self._deps[name] = obj | Add a code dependency so it gets inserted into globals |
384,286 | def get_auth_token_login_url(
self,
auth_token_ticket,
authenticator,
private_key,
service_url,
username,
):
auth_token, auth_token_signature = self._build_auth_token_data(
auth_token_ticket,
authenticator,
private_... | Build an auth token login URL.
See https://github.com/rbCAS/CASino/wiki/Auth-Token-Login for details. |
384,287 | def search(self, value, createIndex=None):
pecel leleFullNameUsername
if createIndex:
self._createIndex = createIndex
self._search = True
self.filter(QueryExpression({ : {: value}}))
return self | Full-text support, make sure that text index already exist on collection. Raise IndexNotFound if text index not exist.
**Examples**: ``query.search('pecel lele', createIndex=['FullName', 'Username'])`` |
384,288 | def get_graphs_by_ids(self, network_ids: Iterable[int]) -> List[BELGraph]:
rv = [
self.get_graph_by_id(network_id)
for network_id in network_ids
]
log.debug(, network_ids)
return rv | Get a list of networks with the given identifiers and converts to BEL graphs. |
384,289 | def _retry_deliveries(self):
self.logger.debug("Begin messages delivery retries")
tasks = []
for message in itertools.chain(self.session.inflight_in.values(), self.session.inflight_out.values()):
tasks.append(asyncio.wait_for(self._handle_message_flow(message), 10, loop=self... | Handle [MQTT-4.4.0-1] by resending PUBLISH and PUBREL messages for pending out messages
:return: |
384,290 | def choose(msg, items, attr):
if len(items) == 1:
return items[0]
print()
for index, i in enumerate(items):
name = attr(i) if callable(attr) else getattr(i, attr)
print( % (index, name))
print()
while True:
try:
inp = input( % msg)
... | Command line helper to display a list of choices, asking the
user to choose one of the options. |
384,291 | def sub_channel(self):
if self._sub_channel is None:
self._sub_channel = self.sub_channel_class(self.context,
self.session,
(self.ip, self.iopub_port))
return self._sub_chan... | Get the SUB socket channel object. |
384,292 | def per(arga, argb, prec=10):
r
if not isinstance(prec, int):
raise RuntimeError("Argument `prec` is not valid")
a_type = 1 * _isreal(arga) + 2 * (isiterable(arga) and not isinstance(arga, str))
b_type = 1 * _isreal(argb) + 2 * (isiterable(argb) and not isinstance(argb, str))
if not a_t... | r"""
Calculate percentage difference between numbers.
If only two numbers are given, the percentage difference between them is
computed. If two sequences of numbers are given (either two lists of
numbers or Numpy vectors), the element-wise percentage difference is
computed. If any of the numbers in... |
384,293 | def generalized_lsp_value_withtau(times, mags, errs, omega):
one_over_errs2 = 1.0/(errs*errs)
W = npsum(one_over_errs2)
wi = one_over_errs2/W
sin_omegat = npsin(omega*times)
cos_omegat = npcos(omega*times)
sin2_omegat = sin_omegat*sin_omegat
cos2_omegat = cos_omegat*cos_omegat
s... | Generalized LSP value for a single omega.
This uses tau to provide an arbitrary time-reference point.
The relations used are::
P(w) = (1/YY) * (YC*YC/CC + YS*YS/SS)
where: YC, YS, CC, and SS are all calculated at T
and where: tan 2omegaT = 2*CS/(CC - SS)
and where:
... |
384,294 | def load_configuration(conf_path):
with open(conf_path) as f:
conf_dict = yaml.load(f)
validate_config(conf_dict)
return conf_dict | Load and validate test configuration.
:param conf_path: path to YAML configuration file.
:return: configuration as dict. |
384,295 | def show_firmware_version_output_show_firmware_version_node_info_firmware_version_info_application_name(self, **kwargs):
config = ET.Element("config")
show_firmware_version = ET.Element("show_firmware_version")
config = show_firmware_version
output = ET.SubElement(show_firmware_... | Auto Generated Code |
384,296 | def get(self, requirement):
if isinstance(requirement, basestring):
requirement = Requirement.parse(requirement)
return sorted(p for p in self.packages
if requirement.name == p.name and requirement.match(p)) | Find packages matching ``requirement``.
:param requirement: Requirement to match against repository packages.
:type requirement: `str` or :class:`.Requirement`
:returns: :func:`list` of matching :class:`.Package` objects. |
384,297 | def format_op_row(ipFile, totLines, totWords, uniqueWords):
txt = os.path.basename(ipFile).ljust(36) +
txt += str(totLines).rjust(7) +
txt += str(totWords).rjust(7) +
txt += str(len(uniqueWords)).rjust(7) +
return txt | Format the output row with stats |
384,298 | def create(cls, cli, management_address,
local_username=None, local_password=None,
remote_username=None, remote_password=None,
connection_type=None):
req_body = cli.make_body(
managementAddress=management_address, localUsername=local_username,
... | Configures a remote system for remote replication.
:param cls: this class.
:param cli: the rest client.
:param management_address: the management IP address of the remote
system.
:param local_username: administrative username of local system.
:param local_password: a... |
384,299 | def _initialize_likelihood_prior(self, positions, log_likelihoods, log_priors):
func = SimpleCLFunction.from_string( + str(self._nmr_params) + , dependencies=[self._get_log_prior_cl_func(), self._get_log_likelihood_cl_func()])
kernel_data = {
: Array(positions, , mode=, ensure_zero... | Initialize the likelihood and the prior using the given positions.
This is a general method for computing the log likelihoods and log priors for given positions.
Subclasses can use this to instantiate secondary chains as well. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.