code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _read_all_from_socket(self, timeout): pkts = [] try: self._sock.settimeout(timeout) while True: p = self._sock.recv(64) pkts.append((bytearray(p), time.time())) self._sock.settimeout(0) except socket.timeout: ...
Read all packets we currently can on the socket. Returns list of tuples. Each tuple contains a packet and the time at which it was received. NOTE: The receive time is the time when our recv() call returned, which greatly depends on when it was called. The time is NOT the time at which t...
def insert_colorpoint(self, position=0.5, color1=[1.0,1.0,0.0], color2=[1.0,1.0,0.0]): L = self._colorpoint_list if position <= 0.0: L.insert(0,[0.0,color1,color2]) elif position >= 1.0: L.append([1.0,color1,color2]) else: for n in range(len(self._co...
Inserts the specified color into the list.
def _oxford_comma_separator(i, length): if length == 1: return None elif length < 3 and i == 0: return ' and ' elif i < length - 2: return ', ' elif i == length - 2: return ', and ' else: return None
Make a separator for a prose-like list with `,` between items except for `, and` after the second to last item.
def get_handler_name(route: Route, logic: Callable) -> str: if route.handler_name is not None: return route.handler_name if any(m for m in route.methods if m.method.lower() == 'post'): if route.heading != 'API': return '{}ListHandler'.format(get_valid_class_name(route.heading)) ...
Gets the handler name. :param route: A Route instance. :param logic: The logic function. :returns: A handler class name.
def key_func(*keys, **kwargs): ensure_argcount(keys, min_=1) ensure_keyword_args(kwargs, optional=('default',)) keys = list(map(ensure_string, keys)) if 'default' in kwargs: default = kwargs['default'] def getitems(obj): for key in keys: try: ...
Creates a "key function" based on given keys. Resulting function will perform lookup using specified keys, in order, on the object passed to it as an argument. For example, ``key_func('a', 'b')(foo)`` is equivalent to ``foo['a']['b']``. :param keys: Lookup keys :param default: Optional keyword arg...
def notification_preference(obj_type, profile):
    """Build the context for the notification on/off radio buttons.

    Looks up ``profile['notifications'][obj_type]``; a missing profile,
    missing 'notifications' mapping, or missing key defaults to True.
    Returns a dict with ``alerts_on`` and ``obj_type`` keys.
    """
    if profile:
        alerts_on = profile.get('notifications', {}).get(obj_type, True)
    else:
        alerts_on = True
    return {'alerts_on': alerts_on, 'obj_type': obj_type}
Display two radio buttons for turning notifications on or off. The default value is alerts_on = True.
def interpolate(self, lon, lat, egy=None, interp_log=True): if self.data.ndim == 1: theta = np.pi / 2. - np.radians(lat) phi = np.radians(lon) return hp.pixelfunc.get_interp_val(self.counts, theta, phi, nest=self.hpx.nest) ...
Interpolate map values. Parameters ---------- interp_log : bool Interpolate the z-coordinate in logspace.
def getSampleFrequencies(self):
    """Return the rounded sample frequency of every signal in the file.

    Queries ``self.samplefrequency(ch)`` for each of the
    ``self.signals_in_file`` channels and returns the rounded values as a
    numpy array.
    """
    frequencies = [round(self.samplefrequency(channel))
                   for channel in np.arange(self.signals_in_file)]
    return np.array(frequencies)
Returns samplefrequencies of all signals. Parameters ---------- None Examples -------- >>> import pyedflib >>> f = pyedflib.data.test_generator() >>> all(f.getSampleFrequencies()==200.0) True >>> f._close() >>> del f
def _tokenize_latex(self, exp): tokens = [] prevexp = "" while exp: t, exp = self._get_next_token(exp) if t.strip() != "": tokens.append(t) if prevexp == exp: break prevexp = exp return tokens
Internal method to tokenize latex
def add_densities(density1, density2):
    """Sum two densities element-wise per spin channel.

    Args:
        density1: First density, mapping spin -> array-like values.
        density2: Second density with the same spin keys.

    Returns:
        Dict of {spin: summed numpy array}, keyed by the spins of density1.
    """
    summed = {}
    for spin in density1:
        summed[spin] = np.array(density1[spin]) + np.array(density2[spin])
    return summed
Method to sum two densities. Args: density1: First density. density2: Second density. Returns: Dict of {spin: density}.
def register(self, target):
    """Register every stored url rule of this blueprint on *target*.

    Each rule is added with this blueprint's name as the endpoint and
    ``dispatch_request`` as the view, forwarding any per-rule options.
    """
    endpoint = self.name
    view = self.dispatch_request
    for rule, options in self.url_rules:
        target.add_url_rule(rule, endpoint, view, **options)
Registers url_rules on the blueprint
def addCategory(self, categoryUri, weight):
    """Add a relevant category to the topic page.

    @param categoryUri: uri of the category to be added
    @param weight: importance of the category (int or float; typically 1-50)
    """
    assert isinstance(weight, (float, int)), "weight value has to be a positive or negative integer"
    entry = {"uri": categoryUri, "wgt": weight}
    self.topicPage["categories"].append(entry)
add a relevant category to the topic page @param categoryUri: uri of the category to be added @param weight: importance of the provided category (typically in range 1 - 50)
def format_vk(vk):
    """Normalize ``vk`` in place before use.

    For every filtered extension, wrap a scalar ``'require'`` entry in a
    list so downstream code can always iterate it.
    """
    for extension in get_extensions_filtered(vk):
        requirement = extension['require']
        if not isinstance(requirement, list):
            extension['require'] = [requirement]
Format vk before using it
def add(self, path):
    """Update the running digest with the content of the file at *path*.

    The file is read in ``self.__buff``-sized binary chunks so arbitrarily
    large files are hashed without loading them fully into memory.
    """
    with salt.utils.files.fopen(path, 'rb') as fh_:
        while True:
            chunk = fh_.read(self.__buff)
            if not chunk:
                break
            self.__digest.update(chunk)
Update digest with the file content by path. :param path: :return:
def smart_object(self):
    """Associated smart object, created lazily on first access.

    :return: cached :py:class:`SmartObject` wrapping this layer.
    """
    try:
        return self._smart_object
    except AttributeError:
        self._smart_object = SmartObject(self)
        return self._smart_object
Associated smart object. :return: :py:class:`~psd_tools.api.smart_object.SmartObject`.
def file2abspath(filename, this_file=__file__):
    """Return the absolute path of *filename* relative to *this_file*'s dir.

    :param filename: file name (may itself be relative).
    :param this_file: anchor file whose directory is the base (defaults to
        this module's own path).
    """
    base_dir = os.path.dirname(os.path.abspath(this_file))
    return os.path.abspath(os.path.join(base_dir, filename))
generate absolute path for the given file and base dir
def send_datagram(self, message):
    """Serialize *message* and send it through the UDP socket.

    No-op once the owning component has been stopped.

    :type message: Message
    :param message: the message to send; its ``destination`` attribute
        supplies the (host, port) pair.
    """
    if self.stopped.isSet():
        return
    host, port = message.destination
    logger.debug("send_datagram - " + str(message))
    raw = Serializer().serialize(message)
    self._socket.sendto(raw, (host, port))
Send a message through the udp socket. :type message: Message :param message: the message to send
def assert_is_not(expected, actual, message=None, extra=None):
    """Raise AssertionError if *expected* is the same object as *actual*.

    :param expected: object expected to be distinct.
    :param actual: object compared by identity against *expected*.
    :param message: optional custom failure message.
    :param extra: optional extra context appended to the failure message.
    """
    # The failure message is built lazily: `assert cond, expr` evaluates expr
    # only when cond is false, so _assert_fail_message is never called on the
    # success path. NOTE: like all asserts, this is stripped under `python -O`.
    assert expected is not actual, _assert_fail_message(
        message, expected, actual, "is", extra
    )
Raises an AssertionError if expected is actual.
def get_instance(self, payload):
    """Build a TodayInstance from an API payload.

    :param dict payload: Payload response from the API
    :returns: twilio.rest.api.v2010.account.usage.record.today.TodayInstance
    :rtype: twilio.rest.api.v2010.account.usage.record.today.TodayInstance
    """
    account_sid = self._solution['account_sid']
    return TodayInstance(self._version, payload, account_sid=account_sid)
Build an instance of TodayInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.usage.record.today.TodayInstance :rtype: twilio.rest.api.v2010.account.usage.record.today.TodayInstance
def shift_and_pad(tensor, shift, axis=0): shape = tensor.shape rank = len(shape) assert 0 <= abs(axis) < rank length = int(shape[axis]) assert 0 <= abs(shift) < length paddings = [(0, 0)] * rank begin = [0] * rank size = [-1] * rank if shift > 0: paddings[axis] = (shift, 0) size[axis] = length...
Shifts and pads with zero along an axis. Example: shift_and_pad([1, 2, 3, 4], 2) --> [0, 0, 1, 2] shift_and_pad([1, 2, 3, 4], -2) --> [3, 4, 0, 0] Args: tensor: Tensor; to be shifted and padded. shift: int; number of positions to shift by. axis: int; along which axis to shift and pad. Retu...
def _nix_env():
    """Return the nix-env command path for the configured user.

    Resolves the user's ``~/.nix-profile/bin`` directory via the salt
    ``__opts__['user']`` setting and returns the command as a one-element
    argv list.
    """
    user_home = os.path.expanduser('~{0}'.format(__opts__['user']))
    bin_dir = os.path.join(user_home, '.nix-profile/bin/')
    return [os.path.join(bin_dir, 'nix-env')]
nix-env with quiet option. By default, nix is extremely verbose and prints the build log of every package to stderr. This tells nix to only show changes.
def get_chromecasts(tries=None, retry_wait=None, timeout=None, blocking=True, callback=None): if blocking: hosts = discover_chromecasts() cc_list = [] for host in hosts: try: cc_list.append(_get_chromecast_from_host( host, t...
Searches the network for chromecast devices. If blocking = True, returns a list of discovered chromecast devices. If blocking = False, triggers a callback for each discovered chromecast, and returns a function which can be executed to stop discovery. May r...
def _run_bunny(args): main_file, json_file, project_name = _get_main_and_json(args.directory) work_dir = utils.safe_makedir(os.path.join(os.getcwd(), "bunny_work")) flags = ["-b", work_dir] log_file = os.path.join(work_dir, "%s-bunny.log" % project_name) if os.path.exists(work_dir): caches =...
Run CWL with rabix bunny.
def user_has_permission(self, user, name): targetRecord = AuthMembership.objects(creator=self.client, user=user).first() if not targetRecord: return False for group in targetRecord.groups: if self.has_permission(group.role, name): return True retur...
verify user has permission
def pairwise(
    iterable: Iterable,
    default_value: Any,
) -> Iterable[Tuple[Any, Any]]:
    """Yield consecutive pairs from *iterable*.

    The second element of the final pair is *default_value*, e.g.
    ``pairwise([1, 2, 3], None)`` yields (1, 2), (2, 3), (3, None).
    """
    first, second = tee(iterable)
    # Advance the trailing copy by one; the default swallows StopIteration
    # for empty input.
    next(second, default_value)
    return zip_longest(first, second, fillvalue=default_value)
Return pairs of items from `iterable`. pairwise([1, 2, 3], default_value=None) -> (1, 2) (2, 3), (3, None)
def load_bookmark(self, slot_num): bookmarks = CONF.get('editor', 'bookmarks') if slot_num in bookmarks: filename, line_num, column = bookmarks[slot_num] else: return if not osp.isfile(filename): self.last_edit_cursor_pos = None ret...
Set cursor to bookmarked file and position.
def encode(self, value): if value is None and self._default is not None: value = self._default for encoder in self._encoders: try: return encoder(value) except ValueError as ex: pass raise ValueError('Value \'{}\' is invalid. {}...
The encoder for this schema. Tries each encoder in order of the types specified for this schema.
def work(self, interval=5): self._setproctitle("Starting") logger.info("starting") self.startup() while True: if self._shutdown: logger.info('shutdown scheduled') break self.register_worker() job = self.reserve(interval)...
Invoked by ``run`` method. ``work`` listens on a list of queues and sleeps for ``interval`` time. ``interval`` -- Number of seconds the worker will wait until processing the next job. Default is "5". Whenever a worker finds a job on the queue it first calls ``reserve`` on that job to m...
def getSignificance(wk1, wk2, nout, ofac):
    """Return the peak false-alarm probabilities for a periodogram.

    The lower the probability, the more significant the peak. *wk1* is
    accepted for signature compatibility but unused here; *wk2* is the array
    of normalized powers, *nout* the number of output frequencies and *ofac*
    the oversampling factor.
    """
    prob = exp(-wk2)
    effective_m = 2.0 * nout / ofac
    sig = effective_m * prob
    # Where the linear approximation exceeds 0.01, use the exact expression.
    large = (sig > 0.01).nonzero()
    sig[large] = 1.0 - (1.0 - prob[large]) ** effective_m
    return sig
Returns the peak false alarm probabilities. The lower the probability, the more significant the peak.
def xray(im, direction='X'):
    r"""Simulate an X-ray radiograph through the porous material.

    The boolean image is inverted (solid -> 1) and summed along the viewing
    axis, so regions with more solid attenuate more. *direction* selects the
    axis: 'X' (default), 'Y' or 'Z' (case-insensitive).
    """
    solid = sp.array(~im, dtype=int)
    if direction in ('Y', 'y'):
        solid = sp.transpose(solid, axes=[1, 0, 2])
    elif direction in ('Z', 'z'):
        solid = sp.transpose(solid, axes=[2, 1, 0])
    return sp.sum(solid, axis=0)
r""" Simulates an X-ray radiograph looking through the porous material in the specified direction. The resulting image is colored according to the amount of attenuation an X-ray would experience, so regions with more solid will appear darker. Parameters ---------- im : array_like N...
def get(self): header = '' while len(header) < self.HEADER_LENGTH: chunk = self._sock.recv(self.HEADER_LENGTH - len(header)) chunk = chunk.decode() if self._encode else chunk if chunk == '': return None header += chunk length = int(...
Receive a message. Return the message upon successful reception, or None upon failure.
def yticksize(self, size, index=1):
    """Set the y-axis tick font size.

    Parameters
    ----------
    size : int
        Font size in points.
    index : int
        1-based y-axis index ('yaxis1', 'yaxis2', ...).

    Returns
    -------
    Chart
        This chart, for call chaining.
    """
    axis_key = 'yaxis' + str(index)
    self.layout[axis_key]['tickfont']['size'] = size
    return self
Set the tick font size. Parameters ---------- size : int Returns ------- Chart
def delete_webhook(webhook_id): webhook = get_data_or_404('webhook', webhook_id) action = get_data_or_404('action', webhook['action_id']) project = get_data_or_404('project', action['project_id']) if project['owner_id'] != get_current_user_id(): return jsonify(message='forbidden'), 403 delet...
Delete webhook.
def upsert(self, _id, dct, attribute="_id"):
    """Update or insert a document.

    :param str _id: The document id.
    :param dict dct: The dictionary to set on the document.
    :param str attribute: The attribute queried to find the object to update.
    :returns: JSON Mongo client response including the "n" key showing the
        number of objects affected.
    """
    # Tornado-style generator coroutine: `yield` awaits the async update and
    # `raise Return(...)` delivers the result to the caller.
    mongo_response = yield self.update(_id, dct, upsert=True, attribute=attribute)
    raise Return(mongo_response)
Update or Insert a new document :param str _id: The document id :param dict dct: The dictionary to set on the document :param str attribute: The attribute to query for to find the object to set this data on :returns: JSON Mongo client response including the "n" key to show number of obj...
def uniform_cost(problem, graph_search=False, viewer=None):
    """Uniform cost search.

    If graph_search=True, repeated states are not re-explored. Requires
    SearchProblem.actions, SearchProblem.result, SearchProblem.is_goal and
    SearchProblem.cost.
    """
    fringe = BoundedPriorityQueue()
    return _search(problem,
                   fringe,
                   graph_search=graph_search,
                   node_factory=SearchNodeCostOrdered,
                   graph_replace_when_better=True,
                   viewer=viewer)
Uniform cost search. If graph_search=True, will avoid exploring repeated states. Requires: SearchProblem.actions, SearchProblem.result, SearchProblem.is_goal, and SearchProblem.cost.
def f1_score(y_true, y_pred, average='micro', suffix=False): true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_pred = len(pred_entities) nb_true = len(true_entities) p = nb_correct / nb_pred if n...
Compute the F1 score. The F1 score can be interpreted as a weighted average of the precision and recall, where an F1 score reaches its best value at 1 and worst score at 0. The relative contribution of precision and recall to the F1 score are equal. The formula for the F1 score is:: F1 = 2 * (...
def _initialize_table(self, column_count): header = [''] * column_count alignment = [self.default_alignment] * column_count width = [0] * column_count padding = [self.default_padding] * column_count self._column_count = column_count self._column_headers = HeaderData(self,...
Sets the column count of the table. This method is called to set the number of columns for the first time. Parameters ---------- column_count : int number of columns in the table
def get_random_label():
    """Return a random 15-character label of uppercase letters and digits.

    Used to tag clustered jobs; not cryptographically secure (uses
    ``random``).
    """
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(15))
Get a random label string to use when clustering jobs.
def contains(self, rect):
    """Test whether *rect* lies entirely inside this rectangle.

    Arguments:
        rect (Rectangle): The other rectangle.

    Returns:
        bool: True if *rect* is contained, False otherwise.
    """
    fits_vertically = (rect.y >= self.y and
                       rect.y + rect.height <= self.y + self.height)
    fits_horizontally = (rect.x >= self.x and
                         rect.x + rect.width <= self.x + self.width)
    return fits_vertically and fits_horizontally
Tests if another rectangle is contained by this one Arguments: rect (Rectangle): The other rectangle Returns: bool: True if it is contained, False otherwise
def typedefs( self, name=None, function=None, header_dir=None, header_file=None, recursive=None, allow_empty=None): return ( self._find_multiple( self._impl_matchers[scopedef_t.typedef], ...
returns a set of typedef declarations, that are matched defined criteria
def mustExposeRequest(self, service_request):
    """Decide whether the underlying http request is passed as first argument.

    Granular lookup: the service method's setting wins; if it is None the
    gateway-level ``expose_request`` applies, with None meaning False.

    @rtype: C{bool}
    """
    service_level = service_request.service.mustExposeRequest(service_request)
    if service_level is not None:
        return service_level
    if self.expose_request is None:
        return False
    return self.expose_request
Decides whether the underlying http request should be exposed as the first argument to the method call. This is granular, looking at the service method first, then at the service level and finally checking the gateway. @rtype: C{bool}
def sample(self, num): if num > len(self): return self.copy() elif num < 0: raise IndexError("Cannot sample a negative number of rows " "from a DataTable") random_row_mask = ([True] * num) + ([False] * (len(self) - num)) shuffle(random...
Returns a new table with rows randomly sampled. We create a mask with `num` True bools, and fill it with False bools until it is the length of the table. We shuffle it, and apply that mask to the table.
def _GetClientLibCallback(args, client_func=_GetClientLib):
    """Generate discovery docs and client libraries to files.

    Args:
        args: argparse.Namespace providing service, language, output,
            build_system, hostname and application attributes.
        client_func: callable generating the client libraries and returning
            the list of file paths written (injectable for testing).
    """
    client_paths = client_func(
        args.service, args.language, args.output, args.build_system,
        hostname=args.hostname, application_path=args.application)
    for client_path in client_paths:
        # Parenthesized print: the original Python-2-only `print x` statement
        # is a syntax error on Python 3; this form behaves identically on both.
        print('API client library written to %s' % client_path)
Generate discovery docs and client libraries to files. Args: args: An argparse.Namespace object to extract parameters from. client_func: A function that generates client libraries and stores them to files, accepting a list of service names, a client library language, an output directory, a build ...
def _get_magnitude_term(self, C, mag): f_mag = C["c0"] + C["c1"] * mag if (mag > 4.5) and (mag <= 5.5): return f_mag + (C["c2"] * (mag - 4.5)) elif (mag > 5.5) and (mag <= 6.5): return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) elif mag > 6.5: ...
Returns the magnitude scaling term defined in equation 2
def oauth_manager(self, oauth_manager): @self.app.before_request def before_request(): endpoint = request.endpoint resource = self.app.view_functions[endpoint].view_class if not getattr(resource, 'disable_oauth'): scopes = request.args.get('scopes') ...
Use the oauth manager to enable oauth for API :param oauth_manager: the oauth manager
def _parse_ergodic_cutoff(self): ec_is_str = isinstance(self.ergodic_cutoff, str) if ec_is_str and self.ergodic_cutoff.lower() == 'on': if self.sliding_window: return 1.0 / self.lag_time else: return 1.0 elif ec_is_str and self.ergodic_cuto...
Get a numeric value from the ergodic_cutoff input, which can be 'on' or 'off'.
def checkFuelPosition(obs, agent_host):
    """Move our coal, if we have any, into inventory slot 0.

    Scans slots 1-38 of the observation dict and swaps the first slot
    containing coal with slot 0, then stops.
    """
    for slot in range(1, 39):
        slot_key = 'InventorySlot_' + str(slot) + '_item'
        if obs.get(slot_key) == 'coal':
            agent_host.sendCommand("swapInventoryItems 0 " + str(slot))
            return
Make sure our coal, if we have any, is in slot 0.
def calc_file_md5(filepath, chunk_size=None): if chunk_size is None: chunk_size = 256 * 1024 md5sum = hashlib.md5() with io.open(filepath, 'r+b') as f: datachunk = f.read(chunk_size) while datachunk is not None and len(datachunk) > 0: md5sum.update(datachunk) ...
Calculate a file's md5 checksum. Use the specified chunk_size for IO or the default 256KB :param filepath: :param chunk_size: :return:
async def get_kernel_options(cls) -> typing.Optional[str]:
    """Return the default kernel boot parameters, or None when unset.

    Reads the ``kernel_opts`` config value; both a missing value (None) and
    an empty string are reported as None.
    """
    data = await cls.get_config("kernel_opts")
    if data is None or data == "":
        return None
    return data
Kernel options. Boot parameters to pass to the kernel by default.
def print_modules(self): shutit_global.shutit_global_object.yield_to_draw() cfg = self.cfg module_string = '' module_string += 'Modules: \n' module_string += ' Run order Build Remove Module ID\n' for module_id in self.module_ids(): module_string += ' ' + str(self.shutit_map[module_id].run_...
Returns a string table representing the modules in the ShutIt module map.
def create_connection_model(service):
    """Create a model class that connects the provided services.

    Builds a CharField attribute per connected service and creates the class
    dynamically with BaseModel's metaclass.
    """
    services = service._services
    bases = (BaseModel,)
    # The original comprehension reused the name `service`, shadowing the
    # function parameter; renamed to `svc` for clarity (behavior unchanged —
    # dict-comprehension scope never leaked into the function body).
    attributes = {
        model_service_name(svc): fields.CharField()
        for svc in services
    }
    return type(BaseModel)(connection_service_name(service), bases, attributes)
Create an SQL Alchemy table that connects the provided services
def clear_genus_type(self):
    """Reset the genus type to its default.

    raise: NoAccess - metadata reports the field is read-only or required.
    *compliance: mandatory -- This method must be implemented.*
    """
    metadata = self.get_genus_type_metadata()
    if metadata.is_read_only() or metadata.is_required():
        raise errors.NoAccess()
    self._my_map['genusTypeId'] = self._genus_type_default
Clears the genus type. raise: NoAccess - ``Metadata.isRequired()`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def install_caller_instruction(self, token_type="Unrestricted", transaction_id=None): response = self.install_payment_instruction("MyRole=='Caller';", token_type=token_type, ...
Set us up as a caller This will install a new caller_token into the FPS section. This should really only be called to regenerate the caller token.
def get_values(self, attr_name):
    """Return the unique set of values seen for *attr_name* at this node.

    Combines the keys of the cached distribution, the value counts and the
    branch map.
    """
    values = set(self._attr_value_cdist[attr_name])
    values.update(self._attr_value_counts[attr_name])
    values.update(self._branches)
    return values
Retrieves the unique set of values seen for the given attribute at this node.
def size(self):
    """The physical size of the device in mm, where meaningful.

    Only valid on devices that provide size data (tablets, touchpads,
    touchscreens); raises AssertionError otherwise.

    Returns:
        (float, float): (width, height) in mm.
    """
    width = c_double(0)
    height = c_double(0)
    status = self._libinput.libinput_device_get_size(
        self._handle, byref(width), byref(height))
    assert status == 0, 'This device does not provide size information'
    return width.value, height.value
The physical size of a device in mm, where meaningful. This property is only valid on devices with the required data, i.e. tablets, touchpads and touchscreens. For other devices this property raises :exc:`AssertionError`. Returns: (float, float): (Width, Height) in mm. Raises: AssertionError
def add_dataset(data_type, val, unit_id=None, metadata={}, name="", user_id=None, flush=False): d = Dataset() d.type = data_type d.value = val d.set_metadata(metadata) d.unit_id = unit_id d.name = name d.created_by = user_id d.hash = d.set_hash() try: existing_dataset = d...
Data can exist without scenarios. This is the mechanism whereby single pieces of data can be added without doing it through a scenario. A typical use of this would be for setting default values on types.
def _copy_binder_notebooks(app): gallery_conf = app.config.sphinx_gallery_conf gallery_dirs = gallery_conf.get('gallery_dirs') binder_conf = gallery_conf.get('binder') notebooks_dir = os.path.join(app.outdir, binder_conf.get('notebooks_dir')) shutil.rmtree(notebooks_dir, ignore_errors=True) os.m...
Copy Jupyter notebooks to the binder notebooks directory. Copy each output gallery directory structure but only including the Jupyter notebook files.
def on(self, state):
    """Turn the device on or off.

    :param state: True (on) or False (off).
    """
    self._on = state
    # Build only the command we actually send; the original constructed the
    # off() command unconditionally and then discarded it when turning on.
    cmd = self.command_set.on() if state else self.command_set.off()
    self.send(cmd)
Turn on or off. :param state: True (on) or False (off).
def destroyCommit(self, varBind, **context): name, val = varBind (debug.logger & debug.FLAG_INS and debug.logger('%s: destroyCommit(%s, %r)' % (self, name, val))) instances = context['instances'].setdefault(self.name, {self.ST_CREATE: {}, self.ST_DESTROY: {}}) idx = context['idx...
Destroy Managed Object Instance. Implements the second of the multi-step workflow similar to the SNMP SET command processing (:RFC:`1905#section-4.2.5`). The goal of the second phase is to actually remove requested Managed Object Instance from the MIB tree. When multiple Managed Object...
def get_hessian(self): force_const = self.fields.get("Cartesian Force Constants") if force_const is None: return None N = len(self.molecule.numbers) result = np.zeros((3*N, 3*N), float) counter = 0 for row in range(3*N): result[row, :row+1] = force...
Return the hessian
def _build_params_from_kwargs(self, **kwargs): api_methods = self.get_api_params() required_methods = self.get_api_required_params() ret_kwargs = {} for key, val in kwargs.items(): if key not in api_methods: warnings.warn( 'Passed uknown pa...
Builds parameters from passed arguments Search passed parameters in available methods, prepend specified API key, and return dictionary which can be sent directly to API server. :param kwargs: :type param: dict :raises ValueError: If type of specified parameter doesn't...
def beginning_of_history(self):
    """Move to the first line in the history.

    Resets the cursor to 0 and, when history exists, loads the oldest entry
    into the line buffer.
    """
    self.history_cursor = 0
    if self.history:
        self.l_buffer = self.history[0]
Move to the first line in the history.
def _validate_sleep(minutes): if isinstance(minutes, six.string_types): if minutes.lower() in ['never', 'off']: return 'Never' else: msg = 'Invalid String Value for Minutes.\n' \ 'String values must be "Never" or "Off".\n' \ 'Passed: {0}'.f...
Helper function that validates the minutes parameter. Can be any number between 1 and 180. Can also be the string values "Never" and "Off". Because "On" and "Off" get converted to boolean values on the command line it will error if "On" is passed Returns: The value to be passed to the command
def delete_group_cached(group_id, broker=None):
    """Delete a task group from the cache backend.

    Removes every cached task key listed under the group's key, then the
    group key itself. A broker is created via get_broker() when none is
    supplied.
    """
    broker = broker or get_broker()
    group_key = '{}:{}:keys'.format(broker.list_key, group_id)
    task_keys = broker.cache.get(group_key)
    broker.cache.delete_many(task_keys)
    broker.cache.delete(group_key)
Delete a group from the cache backend
def LoadServerCertificate(self, server_certificate=None, ca_certificate=None): try: server_certificate.Verify(ca_certificate.GetPublicKey()) except rdf_crypto.VerificationError as e: self.server_name = None raise IOError("Server cert is invalid: %s" % e) server_cert_serial = server_certifi...
Loads and verifies the server certificate.
def IsDirectory(self):
    """Determine if the file entry is a directory.

    Lazily fetches and caches the stat object, refreshes ``entry_type``
    from it when available, then compares against the directory type.

    Returns:
        bool: True if the file entry is a directory.
    """
    stat_object = self._stat_object
    if stat_object is None:
        stat_object = self._GetStat()
        self._stat_object = stat_object
    if stat_object is not None:
        self.entry_type = stat_object.type
    return self.entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY
Determines if the file entry is a directory. Returns: bool: True if the file entry is a directory.
def base_id(self): if self._base_id is not None: return self._base_id self.send(Packet(PACKET.COMMON_COMMAND, data=[0x08])) for i in range(0, 10): try: packet = self.receive.get(block=True, timeout=0.1) if packet.packet_type == PACKET.RESPO...
Fetches Base ID from the transmitter, if required. Otherwise returns the currently set Base ID.
def writer(f):
    """CSV writer factory for the CADA format.

    Returns a unicodecsv writer over *f* using UTF-8 encoding, comma
    delimiter and double-quote quoting.
    """
    options = {'encoding': 'utf-8', 'delimiter': b',', 'quotechar': b'"'}
    return unicodecsv.writer(f, **options)
CSV writer factory for CADA format
def get_day_of_month(datestring): get_day = re.compile(r"\d{1,2}(st|nd|rd|th)?", re.IGNORECASE) day = get_day.search(datestring) the_day = None if day: if bool(re.search(r"[st|nd|rd|th]", day.group().lower())): the_day = day.group()[:-2] else: the_day = day.group(...
Transforms an ordinal number into plain number with padding zero. E.g. 3rd -> 03, or 12th -> 12 Keyword arguments: datestring -- a string Returns: String, or None if the transformation fails
def initialize(**kwargs): global config config_opts = kwargs.setdefault('config',{}) if isinstance(config_opts,basestring): config_opts = {'config_filename':config_opts} kwargs['config'] = config_opts if 'environment' in kwargs: config_opts['environment'] = kwargs['environment'] ...
Loads the globally shared YAML configuration
def get_asset_content_lookup_session_for_repository(self, repository_id=None):
    """Get the ``OsidSession`` for asset content lookup in a repository.

    arg: repository_id (osid.id.Id): the ``Id`` of the repository
    return: the new asset content lookup session wrapping the provider's
        session together with this manager's config map.
    """
    provider_session = self._provider_manager.get_asset_content_lookup_session_for_repository(repository_id)
    return AssetContentLookupSession(provider_session, self._config_map)
Gets the ``OsidSession`` associated with the asset content lookup service for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository return: (osid.repository.AssetLookupSession) - the new ``AssetLookupSession`` raise: NotFound - ``reposit...
def get_non_magic_cols(self):
    """Find all columns in self.df that are not real MagIC 3 columns.

    Returns
    -------
    unrecognized_cols : set
        Column names not present in the data model for this table type.
    """
    approved = set(self.data_model.dm[self.dtype].index)
    return set(self.df.columns) - approved
Find all columns in self.df that are not real MagIC 3 columns. Returns -------- unrecognized_cols : set
def _process_dimension_kwargs(direction, kwargs): acceptable_keys = ['unit', 'pad', 'lim', 'label'] processed_kwargs = {} for k,v in kwargs.items(): if k.startswith(direction): processed_key = k.lstrip(direction) else: processed_key = k if processed_key in acc...
process kwargs for AxDimension instances by stripping off the prefix for the appropriate direction
def execute(self, operation, parameters=None, job_id=None): self._query_data = None self._query_job = None client = self.connection._client formatted_operation = _format_operation(operation, parameters=parameters) query_parameters = _helpers.to_query_parameters(parameters) ...
Prepare and execute a database operation. .. note:: When setting query parameters, values which are "text" (``unicode`` in Python2, ``str`` in Python3) will use the 'STRING' BigQuery type. Values which are "bytes" (``str`` in Python2, ``bytes`` in Python3), will ...
def MGMT_ANNOUNCE_BEGIN(self, sAddr, xCommissionerSessionId, listChannelMask, xCount, xPeriod): print '%s call MGMT_ANNOUNCE_BEGIN' % self.port channelMask = '' channelMask = self.__ChannelMaskListToStr(listChannelMask) try: cmd = WPANCTL_CMD + 'commissioner announce-begin %s...
send MGMT_ANNOUNCE_BEGIN message to a given destination Returns: True: successful to send MGMT_ANNOUNCE_BEGIN message. False: fail to send MGMT_ANNOUNCE_BEGIN message.
def handle_joined(self, connection, event):
    """Record join times for the nicknames present when we join a channel.

    The last event argument holds the space-separated nick list; "@"/"+"
    mode prefixes are stripped before each nick is timestamped.
    """
    for raw_nick in event.arguments()[-1].split():
        self.joined[raw_nick.lstrip("@+")] = datetime.now()
Store join times for current nicknames when we first join.
def describe_snapshots(self, *snapshot_ids): snapshot_set = {} for pos, snapshot_id in enumerate(snapshot_ids): snapshot_set["SnapshotId.%d" % (pos + 1)] = snapshot_id query = self.query_factory( action="DescribeSnapshots", creds=self.creds, endpoint=self.endp...
Describe available snapshots. TODO: ownerSet, restorableBySet
def upgradedb(options): version = options.get('version') if version in ['1.1', '1.2']: sh("python manage.py migrate maps 0001 --fake") sh("python manage.py migrate avatar 0001 --fake") elif version is None: print "Please specify your GeoNode version" else: print "Upgrades...
Add 'fake' data migrations for existing tables from legacy GeoNode versions
def answer_shipping_query(self, shipping_query_id, ok, shipping_options=None, error_message=None): from pytgbot.api_types.sendable.payments import ShippingOption assert_type_or_raise(shipping_query_id, unicode_type, parameter_name="shipping_query_id") assert_type_or_raise(ok, bool, parameter_nam...
If you sent an invoice requesting a shipping address and the parameter is_flexible was specified, the Bot API will send an Update with a shipping_query field to the bot. Use this method to reply to shipping queries. On success, True is returned. https://core.telegram.org/bots/api#answershippingquery ...
def vars_args(parser): parser.add_argument('--extra-vars', dest='extra_vars', help='Extra template variables', default=[], type=str, action='append') parser.add_argument('--extra-vars-file', ...
Add various command line options for external vars
async def get_power_parameters(self):
    """Get the power parameters for this node from its handler."""
    return await self._handler.power_parameters(system_id=self.system_id)
Get the power parameters for this node.
def __PrintAdditionalImports(self, imports): google_imports = [x for x in imports if 'google' in x] other_imports = [x for x in imports if 'google' not in x] if other_imports: for import_ in sorted(other_imports): self.__printer(import_) self.__printer() ...
Print additional imports needed for protorpc.
def collectLocations(self):
    """Return a list with one Location built from each key in this mapping.

    The (value, deltaName) payload of each item is unpacked but unused; only
    the location key contributes to the result.
    """
    return [Location(key) for key, (_value, _delta_name) in self.items()]
Return a list of Location objects, one per key in this mapping.
def createbranch(self, project_id, branch, ref): data = {"id": project_id, "branch_name": branch, "ref": ref} request = requests.post( '{0}/{1}/repository/branches'.format(self.projects_url, project_id), headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, tim...
Create branch from commit SHA or existing branch :param project_id: The ID of a project :param branch: The name of the branch :param ref: Create branch from commit SHA or existing branch :return: True if success, False if not
def _get_roles_for_request(request, application):
    """Return the set of roles the requesting user holds for *application*.

    Administrators additionally receive the "is_admin" and "is_authorised"
    roles.
    """
    roles = application.get_roles_for_person(request.user)
    if common.is_admin(request):
        roles.update(("is_admin", "is_authorised"))
    return roles
Check the authentication of the current user.
def msw(self):
    """Return a generator of tokens that have more than one sense."""
    return (token for token, senses in self.tcmap().items() if len(senses) > 1)
Return a generator of tokens with more than one sense.
def make_executable(script_path):
    """Make *script_path* executable by its owner.

    Adds the S_IEXEC bit to the file's existing mode bits.

    :param script_path: The file to change.
    """
    mode = os.stat(script_path).st_mode
    os.chmod(script_path, mode | stat.S_IEXEC)
Make `script_path` executable. :param script_path: The file to change
def do(cmdline, runas=None, env=None): if not cmdline: raise SaltInvocationError('Command must be specified') path = _rbenv_path(runas) if not env: env = {} env[str('PATH')] = salt.utils.stringutils.to_str( os.pathsep.join(( salt.utils.path.join(path, 'shims'), ...
Execute a ruby command with rbenv's shims from the user or the system CLI Example: .. code-block:: bash salt '*' rbenv.do 'gem list bundler' salt '*' rbenv.do 'gem list bundler' deploy
def _WsdlHasMethod(self, method_name): return method_name in self.suds_client.wsdl.services[0].ports[0].methods
Determine if the wsdl contains a method. Args: method_name: The name of the method to search. Returns: True if the method is in the WSDL, otherwise False.
def adjust_white_for_scc(cls, rgb_p, rgb_b, rgb_w, p):
    """Adjust the white point for simultaneous chromatic contrast.

    :param rgb_p: Cone signals of proximal field.
    :param rgb_b: Cone signals of background.
    :param rgb_w: Cone signals of reference white.
    :param p: Simultaneous contrast/assimilation parameter.
    :return: Adjusted cone signals for the reference white.
    """
    ratio = rgb_p / rgb_b
    numerator = ((1 - p) * ratio + (1 + p) / ratio) ** 0.5
    denominator = ((1 + p) * ratio + (1 - p) / ratio) ** 0.5
    return rgb_w * numerator / denominator
Adjust the white point for simultaneous chromatic contrast. :param rgb_p: Cone signals of proxima field. :param rgb_b: Cone signals of background. :param rgb_w: Cone signals of reference white. :param p: Simultaneous contrast/assimilation parameter. :return: Adjusted cone signal...
def ConvertToTemplate(server,template,password=None,alias=None): if alias is None: alias = clc.v1.Account.GetAlias() if password is None: password = clc.v1.Server.GetCredentials([server,],alias)[0]['Password'] r = clc.v1.API.Call('post','Server/ConvertServerToTemplate', { 'AccountAlias': ...
Converts an existing server into a template. http://www.centurylinkcloud.com/api-docs/v1/#server-convert-server-to-template :param server: source server to convert :param template: name of destination template :param password: source server password (optional - will lookup password if None) :param alias: sh...
def run(self):
    """Apply every middleware callable to the collected files.

    Each middleware is invoked as ``func(files, self)`` and may mutate the
    mapping in place; the result is merged into ``self.files`` and returned.
    """
    collected = self.get_files()
    for middleware_func in self.middleware:
        middleware_func(collected, self)
    self.files.update(collected)
    return collected
Run each middleware function on files
def save(self, out, kind=None, **kw):
    """Serialize the QR Code in one of the supported formats.

    Delegates to ``writers.save`` with this code's matrix and version;
    *kind* optionally forces a format, otherwise the filename extension of
    *out* decides. Extra keywords are forwarded to the writer.
    """
    matrix, version = self.matrix, self._version
    writers.save(matrix, version, out, kind, **kw)
\ Serializes the QR Code in one of the supported formats. The serialization format depends on the filename extension. **Common keywords** ========== ============================================================== Name Description ========== ==============...
def _macaroons_for_domain(cookies, domain):
    """Return macaroons from *cookies* that apply to *domain*.

    Builds a throwaway https request for the domain root, lets the cookie
    jar attach its matching cookies, then extracts any macaroons from them.
    """
    probe = urllib.request.Request('https://' + domain + '/')
    cookies.add_cookie_header(probe)
    return httpbakery.extract_macaroons(probe)
Return any macaroons from the given cookie jar that apply to the given domain name.
def cuts_outside(self):
    """Report whether the enzyme cuts outside its recognition site.

    Cutting at the very end of the site counts as outside.

    :returns: Whether any cut index falls outside the recognition site.
    :rtype: bool
    """
    limit = len(self.recognition_site) + 1
    return any(index < 0 or index > limit for index in self.cut_site)
Report whether the enzyme cuts outside its recognition site. Cutting at the very end of the site returns True. :returns: Whether the enzyme will cut outside its recognition site. :rtype: bool
def get_example(config, exam_lex):
    """Build the example description window, or an empty one when disabled.

    The 'Layout'/'examples' config value (interpreted through
    BOOLEAN_STATES) decides whether the examples buffer is shown.
    """
    show_examples = config.BOOLEAN_STATES[config.config.get('Layout', 'examples')]
    if not show_examples:
        return get_empty()
    return Window(
        content=BufferControl(buffer_name="examples", lexer=exam_lex))
example description window
def get_gene2aart(gene2section2gos, sec2chr):
    """Return a membership string per gene for GO section membership.

    For each gene, one character per section in *sec2chr* order: the
    section's letter when the gene belongs to it, '.' otherwise.
    """
    gene2str = {}
    for gene_id, gene_sections in gene2section2gos.items():
        chars = (letter if section in gene_sections else "."
                 for section, letter in sec2chr.items())
        gene2str[gene_id] = "".join(chars)
    return gene2str
Return a string for each gene representing GO section membership.
def extended_sys_state_send(self, vtol_state, landed_state, force_mavlink1=False):
    """Encode and send an EXTENDED_SYS_STATE message.

    vtol_state   : VTOL state, MAV_VTOL_STATE_UNDEFINED when not applicable (uint8_t)
    landed_state : landed state, MAV_LANDED_STATE_UNDEFINED when unknown (uint8_t)
    """
    msg = self.extended_sys_state_encode(vtol_state, landed_state)
    return self.send(msg, force_mavlink1=force_mavlink1)
Provides state for additional features vtol_state : The VTOL state if applicable. Is set to MAV_VTOL_STATE_UNDEFINED if UAV is not in VTOL configuration. (uint8_t) landed_state : The landed state. Is set to MAV_LANDED_STATE_UNDEFINED if landed state is unknow...
def _verify(self, rj, token): keys = self.key_jar.get_jwt_verify_keys(rj.jwt) return rj.verify_compact(token, keys)
Verify a signed JSON Web Token :param rj: A :py:class:`cryptojwt.jws.JWS` instance :param token: The signed JSON Web Token :return: A verified message
def _trim_zeros_complex(str_complexes, na_rep='NaN'): def separate_and_trim(str_complex, na_rep): num_arr = str_complex.split('+') return (_trim_zeros_float([num_arr[0]], na_rep) + ['+'] + _trim_zeros_float([num_arr[1][:-1]], na_rep) + ['j']) retur...
Separates the real and imaginary parts from the complex number, and executes the _trim_zeros_float method on each of those.