code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def resize(self):
    """Resize ``self.image`` in place to the cropped target size.

    Does nothing when ``get_resized_size()`` returns a falsy value.
    """
    resized_size = self.get_resized_size()
    if not resized_size:
        return
    # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter
    # (available since Pillow 2.7).  Fall back for ancient PIL versions.
    resample = getattr(Image, "LANCZOS", getattr(Image, "ANTIALIAS", None))
    self.image = self.image.resize(resized_size, resample)
Get target size for a cropped image and do the resizing if we got anything usable.
def from_json(self, json_string):
    """Replace this FlatTable's contents from a JSON string.

    :param json_string: JSON document with ``groups`` and ``data`` keys.
    :type json_string: str
    :return: self, to allow call chaining.
    """
    parsed = json.loads(json_string)
    self.from_dict(parsed['groups'], parsed['data'])
    return self
Override current FlatTable using data from json. :param json_string: JSON String :type json_string: str
def previous_obj(self):
    """Return the model obj for the previous visit, or None when absent.

    Looks up the model by its visit attribute; a missing row is treated
    the same as having no previous visit.
    """
    if not self.previous_visit:
        return None
    lookup = {self.model.visit_model_attr(): self.previous_visit}
    try:
        return self.model.objects.get(**lookup)
    except ObjectDoesNotExist:
        return None
Returns a model obj that is the first occurrence of a previous obj relative to this object's appointment. Override this method if not an EDC subject model / CRF.
def _read_openephys(openephys_file): root = ElementTree.parse(openephys_file).getroot() channels = [] for recording in root: s_freq = float(recording.attrib['samplerate']) for processor in recording: for channel in processor: channels.append(channel.attrib) return s_freq, channels
Read the channel labels and their respective files from the 'Continuous_Data.openephys' file Parameters ---------- openephys_file : Path path to Continuous_Data.openephys inside the open-ephys folder Returns ------- int sampling frequency list of dict list of channels containing the label, the filename and the gain
def remove(item):
    """Delete *item*, whether it is a file, a symlink, or a directory tree."""
    # isdir() follows symlinks, but rmtree() refuses them; treat a symlink
    # to a directory as a plain file so it is unlinked, not recursed into.
    if os.path.isdir(item) and not os.path.islink(item):
        shutil.rmtree(item)
    else:
        os.remove(item)
Delete item, whether it's a file, a folder, or a folder full of other files and folders.
def create_roots(self, yam):
    """Create the top-level grammar structure for module `yam`."""
    # Root <grammar> element carrying the module's namespace and name.
    self.local_grammar = SchemaNode("grammar")
    self.local_grammar.attr = {
        "ns": yam.search_one("namespace").arg,
        "nma:module": self.module.arg}
    src_text = "YANG module '%s'" % yam.arg
    revs = yam.search("revision")
    if len(revs) > 0:
        src_text += " revision %s" % self.current_revision(revs)
    self.dc_element(self.local_grammar, "source", src_text)
    start = SchemaNode("start", self.local_grammar)
    # Three top-level containers: data tree, RPCs and notifications.
    self.data = SchemaNode("nma:data", start, interleave=True)
    self.data.occur = 2
    self.rpcs = SchemaNode("nma:rpcs", start, interleave=False)
    self.notifications = SchemaNode("nma:notifications", start, interleave=False)
Create the top-level structure for module `yam`.
def fill(image, mask=None, iterations=1):
    """Fill isolated black pixels.

    1 1 1    1 1 1
    1 0 1 -> 1 1 1
    1 1 1    1 1 1

    Pixels outside *mask* are restored from the input unchanged.
    """
    if mask is None:
        masked_image = image
    else:
        # Force everything outside the mask to True so it cannot be filled.
        masked_image = image.astype(bool).copy()
        masked_image[~mask] = True
    result = table_lookup(masked_image, fill_table, True, iterations)
    if mask is not None:  # idiomatic identity test (was `not mask is None`)
        result[~mask] = image[~mask]
    return result
Fill isolated black pixels 1 1 1 1 1 1 1 0 1 -> 1 1 1 1 1 1 1 1 1
def authenticate(self, username, password):
    """Exchange a Smappee username/password for access and refresh tokens.

    Stores access_token, refresh_token and the token expiration time on
    self, and returns the raw requests.Response.

    Parameters
    ----------
    username : str
    password : str
    """
    payload = {
        "grant_type": "password",
        "client_id": self.client_id,
        "client_secret": self.client_secret,
        "username": username,
        "password": password,
    }
    response = requests.post(URLS['token'], data=payload)
    response.raise_for_status()
    body = response.json()
    self.access_token = body['access_token']
    self.refresh_token = body['refresh_token']
    self._set_token_expiration_time(expires_in=body['expires_in'])
    return response
Uses a Smappee username and password to request an access token, refresh token and expiry date. Parameters ---------- username : str password : str Returns ------- requests.Response access token is saved in self.access_token refresh token is saved in self.refresh_token expiration time is set in self.token_expiration_time as datetime.datetime
def drain(self):
    """Drain any pending input bytes from the serial port.

    Temporarily drops the read timeout to 0.1s, reads until the port stops
    yielding data, then restores the original timeout.  Always returns True.
    NOTE(review): ``ord(data[0])`` assumes Python 2 byte strings — on
    Python 3, ``data[0]`` is already an int; confirm the target version.
    """
    if self.debug:
        sys.stderr.write("%s: DRAIN INPUT (%i bytes waiting)\n" %(
            self.__class__.__name__, self.ser.inWaiting() ))
    old_timeout = self.ser.timeout
    self.ser.timeout = 0.1
    data = self.ser.read(1)
    while len(data):
        if self.debug:
            sys.stderr.write("%s: DRAINED 0x%x (%c)\n" %(
                self.__class__.__name__, ord(data[0]), data[0]))
        data = self.ser.read(1)
    self.ser.timeout = old_timeout
    return True
Drain input.
def encode(self, s):
    """Convert an audio file path into a list of float samples.

    mp3 inputs are transcoded with sox to 16 kHz, 16-bit mono wav; other
    non-wav inputs are likewise converted (conversion cached next to the
    source file).

    Args:
      s: path to the file with a waveform.

    Returns:
      list of floats normalized to [-1, 1].
    """
    if s.endswith(".mp3"):
        out_filepath = s[:-4] + ".wav"
        call([
            "sox", "--guard", s, "-r", "16k", "-b", "16", "-c", "1",
            out_filepath
        ])
        s = out_filepath
    elif not s.endswith(".wav"):
        out_filepath = s + ".wav"
        if not os.path.exists(out_filepath):
            call(["sox", "-r", "16k", "-b", "16", "-c", "1", s, out_filepath])
        s = out_filepath
    rate, data = wavfile.read(s)
    assert rate == self._sample_rate
    assert len(data.shape) == 1
    # Integer PCM is scaled by the dtype's max to get floats in [-1, 1].
    if data.dtype not in [np.float32, np.float64]:
        data = data.astype(np.float32) / np.iinfo(data.dtype).max
    return data.tolist()
Transform a string with a filename into a list of float32. Args: s: path to the file with a waveform. Returns: samples: list of floats normalized to [-1, 1]
def run_slurm(self, steps=None, **kwargs):
    """Run the pipeline steps via the SLURM queue.

    Keyword arguments override self.extra_slurm_params; defaults are
    filled in for time, job_name, email and dependency.
    NOTE(review): `params` aliases self.extra_slurm_params, so the
    update() mutates that dict in place — confirm this is intended.
    """
    params = self.extra_slurm_params
    params.update(kwargs)
    if 'time' not in params:
        params['time'] = self.default_time
    if 'job_name' not in params:
        params['job_name'] = self.job_name
    if 'email' not in params:
        params['email'] = None
    if 'dependency' not in params:
        # 'singleton' serializes jobs that share the same name and user.
        params['dependency'] = 'singleton'
    self.slurm_job = LoggedJobSLURM(self.command(steps),
                                    base_dir = self.parent.p.logs_dir,
                                    modules = self.modules,
                                    **params)
    return self.slurm_job.run()
Run the steps via the SLURM queue.
def get_numeric_features_to_observed_range(examples):
    """Returns numeric features mapped to their observed value ranges.

    Args:
      examples: Examples to read to get ranges.

    Returns:
      A dict mapping feature_name -> {'observedMin': ..., 'observedMax': ...},
      with a key for each numeric feature.
    """
    observed_features = collections.defaultdict(list)  # name -> [value, ...]
    for example in examples:
        for feature_name in get_numeric_feature_names(example):
            original_feature = parse_original_feature_from_example(
                example, feature_name)
            observed_features[feature_name].extend(original_feature.original_value)
    return {
        feature_name: {
            'observedMin': min(feature_values),
            'observedMax': max(feature_values),
        }
        for feature_name, feature_values in iteritems(observed_features)
    }
Returns numerical features and their observed ranges. Args: examples: Examples to read to get ranges. Returns: A dict mapping feature_name -> {'observedMin': 'observedMax': } dicts, with a key for each numerical feature.
def collides(self, position, size):
    """Return True if the word at *position* overlaps any plotted word.

    NOTE(review): *size* is unused; the rect uses self.word_size — confirm
    whether the parameter is vestigial.
    """
    word_rect = pygame.Rect(position, self.word_size)
    # collidelistall returns the indices of all colliding rects;
    # a non-empty list means at least one collision.
    return bool(word_rect.collidelistall(self.used_pos))
Returns True if the word collides with another plotted word.
def setup_client(self, client_id=None, user_data=None, scan=True, broadcast=False):
    """Set up a newly connected client and install its event monitor.

    Args:
        client_id (str): Unique id for this client; a random uuid4 string
            is generated when None.
        user_data (object): Arbitrary object stored with the client.
        scan (bool): Whether the client receives device-scan events.
        broadcast (bool): Whether the client receives broadcast events.

    Returns:
        str: The client_id (the one passed in, or the generated one).

    Raises:
        ArgumentError: If client_id is already registered.
    """
    if client_id is None:
        client_id = str(uuid.uuid4())
    if client_id in self._clients:
        raise ArgumentError("Duplicate client_id: {}".format(client_id))
    async def _client_callback(conn_string, _, event_name, event):
        # Forward every adapter event to this client.
        event_tuple = (conn_string, event_name, event)
        await self._forward_client_event(client_id, event_tuple)
    client_monitor = self.adapter.register_monitor([], [], _client_callback)
    self._clients[client_id] = dict(user_data=user_data, connections={},
                                    monitor=client_monitor)
    # Enable scan/broadcast event routing per the caller's flags.
    self._adjust_global_events(client_id, scan, broadcast)
    return client_id
Setup a newly connected client. ``client_id`` must be unique among all connected clients. If it is passed as None, a random client_id will be generated as a string and returned. This method reserves internal resources for tracking what devices this client has connected to and installs a monitor into the adapter on behalf of the client. It should be called whenever a new client connects to the device server before any other activities by that client are allowed. By default, all clients start receiving ``device_seen`` events but if you want your client to also receive broadcast events, you can pass broadcast=True. Args: client_id (str): A unique identifier for this client that will be used to refer to it in all future interactions. If this is None, then a random string will be generated for the client_id. user_data (object): An arbitrary object that you would like to store with this client and will be passed to your event handler when events are forwarded to this client. scan (bool): Whether to install a monitor to listen for device_found events. broadcast (bool): Whether to install a monitor to list for broadcast events. Returns: str: The client_id. If a client id was passed in, it will be the same as what was passed in. If no client id was passed in then it will be a random unique string.
def read_perseus(f):
    """Load a Perseus processed data table.

    :param f: Source file
    :return: Pandas dataframe with the 4-row header collapsed to one level
    """
    frame = pd.read_csv(f, delimiter='\t', header=[0, 1, 2, 3], low_memory=False)
    top_level = frame.columns.get_level_values(0)
    frame.columns = pd.MultiIndex.from_tuples([(name,) for name in top_level])
    return frame
Load a Perseus processed data table :param f: Source file :return: Pandas dataframe of imported data
def DefaultSelector(sock):
    """Return the best selector for the platform, wrapping *sock*.

    The selector class is resolved once and cached in the module global
    _DEFAULT_SELECTOR; poll is preferred over select.
    """
    global _DEFAULT_SELECTOR
    if _DEFAULT_SELECTOR is None:
        if has_selector('poll'):
            _DEFAULT_SELECTOR = PollSelector
        elif hasattr(select, 'select'):
            _DEFAULT_SELECTOR = SelectSelector
        else:
            raise RedisError('Platform does not support any selectors')
    return _DEFAULT_SELECTOR(sock)
Return the best selector for the platform
def get_certificate():
    """Read the openvswitch certificate from disk.

    Returns the PEM block between the BEGIN/END markers, or None when no
    certificate file exists.  Raises RuntimeError when the file lacks
    valid markers.
    """
    if not os.path.exists(CERT_PATH):
        log('Certificate not found', level=WARNING)
        return None
    log('Reading ovs certificate from {}'.format(CERT_PATH))
    with open(CERT_PATH, 'r') as cert:
        full_cert = cert.read()
    begin_marker = "-----BEGIN CERTIFICATE-----"
    end_marker = "-----END CERTIFICATE-----"
    begin_index = full_cert.find(begin_marker)
    end_index = full_cert.rfind(end_marker)
    if end_index == -1 or begin_index == -1:
        raise RuntimeError("Certificate does not contain valid begin"
                           " and end markers.")
    return full_cert[begin_index:(end_index + len(end_marker))]
Read openvswitch certificate from disk
def updateCheckedText(self):
    """Update the editor text to reflect the latest checked state."""
    if not self.isCheckable():
        return
    indexes = self.checkedIndexes()
    items = self.checkedItems()
    # Short lists (or an explicit separator) show the joined items;
    # otherwise fall back to a "<n> items selected" summary.
    if len(items) < 2 or self.separator():
        self.lineEdit().setText(self.separator().join(items))
    else:
        self.lineEdit().setText('{0} items selected'.format(len(items)))
    if not self.signalsBlocked():
        self.checkedItemsChanged.emit(items)
        self.checkedIndexesChanged.emit(indexes)
Updates the text in the editor to reflect the latest state.
def verify(password, hash):
    """Verify a password against a passed hash.

    The hash is a '$'-separated record of (_, algorithm, cost, salt,
    digest); the password is re-derived and compared in constant time.
    """
    _, algorithm, cost, salt, password_hash = hash.split("$")
    derived = pbkdf2.pbkdf2_hex(password, salt, int(cost) * 500)
    return _safe_str_cmp(derived, password_hash)
Verify a password against a passed hash
def get_urls(self):
    """Prepend the custom will_not_clone/restore urls to the admin urls."""
    custom_urls = [
        url(r'^(.+)/will_not_clone/$', admin.site.admin_view(self.will_not_clone)),
        url(r'^(.+)/restore/$', admin.site.admin_view(self.restore)),
    ]
    return custom_urls + super(VersionedAdmin, self).get_urls()
Appends the custom will_not_clone url to the admin site
def finder_for_path(path):
    """Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path, or None when
        no finder is registered for the path's loader type.
    """
    result = None
    # Ensure the path gets an entry in sys.path_importer_cache.
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        # Wrap the path in a dummy module so the finder can read
        # __file__ / __loader__ as usual.
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
Return a resource finder for a path, which should represent a container. :param path: The path. :return: A :class:`ResourceFinder` instance for the path.
def optional_else(self, node, last):
    """Create op_pos for an optional `else` clause on *node*.

    Extends the node span to cover the else suite and, when the textual
    'else' keyword appears after *last*, records its keyword position.
    """
    if node.orelse:
        min_first_max_last(node, node.orelse[-1])
        if 'else' in self.operators:
            position = (node.orelse[0].first_line, node.orelse[0].first_col)
            _, efirst = self.operators['else'].find_previous(position)
            if efirst and efirst > last:
                # The ':' terminating the else header ends the keyword span.
                elast, _ = self.operators[':'].find_previous(position)
                node.op_pos.append(NodeWithPosition(elast, efirst))
Create op_pos for optional else
def onNicknameChange(
    self,
    mid=None,
    author_id=None,
    changed_for=None,
    new_nickname=None,
    thread_id=None,
    thread_type=ThreadType.USER,
    ts=None,
    metadata=None,
    msg=None,
):
    """Called while listening, when somebody changes a person's nickname.

    Logs who changed whose nickname, in which thread, and to what.
    """
    message = "Nickname change from {} in {} ({}) for {}: {}".format(
        author_id, thread_id, thread_type.name, changed_for, new_nickname
    )
    log.info(message)
Called when the client is listening, and somebody changes the nickname of a person :param mid: The action ID :param author_id: The ID of the person who changed the nickname :param changed_for: The ID of the person whom got their nickname changed :param new_nickname: The new nickname :param thread_id: Thread ID that the action was sent to. See :ref:`intro_threads` :param thread_type: Type of thread that the action was sent to. See :ref:`intro_threads` :param ts: A timestamp of the action :param metadata: Extra metadata about the action :param msg: A full set of the data received :type thread_type: models.ThreadType
def on_iteration(self):
    """Kombu callback for each `drain_events` loop iteration.

    Requests a stop once every consumer has been cancelled.
    """
    self._cancel_consumers_if_requested()
    if not self._consumers:  # idiomatic emptiness check (was len(...) == 0)
        _log.debug('requesting stop after iteration')
        self.should_stop = True
Kombu callback for each `drain_events` loop iteration.
def _len_frame(obj):
    """Length of a frame object: its code object's length, or 0."""
    code = getattr(obj, 'f_code', None)
    return _len_code(code) if code else 0
Length of a frame object.
def load_manifest(data):
    """Helper for loading a manifest yaml doc.

    Dicts pass straight through; anything else is parsed as YAML and
    must produce a dict.
    """
    if isinstance(data, dict):
        return data
    document = yaml.safe_load(data)
    if not isinstance(document, dict):
        raise Exception("Manifest didn't result in dict.")
    return document
Helper for loading a manifest yaml doc.
def get_auth_info(self):
    """Return general authentication information.

    Does not require authenticating; useful to test whether the server
    requires authentication.

    :rtype: .AuthInfo
    :raises ConnectionFailure: when the server refuses the connection.
    """
    try:
        response = self.session.get(self.auth_root, headers={
            'Accept': 'application/protobuf'
        })
        # Response body is a protobuf-encoded AuthInfo message.
        message = web_pb2.AuthInfo()
        message.ParseFromString(response.content)
        return AuthInfo(message)
    except requests.exceptions.ConnectionError:
        raise ConnectionFailure('Connection to {} refused'.format(self.address))
Returns general authentication information. This operation does not require authenticating and is useful to test if a server requires authentication or not. :rtype: .AuthInfo
def _year_expand(s):
    """Parse a year or dash-delimited year range into an (int, int) tuple.

    A bare year yields (year, year); an open-ended range is clamped to
    1900/2099; unparseable input yields the full (1900, 2099) range.
    """
    regex = r"^((?:19|20)\d{2})?(\s*-\s*)?((?:19|20)\d{2})?$"
    try:
        start, dash, end = match(regex, ustr(s)).groups()
        start = start or 1900
        end = end or 2099
    except AttributeError:
        # No match (or non-string input): fall back to the widest range.
        return 1900, 2099
    if dash:
        return int(start), int(end)
    return int(start), int(start)
Parses a year or dash-delimited year range
def reverse(self):
    """Query whether the test runs in reverse direction.

    Caches the flag on self._reverse and returns it.

    :rtype: bool
    """
    self._reverse = bool(self.lib.iperf_get_test_reverse(self._test))
    return self._reverse
Toggles direction of test :rtype: bool
def process_array_items(self, array, json):
    """Create a resource for each entry in `items` and append it to *array*.

    Assets and Entries are additionally indexed in `array.items_mapped`
    by resource type and id.

    :param array: Array resource.
    :param json: Raw JSON dictionary.
    """
    for item in json['items']:
        key = None
        processed = self.from_json(item)
        if isinstance(processed, Asset):
            key = 'Asset'
        elif isinstance(processed, Entry):
            key = 'Entry'
        if key is not None:
            array.items_mapped[key][processed.sys['id']] = processed
        array.items.append(processed)
Iterate through all `items` and create a resource for each. In addition map the resources under the `items_mapped` by the resource id and type. :param array: Array resource. :param json: Raw JSON dictionary.
def value(self):
    """Value of the variable as bytes, or None when not set locally.

    The stored property is base64-encoded; it is decoded before return.
    """
    raw = self._properties.get("value")
    if raw is None:
        return None
    return base64.b64decode(raw)
Value of the variable, as bytes. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables :rtype: bytes or ``NoneType`` :returns: The value of the variable or ``None`` if the property is not set locally.
def send_message(
        self, title=None, body=None, icon=None, data=None, sound=None,
        badge=None, api_key=None, **kwargs):
    """Send a single FCM notification message to this device.

    Deactivates the device when the FCM result reports an error, then
    returns the raw FCM result.
    """
    # Local import avoids a circular dependency with the fcm module.
    from .fcm import fcm_send_message
    result = fcm_send_message(
        registration_id=str(self.registration_id),
        title=title,
        body=body,
        icon=icon,
        data=data,
        sound=sound,
        badge=badge,
        api_key=api_key,
        **kwargs
    )
    self._deactivate_device_on_error_result(result)
    return result
Send single notification message.
def _split_keys_v2(joined):
    """Split two keys out of a string created by _join_keys_v2."""
    encoded_left, _sep, encoded_right = joined.rpartition('::')
    return _decode_v2(encoded_left), _decode_v2(encoded_right)
Split two keys out a string created by _join_keys_v2.
def mount(dmg):
    """Attempt to mount a dmg file to a temporary location.

    Args:
        dmg (str): The location of the dmg file to mount.

    Returns:
        tuple: (hdiutil command output, mount point path)

    CLI Example:

    .. code-block:: bash

        salt '*' macpackage.mount /tmp/software.dmg
    """
    temp_dir = __salt__['temp.dir'](prefix='dmg-')
    cmd = 'hdiutil attach -readonly -nobrowse -mountpoint {0} "{1}"'.format(temp_dir, dmg)
    return __salt__['cmd.run'](cmd), temp_dir
Attempt to mount a dmg file to a temporary location and return the location of the pkg file inside Args: dmg (str): The location of the dmg file to mount Returns: tuple: Tuple containing the results of the command along with the mount point CLI Example: .. code-block:: bash salt '*' macpackage.mount /tmp/software.dmg
def terminate_ex(self, nodes, threads=False, attempts=3):
    """Repeatedly terminate *nodes*, retrying failures.

    :param nodes: Nodes to be destroyed.
    :type nodes: ``list``
    :param threads: Use the threaded termination path when True.
    :type threads: ``bool``
    :param attempts: Number of tries for instances that failed to die.
    :type attempts: ``int``
    :return: Nodes that could not be terminated (empty on success).
    """
    remaining = nodes
    while remaining and attempts > 0:
        terminator = self.terminate_with_threads if threads else self.terminate
        remaining = terminator(remaining)
        if remaining:
            logger.info("Attempt to terminate the remaining instances once more.")
        attempts -= 1
    return remaining
Wrapper method for terminate. :param nodes: Nodes to be destroyed. :type nodes: ``list`` :param attempts: The amount of attempts for retrying to terminate failed instances. :type attempts: ``int`` :param threads: Whether to use the threaded approach or not. :type threads: ``bool``
def _getFirstPathExpression(name):
    """Return the first metric path found in the parsed expression *name*."""
    tokens = grammar.parseString(name)
    pathExpression = None
    # Walk down nested expressions / call arguments until a path appears.
    while pathExpression is None:
        if tokens.pathExpression:
            pathExpression = tokens.pathExpression
        elif tokens.expression:
            tokens = tokens.expression
        elif tokens.call:
            # Descend into the call's first argument.
            tokens = tokens.call.args[0]
        else:
            break
    return pathExpression
Returns the first metric path in an expression.
def dataset_view(self, dataset):
    """View metadata for a dataset.

    Parameters
    ==========
    dataset: the dataset identifier, either '[owner]/[dataset-name]' or a
        bare slug (owner then defaults to the configured user).
    """
    if '/' in dataset:
        self.validate_dataset_string(dataset)
        parts = dataset.split('/')
        owner_slug, dataset_slug = parts[0], parts[1]
    else:
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        dataset_slug = dataset
    result = self.process_response(
        self.datasets_view_with_http_info(owner_slug, dataset_slug))
    return Dataset(result)
view metadata for a dataset. Parameters ========== dataset: the string identifier of the dataset, in the format [owner]/[dataset-name]
def set_memory(self, total=None, static=None):
    """Set the maximum allowed memory.

    Args:
        total: Total memory in MBytes (int). Ignored when None.
        static: Static memory in MBytes (int). Ignored when None.
    """
    # Compare against None explicitly: 0 is a legitimate value, and the
    # docstring promises that only None is neglected.
    if total is not None:
        self.params["rem"]["mem_total"] = total
    if static is not None:
        self.params["rem"]["mem_static"] = static
Set the maximum allowed memory. Args: total: The total memory. Integer. Unit: MBytes. If set to None, this parameter will be neglected. static: The static memory. Integer. Unit MBytes. If set to None, this parameter will be neglected.
def raise_for_redefined_namespace(self, line: str, position: int, namespace: str) -> None:
    """Raise an exception if *namespace* is already defined.

    :raises: RedefinedNamespaceError
    """
    if not self.disallow_redefinition:
        return
    if self.has_namespace(namespace):
        raise RedefinedNamespaceError(self.get_line_number(), line, position, namespace)
Raise an exception if a namespace is already defined. :raises: RedefinedNamespaceError
def loadFromFile(self, filename):
    """Return a WSDL instance loaded from the given file."""
    # `with` guarantees the handle is closed even if loading raises,
    # replacing the manual try/finally.
    with open(filename, 'rb') as file:
        return self.loadFromStream(file)
Return a WSDL instance loaded from the given file.
def CreateMenuItem(self, MenuItemId, PluginContext, CaptionText, HintText=u'', IconPath='', Enabled=True,
                   ContactType=pluginContactTypeAll, MultipleContacts=False):
    """Create a custom menu item in the Skype client's "Do More" menus.

    :param MenuItemId: unique identifier for the menu item
    :param PluginContext: client window context the item appears in
    :param CaptionText: caption text
    :param HintText: optional tooltip text
    :param IconPath: optional path to an icon
    :param Enabled: initial state of the menu item (True by default)
    :param ContactType: contact filter, only used with the contact context
    :param MultipleContacts: allow multiple contacts (False by default)
    :return: menu item object
    :rtype: `PluginMenuItem`
    """
    # Build the CREATE MENU_ITEM command string piecewise from the options.
    cmd = 'CREATE MENU_ITEM %s CONTEXT %s CAPTION %s ENABLED %s' % (
        tounicode(MenuItemId), PluginContext, quote(tounicode(CaptionText)),
        cndexp(Enabled, 'true', 'false'))
    if HintText:
        cmd += ' HINT %s' % quote(tounicode(HintText))
    if IconPath:
        cmd += ' ICON %s' % quote(path2unicode(IconPath))
    if MultipleContacts:
        cmd += ' ENABLE_MULTIPLE_CONTACTS true'
    if PluginContext == pluginContextContact:
        cmd += ' CONTACT_TYPE_FILTER %s' % ContactType
    self._Skype._DoCommand(cmd)
    return PluginMenuItem(self._Skype, MenuItemId, CaptionText, HintText, Enabled)
Creates custom menu item in Skype client's "Do More" menus. :Parameters: MenuItemId : unicode Unique identifier for the menu item. PluginContext : `enums`.pluginContext* Menu item context. Allows to choose in which client windows will the menu item appear. CaptionText : unicode Caption text. HintText : unicode Hint text (optional). Shown when mouse hoovers over the menu item. IconPath : unicode Path to the icon (optional). Enabled : bool Initial state of the menu item. True by default. ContactType : `enums`.pluginContactType* In case of `enums.pluginContextContact` tells which contacts the menu item should appear for. Defaults to `enums.pluginContactTypeAll`. MultipleContacts : bool Set to True if multiple contacts should be allowed (defaults to False). :return: Menu item object. :rtype: `PluginMenuItem`
def format_exp_floats(decimals):
    """Return a float formatter that switches to scientific notation above 1e5."""
    threshold = 10 ** 5

    def _format(value):
        if value > threshold:
            return "{:.{prec}e}".format(value, prec=decimals)
        return "{:4.{prec}f}".format(value, prec=decimals)

    return _format
sometimes the exp. column can be too large
def getDarkCurrentAverages(exposuretimes, imgs):
    """Return (exposure times, image averages) with one average per time.

    Images sharing an exposure time are averaged together.
    """
    x, imgs_p = sortForSameExpTime(exposuretimes, imgs)
    s0, s1 = imgs[0].shape
    # Output array; note this rebinds `imgs` — the RHS still reads the
    # original input because it is evaluated before the rebinding.
    imgs = np.empty(shape=(len(x), s0, s1),
                    dtype=imgs[0].dtype)
    for i, ip in zip(imgs, imgs_p):
        if len(ip) == 1:
            # Single image for this exposure time: copy it through.
            i[:] = ip[0]
        else:
            i[:] = averageSameExpTimes(ip)
    return x, imgs
return exposure times, image averages for each exposure time
def start(self):
    """Create and bind the ZAP handler socket."""
    self.zap_socket = self.context.socket(zmq.REP)
    self.zap_socket.linger = 1
    # Each authenticator instance binds a distinct inproc endpoint,
    # distinguished by the class-level counter.
    zapLoc = 'inproc://zeromq.zap.{}'.format(MultiZapAuthenticator.count)
    self.zap_socket.bind(zapLoc)
    self.log.debug('Starting ZAP at {}'.format(zapLoc))
Create and bind the ZAP socket
def is_ipv4(ip: str) -> bool:
    """Return True if *ip* parses as an IPv4 address, otherwise False.

    Note: inet_aton also accepts legacy short forms such as '127.1'.
    """
    try:
        socket.inet_aton(ip)
    except socket.error:
        return False
    else:
        return True
Returns True if the IPv4 address ia valid, otherwise returns False.
def load(self):
    """Load all features into memory, caching them on first access.

    Returns
    -------
    None
    """
    self.layer.ResetReading()
    for i in range(self.nfeatures):
        # Only fetch features that have not been cached yet.
        if self.__features[i] is None:
            self.__features[i] = self.layer[i]
load all feature into memory Returns -------
def cmd_stop(self, argv, help):
    """Stop the named instance (CLI entry point).

    Parses *argv* for a single instance name, restricted to the
    instances configured for the 'stop' command.
    """
    parser = argparse.ArgumentParser(
        prog="%s stop" % self.progname,
        description=help,
    )
    instances = self.get_instances(command='stop')
    parser.add_argument("instance", nargs=1,
                        metavar="instance",
                        help="Name of the instance from the config.",
                        choices=sorted(instances))
    args = parser.parse_args(argv)
    instance = instances[args.instance[0]]
    instance.stop()
Stops the instance
def run_tpm(system, steps, blackbox):
    """Iterate the system TPM for the given number of timesteps.

    Inputs crossing box boundaries from output elements are noised
    (marginalized out) before the TPM is raised to the (steps-1) power.

    Returns:
        np.ndarray: tpm * (noise_tpm^(t-1)), in state-by-node form.
    """
    node_tpms = []
    for node in system.nodes:
        node_tpm = node.tpm_on
        for input_node in node.inputs:
            if not blackbox.in_same_box(node.index, input_node):
                if input_node in blackbox.output_indices:
                    # Cross-box input from an output element: noise it.
                    node_tpm = marginalize_out([input_node], node_tpm)
        node_tpms.append(node_tpm)
    noised_tpm = rebuild_system_tpm(node_tpms)
    # Matrix powers require state-by-state form.
    noised_tpm = convert.state_by_node2state_by_state(noised_tpm)
    tpm = convert.state_by_node2state_by_state(system.tpm)
    tpm = np.dot(tpm, np.linalg.matrix_power(noised_tpm, steps - 1))
    return convert.state_by_state2state_by_node(tpm)
Iterate the TPM for the given number of timesteps. Returns: np.ndarray: tpm * (noise_tpm^(t-1))
def all(self, domain=None):
    """Get the messages within a given domain (all domains when None).

    @param domain: optional domain name
    @rtype: dict
    @return: a (shallow-copied) dict of messages
    """
    if domain is not None:
        return dict(self.messages.get(domain, {}))
    return {name: dict(msgs) for name, msgs in list(self.messages.items())}
Gets the messages within a given domain. If domain is None, it returns all messages. @type id: The @param id: message id @rtype: dict @return: A dict of messages
def getCheckerByName(self, checkerType):
    """Return the first registered checker of the given type, or None.

    @param checkerType: type of the checker
    """
    all_checkers = sum(list(self.linter._checkers.values()), [])
    for candidate in all_checkers:
        if isinstance(candidate, checkerType):
            return candidate
    return None
Get checker by given name. @checkerType: type of the checker
def construct(cls, faker, path_to_factories=None):
    """Create a new factory container.

    :param faker: A faker generator instance
    :type faker: faker.Generator
    :param path_to_factories: The path to factories
    :type path_to_factories: str
    :rtype: Factory
    """
    factory = faker.__class__()
    if path_to_factories is not None and os.path.isdir(path_to_factories):
        for filename in os.listdir(path_to_factories):
            # os.listdir yields bare names; join with the directory before
            # the isfile() check — testing the bare name resolves against
            # the current working directory and silently skips everything.
            if os.path.isfile(os.path.join(path_to_factories, filename)):
                cls._resolve(path_to_factories, filename)
    return factory
Create a new factory container. :param faker: A faker generator instance :type faker: faker.Generator :param path_to_factories: The path to factories :type path_to_factories: str :rtype: Factory
def get_asset_spatial_assignment_session(self, proxy):
    """Get the session for assigning spatial coverage to an asset.

    arg proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetSpatialAssignmentSession)
    raise: Unimplemented - supports_asset_spatial_assignment() is false
    """
    if not self.supports_asset_spatial_assignment():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise
    proxy = self._convert_proxy(proxy)
    try:
        session = sessions.AssetSpatialAssignmentSession(proxy, runtime=self._runtime)
    except AttributeError:
        raise
    return session
Gets the session for assigning spatial coverage to an asset. arg proxy (osid.proxy.Proxy): a proxy return: (osid.repository.AssetSpatialAssignmentSession) - an AssetSpatialAssignmentSession raise: OperationFailed - unable to complete request raise: Unimplemented - supports_asset_spatial_assignment() is false compliance: optional - This method must be implemented if supports_asset_spatial_assignment() is true.
def distclean(ctx=None):
    """Remove the build directory, lock file and waf cache directories.

    NOTE(review): uses Python 2 `except OSError, e` syntax — this module
    targets Python 2 only.
    """
    global commands
    lst = os.listdir('.')
    for f in lst:
        if f == Options.lockfile:
            # The lock file records the project environment, including
            # the build directory to remove.
            try:
                proj = Environment.Environment(f)
            except:
                Logs.warn('could not read %r' % f)
                continue
            try:
                shutil.rmtree(proj[BLDDIR])
            except IOError:
                pass
            except OSError, e:
                # A missing build dir is fine; anything else is reported.
                if e.errno != errno.ENOENT:
                    Logs.warn('project %r cannot be removed' % proj[BLDDIR])
            try:
                os.remove(f)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    Logs.warn('file %r cannot be removed' % f)
        # Only remove waf caches when no commands are queued.
        if not commands and f.startswith('.waf'):
            shutil.rmtree(f, ignore_errors=True)
removes the build directory
def tagrefs(self):
    """Return the (tag, ref) pairs of all the vgroup members.

    Returns a list of (tag, ref) tuples, one per member.
    C library equivalent: Vgettagrefs.
    """
    n = self._nmembers
    ret = []
    if n:
        # Allocate C arrays to receive the tags and reference numbers.
        tags = _C.array_int32(n)
        refs = _C.array_int32(n)
        k = _C.Vgettagrefs(self._id, tags, refs, n)
        _checkErr('tagrefs', k, "error getting tags and refs")
        for m in xrange(k):
            ret.append((tags[m], refs[m]))
    return ret
Get the tags and reference numbers of all the vgroup members. Args:: no argument Returns:: list of (tag,ref) tuples, one for each vgroup member C library equivalent : Vgettagrefs
def trimSegments(self, minPermanence=None, minNumSyns=None):
    """Delete weak synapses and under-populated segments in every cell.

    Parameters:
    --------------------------------------------------------------
    minPermanence:  synapses with permanence below this are deleted
                    (defaults to self.connectedPerm).
    minNumSyns:     segments left with fewer synapses are deleted
                    (defaults to self.activationThreshold).
    retval:         (numSegsRemoved, numSynsRemoved)
    """
    if minPermanence is None:
        minPermanence = self.connectedPerm
    if minNumSyns is None:
        minNumSyns = self.activationThreshold
    totalSegsRemoved, totalSynsRemoved = 0, 0
    # Visit every (column, cell) pair and trim its segment list.
    for c, i in product(xrange(self.numberOfCols), xrange(self.cellsPerColumn)):
        (segsRemoved, synsRemoved) = self.trimSegmentsInCell(
            colIdx=c, cellIdx=i, segList=self.cells[c][i],
            minPermanence=minPermanence, minNumSyns=minNumSyns)
        totalSegsRemoved += segsRemoved
        totalSynsRemoved += synsRemoved
    return totalSegsRemoved, totalSynsRemoved
This method deletes all synapses whose permanence is less than minPermanence and deletes any segments that have less than minNumSyns synapses remaining. Parameters: -------------------------------------------------------------- minPermanence: Any syn whose permanence is 0 or < minPermanence will be deleted. If None is passed in, then self.connectedPerm is used. minNumSyns: Any segment with less than minNumSyns synapses remaining in it will be deleted. If None is passed in, then self.activationThreshold is used. retval: (numSegsRemoved, numSynsRemoved)
def type_to_string(t):
    """Get the string representation of a memory type."""
    names = {
        MemoryElement.TYPE_I2C: 'I2C',
        MemoryElement.TYPE_1W: '1-wire',
        MemoryElement.TYPE_DRIVER_LED: 'LED driver',
        MemoryElement.TYPE_LOCO: 'Loco Positioning',
        MemoryElement.TYPE_TRAJ: 'Trajectory',
        MemoryElement.TYPE_LOCO2: 'Loco Positioning 2',
    }
    return names.get(t, 'Unknown')
Get string representation of memory type
def get_initial_status_brok(self):
    """Get an initial 'notification_raise' brok with full status data.

    :return: brok with wanted data
    :rtype: alignak.brok.Brok
    """
    data = {'uuid': self.uuid}
    # Populate the payload from this object's full_status attributes.
    self.fill_data_brok_from(data, 'full_status')
    return Brok({'type': 'notification_raise', 'data': data})
Get a initial status brok :return: brok with wanted data :rtype: alignak.brok.Brok
def prompt_protocol():
    """Prompt whether to save the pickle file as a dictionary or an object.

    Gives the user three tries; falls back to "d" when no valid answer
    is given.

    :return str: "d" or "o"
    """
    stop = 3
    ans = ""
    while stop > 0:
        ans = input("Save as (d)ictionary or (o)bject?\n"
                    "* Note:\n"
                    "Dictionaries are more basic, and are compatible with Python v2.7+.\n"
                    "Objects are more complex, and are only compatible with v3.4+ ")
        if ans not in ("d", "o"):
            print("Invalid response: Please choose 'd' or 'o'")
            stop -= 1  # BUG FIX: counter was never decremented -> infinite loop
        else:
            break
    if ans not in ("d", "o"):
        # Out of tries without a valid answer: default to dictionary.
        ans = "d"
    return ans
Prompt user if they would like to save pickle file as a dictionary or an object. :return str: Answer
def ordered_tags(self):
    """Get the related tags, sorted for display.

    :return: `list` of `Tag` instances
    """
    tags = list(self.tags.all())
    # Non-Tag subclasses get a large constant boost so they sort first
    # (reverse=True); ties are broken by usage count, descending.
    return sorted(
        tags,
        key=lambda tag: ((type(tag) != Tag) * 100000) + tag.count(),
        reverse=True
    )
gets the related tags :return: `list` of `Tag` instances
def forget(identifier):
    """Tell homely to forget previously-added dotfiles repositories.

    Each IDENTIFIER is looked up in the repo list; matching records are
    removed from the config.  Exits with status 1 when any identifier
    did not match a known repo.
    """
    errors = False
    for one in identifier:
        cfg = RepoListConfig()
        info = cfg.find_by_any(one, "ilc")
        if not info:
            warn("No repos matching %r" % one)
            errors = True
            continue
        note("Removing record of repo [%s] at %s" % (
            info.shortid(), info.localrepo.repo_path))
        # Re-read the config under saveconfig so the removal is persisted.
        with saveconfig(RepoListConfig()) as cfg:
            cfg.remove_repo(info.repoid)
    if errors:
        sys.exit(1)
Tells homely to forget about a dotfiles repository that was previously added. You can then run `homely update` to have homely perform automatic cleanup of anything that was installed by that dotfiles repo. REPO This should be the path to a local dotfiles repository that has already been registered using `homely add`. You may specify multiple REPOs to remove at once.
def sorted_bfs_successors(G, source=None):
    """Return a dict of successors in breadth-first search from *source*.

    Parameters
    ----------
    G : DiscourseDocumentGraph
        graph
    source : node, optional
        Start node for the BFS; defaults to G.root.

    Returns
    -------
    successors : dict
        A dictionary with nodes as keys and lists of successor nodes
        as values.
    """
    if source is None:
        source = G.root
    successors = defaultdict(list)
    for parent, child in sorted_bfs_edges(G, source):
        successors[parent].append(child)
    return dict(successors)
Return dictionary of successors in breadth-first-search from source. Parameters ---------- G : DiscourseDocumentGraph graph source : node Specify starting node for breadth-first search and return edges in the component reachable from source. Returns ------- successors: dict A dictionary with nodes as keys and list of succssors nodes as values.
def select_grid_model_residential(lvgd):
    """Select a typified LV model grid based on the district's population.

    Population is converted to an apartment count (capped at 196, the
    largest available model grid) and mapped to string properties.

    Parameters
    ----------
    lvgd : LVGridDistrictDing0
        Low-voltage grid district object

    Returns
    -------
    :pandas:`pandas.DataFrame<dataframe>`
        Selected strings of the typified model grid with occurrences.
    """
    string_properties = lvgd.lv_grid.network.static_data['LV_model_grids_strings']
    apartment_string = lvgd.lv_grid.network.static_data[
        'LV_model_grids_strings_per_grid']
    apartment_house_branch_ratio = cfg_ding0.get("assumptions",
                                                 "apartment_house_branch_ratio")
    population_per_apartment = cfg_ding0.get("assumptions",
                                             "population_per_apartment")
    apartments = round(lvgd.population / population_per_apartment)
    if apartments > 196:
        # Only 196 distinct topologies exist; clamp to the largest one.
        apartments = 196
    strings = apartment_string.loc[apartments]
    # Strings that occur at least once in the chosen grid topology.
    selected_strings = [int(s) for s in strings[strings >= 1].index.tolist()]
    selected_strings_df = string_properties.loc[selected_strings]
    occurence_selector = [str(i) for i in selected_strings]
    selected_strings_df['occurence'] = strings.loc[occurence_selector].tolist()
    return selected_strings_df
Selects typified model grid based on population Parameters ---------- lvgd : LVGridDistrictDing0 Low-voltage grid district object Returns ------- :pandas:`pandas.DataFrame<dataframe>` Selected string of typified model grid :pandas:`pandas.DataFrame<dataframe>` Parameters of chosen Transformer Notes ----- In total 196 distinct LV grid topologies are available that are chosen by population in the LV grid district. Population is translated to number of house branches. Each grid model fits a number of house branches. If this number exceeds 196, still the grid topology of 196 house branches is used. The peak load of the LV grid district is uniformly distributed across house branches.
def add_field(self, field):
    """Add a field to this table, skipping duplicates by name.

    :param field: a field name string, a {'alias': field} dict, or a
        ``Field`` instance
    :type field: str or dict or Field
    :return: the added Field, or None when a field with that name exists
    """
    field = FieldFactory(
        field,
    )
    field.set_table(self)
    field_name = field.get_name()
    if any(existing.get_name() == field_name for existing in self.fields):
        return None
    self.before_add_field(field)
    field.before_add()
    if field.ignore is False:
        self.fields.append(field)
    return field
Adds a field to this table :param field: This can be a string of a field name, a dict of {'alias': field}, or a ``Field`` instance :type field: str or dict or Field
def setup_toolbar(self):
    """Set up the save/delete toolbar and return its layout."""
    self.savefig_btn = create_toolbutton(
        self, icon=ima.icon('filesave'),
        tip=_("Save Image As..."),
        triggered=self.emit_save_figure)
    self.delfig_btn = create_toolbutton(
        self, icon=ima.icon('editclear'),
        tip=_("Delete image"),
        triggered=self.emit_remove_figure)
    toolbar = QVBoxLayout()
    toolbar.setContentsMargins(0, 0, 0, 0)
    toolbar.setSpacing(1)
    toolbar.addWidget(self.savefig_btn)
    toolbar.addWidget(self.delfig_btn)
    # Stretch keeps the buttons packed at the top of the column.
    toolbar.addStretch(2)
    return toolbar
Setup the toolbar.
def log(cls, q):
    """Quaternion logarithm: log(q) := (log(|q|), v/|v| * acos(w/|q|)).

    A near-zero quaternion yields a -inf scalar and NaN vector; a
    near-real quaternion yields a zero vector part.
    NOTE(review): `log` and `acos` in the body resolve to module-level
    names (presumably `from math import log, acos`), not this
    classmethod — confirm the enclosing module's imports.
    """
    v_norm = np.linalg.norm(q.vector)
    q_norm = q.norm
    tolerance = 1e-17
    if q_norm < tolerance:
        # log of (near) zero is undefined; flag with -inf / NaN.
        return Quaternion(scalar=-float('inf'), vector=float('nan')*q.vector)
    if v_norm < tolerance:
        # Real quaternion: the vector part of the log is zero.
        return Quaternion(scalar=log(q_norm), vector=[0,0,0])
    vec = q.vector / v_norm
    return Quaternion(scalar=log(q_norm), vector=acos(q.scalar/q_norm)*vec)
Quaternion Logarithm. Find the logarithm of a quaternion amount. Params: q: the input quaternion/argument as a Quaternion object. Returns: A quaternion amount representing log(q) := (log(|q|), v/|v|acos(w/|q|)). Note: The method computes the logarithm of general quaternions. See [Source](https://math.stackexchange.com/questions/2552/the-logarithm-of-quaternion/2554#2554) for more details.
def _rescanSizes(self, force=True):
    """Trigger a btrfs quota rescan so that subvolume sizes are correct.

    Enables quota accounting, then starts (and waits for) a full quota
    rescan.

    :param force: when False, skip the rescan if the rescan status flags
        indicate none is needed.
    """
    status = self.QUOTA_CTL(cmd=BTRFS_QUOTA_CTL_ENABLE).status
    logger.debug("CTL Status: %s", hex(status))

    status = self.QUOTA_RESCAN_STATUS()
    logger.debug("RESCAN Status: %s", status)

    if not status.flags:
        if not force:
            return
        self.QUOTA_RESCAN()

    # FIX: logger.warn() is a deprecated alias for logger.warning()
    logger.warning("Waiting for btrfs quota usage scan...")
    self.QUOTA_RESCAN_WAIT()
Zero and recalculate quota sizes so that subvolume sizes will be correct.
def conjugate_quat(quat):
    """Return the conjugate of *quat*: vector part negated, scalar kept."""
    qx, qy, qz = -quat.x, -quat.y, -quat.z
    return Quat(qx, qy, qz, quat.w)
Negate the vector part of the quaternion.
def line(self, x0, y0, x1, y1, c='*'):
    r"""Draw a line from (x0, y0) to (x1, y1) with character *c*.

    Implements Bresenham's line algorithm; see Wikipedia_ for the
    derivation.

    .. _Wikipedia: http://en.wikipedia.org/wiki/Bresenham's_line_algorithm
    """
    # work in the octant where x is the driving axis
    steep = abs(y1 - y0) > abs(x1 - x0)
    if steep:
        (x0, y0) = (y0, x0)
        (x1, y1) = (y1, x1)
    if x0 > x1:
        (x0, x1) = (x1, x0)
        (y0, y1) = (y1, y0)

    deltax = x1 - x0
    deltay = abs(y1 - y0)
    error = deltax / 2
    y = y0
    if y0 < y1:
        ystep = 1
    else:
        ystep = -1

    # BUG FIX: range(x0, x1 - 1) stopped two cells short of the endpoint;
    # Bresenham iterates x from x0 through x1 inclusive.
    for x in range(x0, x1 + 1):
        if steep:
            self[y, x] = c
        else:
            self[x, y] = c
        error = error - deltay
        if error < 0:
            y = y + ystep
            error = error + deltax
r"""Draws a line Who would have thought this would be so complicated? Thanks again Wikipedia_ <3 .. _Wikipedia: http://en.wikipedia.org/wiki/Bresenham's_line_algorithm
def T(self):
    """Get the transpose of this ScoreMatrix."""
    kwargs = dict(scores=self.values, weights=self.weights, transpose=True)
    return ScoreMatrix(self.tests, self.models, **kwargs)
Get transpose of this ScoreMatrix.
def inverse_transform(self, X, copy=None):
    """Scale back the data to the original representation.

    :param X: Scaled data matrix (dense ndarray or scipy sparse).
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param bool copy: Copy the X data matrix; defaults to ``self.copy``.
    :return: X with the scaling operation reverted.
    :rtype: numpy.ndarray, shape [n_samples, n_features]
    :raises ValueError: if X is sparse and ``with_mean`` is set — sparse
        matrices cannot be uncentered without densifying.
    """
    check_is_fitted(self, 'scale_')

    copy = copy if copy is not None else self.copy
    if sparse.issparse(X):
        if self.with_mean:
            raise ValueError(
                "Cannot uncenter sparse matrices: pass `with_mean=False` "
                "instead See docstring for motivation and alternatives.")
        if not sparse.isspmatrix_csr(X):
            # tocsr() already returns a new object, so skip the copy below
            X = X.tocsr()
            copy = False
        if copy:
            X = X.copy()
        if self.scale_ is not None:
            inplace_column_scale(X, self.scale_)
    else:
        X = numpy.asarray(X)
        if copy:
            X = X.copy()
        # invert standardization: multiply by scale, then re-add the mean
        if self.with_std:
            X *= self.scale_
        if self.with_mean:
            X += self.mean_
    return X
Scale back the data to the original representation. :param X: Scaled data matrix. :type X: numpy.ndarray, shape [n_samples, n_features] :param bool copy: Copy the X data matrix. :return: X data matrix with the scaling operation reverted. :rtype: numpy.ndarray, shape [n_samples, n_features]
def paint(self, tbl):
    """Paint the table on the terminal.

    Clears the screen and writes the table's string form. Currently only
    basic string output is supported.

    :return: True on success, False when *tbl* is not a ``Table``.
    """
    if isinstance(tbl, Table):
        out = self.term.stream
        out.write(self.term.clear)
        out.write(str(tbl))
        return True
    logging.error("unable to paint table: invalid object")
    return False
Paint the table on terminal Currently only print out basic string format
def copy(self):
    """Make a copy of the factor.

    The K matrix and h vector are copied; variables and the scalar g are
    shared/passed through.

    Returns
    -------
    CanonicalDistribution: Copy of the factor.
    """
    return CanonicalDistribution(
        self.variables, self.K.copy(), self.h.copy(), self.g
    )
Makes a copy of the factor. Returns ------- CanonicalDistribution object: Copy of the factor Examples -------- >>> from pgmpy.factors.continuous import CanonicalDistribution >>> phi = CanonicalDistribution(['X', 'Y'], np.array([[1, -1], [-1, 1]]), np.array([[1], [-1]]), -3) >>> phi.variables ['X', 'Y'] >>> phi.K array([[1, -1], [-1, 1]]) >>> phi.h array([[1], [-1]]) >>> phi.g -3 >>> phi2 = phi.copy() >>> phi2.variables ['X', 'Y'] >>> phi2.K array([[1, -1], [-1, 1]]) >>> phi2.h array([[1], [-1]]) >>> phi2.g -3
def create_templates_static_files(app_path):
    """Create the templates/ and static/ (img, css, js) directory skeleton.

    :param app_path: root directory of the application.
    :return: tuple of (css_path, templates_path).
    """
    templates_path = os.path.join(app_path, 'templates')
    static_path = os.path.join(app_path, 'static')
    _mkdir_p(templates_path)
    _mkdir_p(static_path)
    # NOTE(review): changes the process working directory — confirm callers
    # rely on this side effect
    os.chdir(static_path)
    for sub in ('img', 'css', 'js'):
        _mkdir_p(os.path.join(static_path, sub))
    css_path = os.path.join(static_path, 'css')
    return css_path, templates_path
Create the templates and static directories (with img, css and js subdirectories) and return the css and templates paths.
def book(self, symbol='btcusd', limit_bids=0, limit_asks=0):
    """Send a request for the public order book, return the response.

    Arguments:
        symbol -- currency symbol (default 'btcusd')
        limit_bids -- limit the number of bids returned (default 0)
        limit_asks -- limit the number of asks returned (default 0)
    """
    endpoint = self.base_url + '/v1/book/' + symbol
    query = dict(limit_bids=limit_bids, limit_asks=limit_asks)
    return requests.get(endpoint, query)
Send a request to get the public order book, return the response. Arguments: symbol -- currency symbol (default 'btcusd') limit_bids -- limit the number of bids returned (default 0) limit_asks -- limit the number of asks returned (default 0)
def total_msgs(xml):
    """Count the total number of messages across all elements.

    :param xml: iterable of elements, each with a sized ``message``
        attribute.
    :return: sum of ``len(x.message)`` over all elements.
    """
    # sum() over a generator replaces the manual accumulator loop
    return sum(len(x.message) for x in xml)
Count the total number of messages across all elements.
def get_query_parameters(args, cell_body, date_time=None):
    """Extract query parameters from cell body if provided.

    Also validates the cell body schema using jsonschema to catch errors
    before sending the http request. This validation isn't complete,
    however; it does not validate recursive schemas, but it acts as a good
    filter against most simple schemas.

    Args:
        args: arguments passed to the magic cell.
        cell_body: body of the magic cell.
        date_time: the timestamp at which the date-time related parameters
            need to be resolved; defaults to the current time at call time.

    Returns:
        Validated object containing query parameters.
    """
    # BUG FIX: the default was ``datetime.datetime.now()`` in the signature,
    # which is evaluated once at import time and freezes the timestamp for
    # every subsequent default call.
    if date_time is None:
        date_time = datetime.datetime.now()

    env = google.datalab.utils.commands.notebook_environment()
    config = google.datalab.utils.commands.parse_config(cell_body, env=env, as_dict=False)
    sql = args['query']
    if sql is None:
        raise Exception('Cannot extract query parameters in non-query cell')

    if config:
        jsonschema.validate(config, BigQuerySchema.QUERY_PARAMS_SCHEMA)
    config = config or {}
    config_parameters = config.get('parameters', [])
    return bigquery.Query.get_query_parameters(config_parameters, date_time=date_time)
Extract query parameters from cell body if provided Also validates the cell body schema using jsonschema to catch errors before sending the http request. This validation isn't complete, however; it does not validate recursive schemas, but it acts as a good filter against most simple schemas Args: args: arguments passed to the magic cell cell_body: body of the magic cell date_time: The timestamp at which the date-time related parameters need to be resolved. Returns: Validated object containing query parameters
def tag(self, layer):
    """Tag the annotations of the given layer, if a tagger is registered.

    Unknown layers are silently ignored. Returns ``self`` for chaining.
    """
    try:
        tagger = self.layer_tagger_mapping[layer]
    except KeyError:
        return self
    tagger()
    return self
Tag the annotations of given layer. It can automatically tag any built-in layer type.
def _get_crc32(self, filename):
    """Calculate and compare the CRC32 and return the raw buffer.

    The CRC32 is added to the ``files_crc32`` dictionary if not already
    present; on first computation it is compared against the CRC declared
    in the zip header and a mismatch is logged as an error (the buffer is
    still returned).

    :param filename: filename inside the zipfile
    :rtype: bytes
    """
    buffer = self.zip.read(filename)
    if filename not in self.files_crc32:
        self.files_crc32[filename] = crc32(buffer)
        if self.files_crc32[filename] != self.zip.getinfo(filename).CRC:
            log.error("File '{}' has different CRC32 after unpacking! "
                      "Declared: {:08x}, Calculated: {:08x}".format(filename,
                                                                    self.zip.getinfo(filename).CRC,
                                                                    self.files_crc32[filename]))
    return buffer
Calculates and compares the CRC32 and returns the raw buffer. The CRC32 is added to `files_crc32` dictionary, if not present. :param filename: filename inside the zipfile :rtype: bytes
def network_protocol(self, layer: Optional[Layer] = None) -> str:
    """Get a random network protocol from the OSI model.

    :param layer: ``Layer`` enum member selecting the OSI layer; when
        omitted, ``_validate_enum`` resolves a default/random one.
    :return: Protocol name, e.g. ``'AMQP'``.
    """
    key = self._validate_enum(item=layer, enum=Layer)
    protocols = NETWORK_PROTOCOLS[key]
    return self.random.choice(protocols)
Get a random network protocol form OSI model. :param layer: Enum object Layer. :return: Protocol name. :Example: AMQP
def filter_by_size(feat_dir: Path, prefixes: List[str], feat_type: str,
                   max_samples: int) -> List[str]:
    """Return the prefixes whose feature files have length <= max_samples.

    NOTE(review): the original docstring mentions sorting by length;
    presumably ``get_prefix_lens`` returns (prefix, length) pairs already
    sorted — confirm against its implementation.
    """
    prefix_lens = get_prefix_lens(Path(feat_dir), prefixes, feat_type)
    prefixes = [prefix for prefix, length in prefix_lens
                if length <= max_samples]
    return prefixes
Sorts the files by their length and returns those with less than or equal to max_samples length. Returns the filename prefixes of those files. The main job of the method is to filter, but the sorting may give better efficiency when doing dynamic batching unless it gets shuffled downstream.
def write_file(self, name, path=None):
    """Write the contents of an on-disk file into the XPI.

    :param name: archive name inside the XPI.
    :param path: on-disk source path; defaults to *name*.
    """
    source = name if path is None else path
    self.zf.write(source, name)
Write the contents of a file from the disk to the XPI.
def wrap_list(item):
    """Return *item* as a list.

    Lists are returned unchanged (same object); tuples and sets are
    converted; ``None`` becomes an empty list; anything else is wrapped in
    a single-element list.
    """
    if item is None:
        return []
    if isinstance(item, list):
        return item
    if isinstance(item, (tuple, set)):
        return list(item)
    return [item]
Returns an object as a list. If the object is a list, it is returned directly. If it is a tuple or set, it is returned as a list. If it is another object, it is wrapped in a list and returned.
def get_vhost(self, vname):
    """Return the attributes of a single named vhost in a dict.

    :param string vname: Name of the vhost to get (URL-quoted before use).
    :returns dict vhost: Attribute dict for the named vhost.
    """
    encoded = quote(vname, '')
    return self._call(
        Client.urls['vhosts_by_name'] % encoded,
        'GET',
        headers=Client.json_headers,
    )
Returns the attributes of a single named vhost in a dict. :param string vname: Name of the vhost to get. :returns dict vhost: Attribute dict for the named vhost
def assert_tz_offset(tz):
    """Assert that the system's timezone offset equals the offset of *tz*.

    A mismatch usually indicates a misconfiguration (e.g. a wrong
    /etc/timezone on systemd distributions) and raises ``ValueError``.
    """
    tz_offset = get_tz_offset(tz)
    system_offset = get_system_offset()
    if tz_offset == system_offset:
        return
    msg = ('Timezone offset does not match system offset: {0} != {1}. '
           'Please, check your config files.').format(tz_offset, system_offset)
    raise ValueError(msg)
Assert that system's timezone offset equals to the timezone offset found. If they don't match, we probably have a misconfiguration, for example, an incorrect timezone set in /etc/timezone file in systemd distributions.
def build_raw_request_message(self, request, args, is_completed=False):
    """Build a protocol-level message based on request and args.

    ``request`` carries meta information about the outgoing request;
    ``args`` is the current chunk of data from the argstreams;
    ``is_completed`` selects the message flags.

    :param request: Request
    :param args: array of arg streams
    :param is_completed: True when this is the final fragment
    :return: CallRequestMessage/CallRequestContinueMessage
    """
    request.flags = FlagsType.none if is_completed else FlagsType.fragment

    # first chunk: full call-request carrying routing/tracing headers
    if request.state == StreamState.init:
        message = CallRequestMessage(
            flags=request.flags,
            ttl=request.ttl * 1000,  # presumably seconds -> ms for the wire — TODO confirm
            tracing=request.tracing,
            service=request.service,
            headers=request.headers,
            checksum=request.checksum,
            args=args
        )
        request.state = (StreamState.completed if is_completed
                         else StreamState.streaming)
    # subsequent chunks: lightweight continue message
    elif request.state == StreamState.streaming:
        message = CallRequestContinueMessage(
            flags=request.flags,
            checksum=request.checksum,
            args=args
        )
        request.state = (StreamState.completed if is_completed
                         else StreamState.streaming)
    # NOTE(review): if request.state is neither init nor streaming,
    # ``message`` is unbound and the next line raises NameError — confirm
    # callers guarantee one of the two states.
    message.id = request.id
    return message
build protocol level message based on request and args. request object contains meta information about outgoing request. args are the currently chunk data from argstreams is_completed tells the flags of the message :param request: Request :param args: array of arg streams :param is_completed: message flags :return: CallRequestMessage/CallRequestContinueMessage
def C0t_(self):
    """Time-lagged covariance matrix.

    Delegates to the running covariance estimator; requires the model to
    have been estimated first.
    """
    self._check_estimated()
    return self._rc.cov_XY(bessel=self.bessel)
Time-lagged covariance matrix
def from_protobuf(cls, msg):
    """Create an instance from a protobuf message.

    :raises TypeError: if *msg* is not an instance of
        ``cls._protobuf_cls``.
    """
    expected = cls._protobuf_cls
    if not isinstance(msg, expected):
        raise TypeError("Expected message of type "
                        "%r" % expected.__name__)
    params = {field: getattr(msg, field) for field in cls._get_params()}
    return cls(**params)
Create an instance from a protobuf message.
def first(seq, key=lambda x: bool(x), default=None, apply=lambda x: x):
    """Give the first value that satisfies the key test.

    Args:
        seq (iterable):
        key (callable): test for each element of iterable
        default: returned when all elements fail the test; if callable, it
            is invoked (only on a miss) and its result returned
        apply (callable): applied to the matching element before return,
            but not to the default value

    Returns:
        first element in seq that passes key, mutated with optional apply

    Examples:
        >>> first([0, False, None, [], (), 42])
        42
        >>> first([0, False, None, [], ()]) is None
        True
        >>> first([0, False, None, [], ()], default='ohai')
        'ohai'
        >>> first([1, 1, 3, 4, 5], key=lambda x: x % 2 == 0)
        4
    """
    _missing = object()  # sentinel distinguishes "no match" from falsy results
    result = next((apply(x) for x in seq if key(x)), _missing)
    if result is _missing:
        # BUG FIX: previously ``default()`` was evaluated eagerly as the
        # fallback argument to ``next`` and therefore invoked (with any side
        # effects) even when a matching element existed; call it lazily.
        return default() if callable(default) else default
    return result
Give the first value that satisfies the key test. Args: seq (iterable): key (callable): test for each element of iterable default: returned when all elements fail test apply (callable): applied to element before return, but not to default value Returns: first element in seq that passes key, mutated with optional apply Examples: >>> first([0, False, None, [], (), 42]) 42 >>> first([0, False, None, [], ()]) is None True >>> first([0, False, None, [], ()], default='ohai') 'ohai' >>> import re >>> m = first(re.match(regex, 'abc') for regex in ['b.*', 'a(.*)']) >>> m.group(1) 'bc' The optional `key` argument specifies a one-argument predicate function like that used for `filter()`. The `key` argument, if supplied, must be in keyword form. For example: >>> first([1, 1, 3, 4, 5], key=lambda x: x % 2 == 0) 4
def _start_update_server(auth_token):
    """Start a TCP server receiving accumulator updates in a daemon thread.

    Binds to an ephemeral port on localhost and returns the server object.
    """
    server = AccumulatorServer(("localhost", 0), _UpdateRequestHandler, auth_token)
    worker = threading.Thread(target=server.serve_forever)
    worker.daemon = True  # don't block interpreter shutdown
    worker.start()
    return server
Start a TCP server to receive accumulator updates in a daemon thread, and returns it
def _validated_config_filename(self, name): dir_name = self._make_config_dir() filename = os.path.join(dir_name, name.split(".json")[0] + ".json") return filename
Make config dir and return full file path and extension Args: name (str): Filename without dir or extension Returns: str: Full path including extension
def recent(category=None, pages=1, sort=None, order=None):
    """Return most recently added torrents as a Search.

    Can be sorted and categorized and contain multiple pages.
    """
    search = Search()
    search.recent(category, pages, sort, order)
    return search
Return most recently added torrents. Can be sorted and categorized and contain multiple pages.
def create(cls, config_file=None):
    """Return the default (singleton) configuration.

    Created and loaded from the ini file on first call; subsequent calls
    return the cached instance.

    :raises RuntimeError: if called again with a ``config_file`` different
        from the one the singleton was initialized with.
    """
    if cls.instance is None:
        cls.instance = cls(config_file)
        cls.instance.load_ini()
    # guard against silently using a different config than requested
    if config_file and config_file != cls.instance.config_file:
        raise RuntimeError("Configuration initialized a second time with a different file!")
    return cls.instance
Return the default configuration.
def lost_master_primary(self):
    """Record the disconnection time of the master replica's primary and
    schedule a view change check, which in turn can send a view change
    message.
    """
    self.primaries_disconnection_times[self.master_replica.instId] = time.perf_counter()
    self._schedule_view_change()
Schedule a primary connection check which in turn can send a view change message
def _flush_wait(flush_future, write_future):
    """Pause briefly allowing any pending metric writes to complete before
    shutting down.

    Re-schedules itself on the IOLoop every 0.25s: once the current batch
    write is done, either resolves ``flush_future`` (nothing left pending)
    or starts writing the next batch.

    :param tornado.concurrent.Future flush_future: The future to resolve
        when the shutdown flush is complete.
    :param tornado.concurrent.Future write_future: The future for the
        current batch write operation.
    """
    if write_future.done():
        if not _pending_measurements():
            flush_future.set_result(True)
            return
        else:
            write_future = _write_measurements()
    ioloop.IOLoop.current().add_timeout(
        ioloop.IOLoop.current().time() + 0.25,
        _flush_wait, flush_future, write_future)
Pause briefly allowing any pending metric writes to complete before shutting down. :param tornado.concurrent.Future flush_future: The future to resolve when the shutdown is complete. :param tornado.concurrent.Future write_future: The future that is for the current batch write operation.
def autodiscover(self, message):
    """Return the server version number in response to a client
    autodiscover message.

    Clients whose "version" is in ``self.allowed_versions`` receive an
    "OHAI Client" reply carrying the server version and name; others
    receive a "BYE REGISTER" reply.

    Args:
        message (dict): the autodiscover message from the client; read
            keys are "version" and "cuuid".

    Returns:
        Serialized response data (encryption disabled), e.g.
        '{"method": "OHAI Client", "version": "1.0"}'.
    """
    if message["version"] in self.allowed_versions:
        logger.debug("<%s> Client version matches server "
                     "version." % message["cuuid"])
        response = serialize_data({"method": "OHAI Client",
                                   "version": self.version,
                                   "server_name": self.server_name},
                                  self.compression, encryption=False)
    else:
        logger.warning("<%s> Client version %s does not match allowed server "
                       "versions %s" % (message["cuuid"],
                                        message["version"],
                                        self.version))
        response = serialize_data({"method": "BYE REGISTER"},
                                  self.compression, encryption=False)
    return response
This function simply returns the server version number as a response to the client. Args: message (dict): A dictionary of the autodiscover message from the client. Returns: A JSON string of the "OHAI Client" server response with the server's version number. Examples: >>> response '{"method": "OHAI Client", "version": "1.0"}'
def generate_packer_filename(provider, region, builder):
    """Generate a filename to be used by packer.

    Args:
        provider (str): Name of Spinnaker provider.
        region (str): Name of provider region to use.
        builder (str): Name of builder process type.

    Returns:
        str: ``<provider>_<region>_<builder>.json``.
    """
    return '{0}_{1}_{2}.json'.format(provider, region, builder)
Generate a filename to be used by packer. Args: provider (str): Name of Spinnaker provider. region (str): Name of provider region to use. builder (str): Name of builder process type. Returns: str: Generated filename based on parameters.
def search_module(mod, pat, ignore_case=True, recursive=False, _seen=None):
    r"""Search module members (functions, classes, constants) whose names
    match a regular expression.

    Args:
        mod (module): live python module
        pat (str): regular expression matched against member names
        ignore_case (bool): case-insensitive matching when True
        recursive (bool): also search submodules defined by *mod*
        _seen (set): internal guard against revisiting modules

    Returns:
        list: unique matching member names, discovery order preserved
    """
    if _seen is not None and mod in _seen:
        return []
    import utool as ut
    reflags = re.IGNORECASE * ignore_case
    found_list = [name for name in dir(mod) if re.search(pat, name, flags=reflags)]
    if recursive:
        if _seen is None:
            _seen = set()
        _seen.add(mod)
        module_attrs = [getattr(mod, name) for name in dir(mod)]
        # only descend into submodules actually defined by this module
        submodules = [
            submod for submod in module_attrs
            if isinstance(submod, types.ModuleType) and submod not in _seen and
            ut.is_defined_by_module(submod, mod)
        ]
        for submod in submodules:
            found_list += search_module(submod, pat, ignore_case=ignore_case,
                                        recursive=recursive, _seen=_seen)
    # dedupe while preserving discovery order
    found_list = ut.unique_ordered(found_list)
    return found_list
r""" Searches module functions, classes, and constants for members matching a pattern. Args: mod (module): live python module pat (str): regular expression Returns: list: found_list CommandLine: python -m utool.util_dev --exec-search_module --mod=utool --pat=module python -m utool.util_dev --exec-search_module --mod=opengm --pat=cut python -m utool.util_dev --exec-search_module --mod=opengm --pat=multi python -m utool.util_dev --exec-search_module --mod=plottool --pat=networkx python -m utool.util_dev --exec-search_module --mod=utool --pat=Levenshtein Example: >>> # ENABLE_DOCTEST >>> from utool.util_dev import * # NOQA >>> import utool as ut >>> recursive = True >>> ignore_case = True >>> modname = ut.get_argval('--mod', type_=str, default='utool') >>> pat = ut.get_argval('--pat', type_=str, default='search') >>> mod = ut.import_modname(modname) >>> print('pat = %r' % (pat,)) >>> print('mod = %r' % (mod,)) >>> found_list = search_module(mod, pat, recursive=recursive) >>> result = ('found_list = %s' % (ut.repr2(found_list),)) >>> print(result) Ignore: mod = cv2 pat = 'freak'
def lookup_path(self, mold_id_path, default=_marker):
    """For the given mold_id_path, look up the mold_id and translate that
    path to its filesystem equivalent.

    The first two '/'-separated fragments form the mold_id; the remaining
    fragments become a subpath. Fragments containing path separators or
    ``..`` are rejected so the result cannot escape the mold directory.

    :param default: value returned on lookup failure; when omitted the
        underlying ``KeyError`` propagates.
    """
    fragments = mold_id_path.split('/')
    mold_id = '/'.join(fragments[:2])
    try:
        subpath = []
        for piece in fragments[2:]:
            # reject os separators and parent-dir refs (path traversal guard)
            if (sep in piece or (altsep and altsep in piece) or
                    piece == pardir):
                raise KeyError
            elif piece and piece != '.':
                subpath.append(piece)
        path = self.mold_id_to_path(mold_id)
    except KeyError:
        if default is _marker:
            raise
        return default
    return join(path, *subpath)
For the given mold_id_path, look up the mold_id and translate that path to its filesystem equivalent.
def sort(self):
    """Sort the fragments in the list.

    After sorting, verifies that every pair of adjacent fragments is in an
    allowed relative position.

    :raises ValueError: if the list contains two fragments overlapping in
        a forbidden way
    """
    if self.is_guaranteed_sorted:
        self.log(u"Already sorted, returning")
        return
    self.log(u"Sorting...")
    self.__fragments = sorted(self.__fragments)
    self.log(u"Sorting... done")
    self.log(u"Checking relative positions...")
    for i in range(len(self) - 1):
        current_interval = self[i].interval
        next_interval = self[i + 1].interval
        if current_interval.relative_position_of(next_interval) not in self.ALLOWED_POSITIONS:
            self.log(u"Found overlapping fragments:")
            self.log([u"  Index %d => %s", i, current_interval])
            self.log([u"  Index %d => %s", i + 1, next_interval])
            self.log_exc(u"The list contains two fragments overlapping in a forbidden way", None, True, ValueError)
    self.log(u"Checking relative positions... done")
    self.__sorted = True
Sort the fragments in the list. :raises ValueError: if there is a fragment which violates the list constraints
def set_extana_callback(self, callback, data=None):
    """Register a callback for incoming SK8-ExtAna data packets.

    Args:
        callback: callable invoked per packet, or ``None`` to disable.
        data: optional arbitrary object passed through to the callback.
    """
    self.extana_callback, self.extana_callback_data = callback, data
Register a callback for incoming data packets from the SK8-ExtAna board. This method allows you to pass in a callable which will be called on receipt of each packet sent from the SK8-ExtAna board. Set to `None` to disable it again. Args: callback: a callable with the following signature: (ana1, ana2, temp, seq, timestamp, data) where: ana1, ana2 = current values of the two analogue inputs temp = temperature sensor reading seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback