code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def invoke_in_mainloop(func, *args, **kwargs):
    """Invoke ``func`` in the mainloop (via ``gcall``) and pass its result back.

    Blocks the calling thread until the mainloop has executed ``func``.

    :param func: callable to run in the mainloop
    :return: whatever ``func`` returns
    :raises: re-raises any exception raised by ``func``, with its original
        traceback
    """
    results = queue.Queue()

    @gcall
    def run():
        try:
            data = func(*args, **kwargs)
            results.put(data)
            results.put(None)
        except BaseException:
            # Signal failure: (None, exc_info) instead of (result, None).
            results.put(None)
            results.put(sys.exc_info())
            raise

    data = results.get()
    exception = results.get()
    if exception is None:
        return data
    # BUG FIX: the queue holds exactly two items; unpack the exc_info we
    # already fetched instead of blocking forever on a third get().  Also
    # replaced the Python 2 three-argument raise with the Python 3 form.
    tp, val, tb = exception
    raise val.with_traceback(tb)
def get_adapted_session(adapter):
    """Mount ``adapter`` for both HTTP and HTTPS on a new session.

    :param adapter: a :class:`requests.adapters.HTTPAdapter` instance
    :return: the adapted :class:`requests.Session` instance
    """
    adapted = requests.Session()
    for scheme in ("http://", "https://"):
        adapted.mount(scheme, adapter)
    return adapted
def _fetch_router_chunk_data(self, router_ids=None):
    """Fetch router data from the routing plugin in chunks.

    :param router_ids: list of router ids of routers to fetch.
        NOTE(review): the default of None would crash on the ``len()``
        call below — callers presumably always pass a list; confirm.
    :return: list of router dicts
    """
    curr_router = []
    if len(router_ids) > self.sync_routers_chunk_size:
        # Large sync: page through the ids so each RPC stays bounded.
        for i in range(0, len(router_ids), self.sync_routers_chunk_size):
            routers = self.plugin_rpc.get_routers(
                self.context,
                (router_ids[i:i + self.sync_routers_chunk_size]))
            LOG.debug('Processing :%r', routers)
            for r in routers:
                curr_router.append(r)
    else:
        # Small sync: a single RPC for the whole list.
        curr_router = self.plugin_rpc.get_routers(
            self.context, router_ids=router_ids)
    return curr_router
def make_file_cm(filename, mode='a'):
    """Return a context-manager factory that opens ``filename`` on entry.

    The yielded filehandle is closed after the ``with`` block.  Useful for
    logging the output of a ``Vagrant`` instance::

        log_cm = make_file_cm('application.log')
        v = Vagrant(out_cm=log_cm, err_cm=log_cm)

    :param filename: path to the file
    :param mode: open mode; defaults to ``'a'`` (append)
    """
    @contextlib.contextmanager
    def _file_cm():
        fh = open(filename, mode=mode)
        try:
            yield fh
        finally:
            fh.close()
    return _file_cm
def to_json(value, **kwargs):
    """Return a copy of the tuple as a list.

    Any ``HasProperties`` instances in the tuple are serialized; all other
    items are copied through unchanged.
    """
    serialized = []
    for item in value:
        if isinstance(item, HasProperties):
            serialized.append(item.serialize(**kwargs))
        else:
            serialized.append(item)
    return serialized
def execute(self, *args, **kwargs):
    """Initialize and run the tool.

    Shorthand for parsing command-line arguments with ``self.parser`` and
    then calling ``self.process(parsed_arguments)``.
    """
    parsed = self.parser.parse_args(*args, **kwargs)
    self.process(parsed)
def get_alert_community(self, channel=None):
    """Get the community string used in SNMP traps from this BMC.

    :param channel: channel to read configuration for; autodetected when
        omitted
    :returns: the community string
    """
    chan = self.get_network_channel() if channel is None else channel
    rsp = self.xraw_command(netfn=0xc, command=2, data=(chan, 16, 0, 0))
    # Skip the leading byte of the response, then take everything up to
    # the first NUL terminator.
    payload = rsp['data'][1:]
    community, _, _ = payload.partition('\x00')
    return community
def doWaitWebRequest(url, method="GET", data=None, headers=None):
    """Same as ``doWebRequest``, but retries (with ``waitForURL``) until the
    request succeeds.

    :param url: URL to request
    :param method: HTTP method, defaults to GET
    :param data: request body, if any
    :param headers: optional dict of request headers
    :return: ``(response, content)`` from the successful request
    """
    # BUG FIX: mutable default argument ``headers={}`` was shared across
    # calls; use None and create a fresh dict per call instead.
    if headers is None:
        headers = {}
    completed = False
    while not completed:
        completed = True
        try:
            response, content = doWebRequest(url, method, data, headers)
        except urllib2.URLError:
            completed = False
            waitForURL(url)
    return response, content
def is_visit_primitive(obj):
    """Return True if properly visiting ``obj`` yields only the object itself.

    Non-string primitives are visit-primitive; containers are not; for
    str/bytes only single-character values qualify.  Anything else is
    decided by actually visiting it.
    """
    from .base import visit  # local import, presumably to avoid a cycle
    # Primitive types (excluding str/bytes) round-trip unchanged.
    if (isinstance(obj, tuple(PRIMITIVE_TYPES)) and not isinstance(obj, STR)
            and not isinstance(obj, bytes)):
        return True
    # Containers get decomposed into elements, so they are not primitive.
    if (isinstance(obj, CONTAINERS) and not isinstance(obj, STR)
            and not isinstance(obj, bytes)):
        return False
    # str/bytes: presumably visited per character, so only length 1 works.
    if isinstance(obj, STR) or isinstance(obj, bytes):
        if len(obj) == 1:
            return True
        return False
    # Fallback: visit with a small enumeration cap and compare.
    return list(visit(obj, max_enum=2)) == [obj]
def finish(self):
    """Mark the progress bar as finished.

    Jumps the bar to its maximum value and, if a SIGWINCH handler was
    installed, restores the default handler.
    """
    self.update(self.maxval)
    if not self.signal_set:
        return
    signal.signal(signal.SIGWINCH, signal.SIG_DFL)
def fulfill_access_secret_store_condition(event, agreement_id, did, service_agreement,
                                          consumer_address, publisher_account):
    """Fulfill the access secret-store condition of a service agreement.

    :param event: AttributeDict with the event data
    :param agreement_id: id of the agreement, hex str
    :param did: DID, str
    :param service_agreement: ServiceAgreement instance
    :param consumer_address: ethereum account address of consumer, hex str
    :param publisher_account: Account instance of the publisher
    """
    logger.debug(f"release reward after event {event}.")
    name_to_parameter = {
        param.name: param
        for param in service_agreement.condition_by_name['accessSecretStore'].parameters
    }
    document_id = add_0x_prefix(name_to_parameter['_documentId'].value)
    asset_id = add_0x_prefix(did_to_id(did))
    # The document referenced by the condition must be the asset of this DID.
    assert document_id == asset_id, f'document_id {document_id} <=> asset_id {asset_id} mismatch.'
    # FIX: removed the pointless ``except Exception as e: raise e`` wrapper
    # (it only reset the raise site) and the duplicated Keeper lookup.
    keeper = Keeper.get_instance()
    tx_hash = keeper.access_secret_store_condition.fulfill(
        agreement_id, document_id, consumer_address, publisher_account
    )
    process_tx_receipt(
        tx_hash,
        keeper.access_secret_store_condition.FULFILLED_EVENT,
        'AccessSecretStoreCondition.Fulfilled'
    )
def open(self, name, *args, **kwargs):
    """Open a file, resolving ``name`` relative to ``self.basedir`` when set.

    Delegates to ``em.Subsystem.open`` after the path is resolved.
    """
    if self.basedir is not None:
        name = os.path.join(self.basedir, name)
    return em.Subsystem.open(self, name, *args, **kwargs)
def publish_post(self):
    """Publish the post to GitHub: add it if new, otherwise update it.

    :return: ``(status_code, html_url)``; ``html_url`` is None when the
        response has no content URL (i.e. the request failed).
    """
    payload = {'content': self.content_base64.decode('utf-8')}
    sha_blob = self.get_sha_blob()
    if sha_blob:
        # An existing blob sha means the file is already there: update it.
        commit_msg = 'ghPublish UPDATE: {}'.format(self.title)
        payload.update(sha=sha_blob)
        payload.update(message=commit_msg)
    else:
        commit_msg = 'ghPublish ADD: {}'.format(self.title)
        payload.update(message=commit_msg)
    r = requests.put(self.api_url,
                     auth=self.get_auth_details(),
                     data=json.dumps(payload))
    try:
        url = r.json()['content']['html_url']
        return r.status_code, url
    except KeyError:
        # Error responses carry no 'content' key.
        return r.status_code, None
def _get_form_defaults(self):
    """Return the default values for the identify form."""
    defaults = dict(
        response_format='html',
        geometry_type='esriGeometryPoint',
        projection=pyproj.Proj(str(self.service.projection)),
        return_geometry=True,
        maximum_allowable_offset=2,
        geometry_precision=3,
        return_z=False,
        return_m=False,
    )
    return defaults
def constrain_cfgdict_list(cfgdict_list_, constraint_func):
    """Constrain configuration dicts and remove duplicates.

    Each config is copied, passed to ``constraint_func`` (which may mutate
    it), and kept unless the function returns ``False`` or the config ends
    up empty.  Duplicates are dropped while preserving order.
    """
    kept = []
    for original in cfgdict_list_:
        candidate = original.copy()
        rejected = constraint_func(candidate) is False
        if rejected or not candidate:
            continue
        if candidate not in kept:
            kept.append(candidate)
    return kept
def use_quandl_data(self, authtoken):
    """Build the conversion table from Quandl CURRFX exchange-rate data.

    :param authtoken: Quandl API auth token
    """
    dfs = {}
    # Only fetch data from self.start onward.
    st = self.start.strftime("%Y-%m-%d")
    at = authtoken
    for pair in self.pairs:
        # e.g. ('USD', 'EUR') -> 'USDEUR' -> 'CURRFX/USDEUR'
        symbol = "".join(pair)
        qsym = "CURRFX/{}".format(symbol)
        dfs[symbol] = qdl.get(qsym,authtoken=at, trim_start=st)['Rate']
    self.build_conversion_table(dfs)
def highlight_matches(self):
    """Highlight the current search results in the editor.

    Does nothing unless this widget targets a code editor and the
    highlight button is checked.
    """
    if not (self.is_code_editor and self.highlight_button.isChecked()):
        return
    self.editor.highlight_found_results(
        self.search_text.currentText(),
        words=self.words_button.isChecked(),
        regexp=self.re_button.isChecked())
def connect_to_apple_tv(details, loop, protocol=None, session=None):
    """Connect and log in to an Apple TV.

    :param details: device details used to pick the service/protocol
    :param loop: event loop for the client session
    :param protocol: optional protocol override
    :param session: optional ClientSession; a new one is created if omitted
    :return: a DmapAppleTV or MrpAppleTV instance depending on the protocol
    """
    service = _get_service_used_to_connect(details, protocol)
    if session is None:
        session = ClientSession(loop=loop)
    # AirPlay is set up regardless of the main protocol.
    airplay = _setup_airplay(loop, session, details)
    if service.protocol == PROTOCOL_DMAP:
        return DmapAppleTV(loop, session, details, airplay)
    return MrpAppleTV(loop, session, details, airplay)
def read(self):
    """Read and return the LED brightness.

    Returns:
        int: current brightness.

    Raises:
        LEDError: if an I/O or OS error occurs.
    """
    try:
        raw = os.read(self._fd, 8)
    except OSError as err:
        raise LEDError(err.errno, "Reading LED brightness: " + err.strerror)
    # Rewind so the next read starts at the beginning of the attribute.
    try:
        os.lseek(self._fd, 0, os.SEEK_SET)
    except OSError as err:
        raise LEDError(err.errno, "Rewinding LED brightness: " + err.strerror)
    brightness = int(raw)
    return brightness
def update_contact(self, um_from_user, um_to_user, message):
    """Get or create a contact and refresh its latest message.

    When the contact already existed, ``latest_message`` is updated and
    the contact saved; a freshly created contact is returned as-is.
    """
    contact, created = self.get_or_create(um_from_user, um_to_user, message)
    if created:
        return contact
    contact.latest_message = message
    contact.save()
    return contact
def add_def(self, def_item):
    """Add ``def_item`` here and propagate it to every tracked peer."""
    self.defs.append(def_item)
    for peer in self.others:
        peer.add_def(def_item)
def create_knowledge_base(self,
                          parent,
                          knowledge_base,
                          retry=google.api_core.gapic_v1.method.DEFAULT,
                          timeout=google.api_core.gapic_v1.method.DEFAULT,
                          metadata=None):
    """Create a knowledge base.

    Args:
        parent (str): Required. The agent to create a knowledge base for.
            Format: ``projects/<Project ID>/agent``.
        knowledge_base: Required. The knowledge base to create; a dict must
            match the ``KnowledgeBase`` protobuf message.
        retry: optional retry object; None disables retries.
        timeout: optional per-attempt timeout in seconds.
        metadata: optional additional metadata for the method.

    Returns:
        A ``KnowledgeBase`` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: request failed.
        google.api_core.exceptions.RetryError: retries exhausted.
        ValueError: invalid parameters.
    """
    # Lazily wrap the transport method on first use (standard GAPIC pattern).
    if 'create_knowledge_base' not in self._inner_api_calls:
        self._inner_api_calls[
            'create_knowledge_base'] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_knowledge_base,
                default_retry=self._method_configs['CreateKnowledgeBase'].retry,
                default_timeout=self._method_configs['CreateKnowledgeBase'].timeout,
                client_info=self._client_info,
            )
    request = knowledge_base_pb2.CreateKnowledgeBaseRequest(
        parent=parent,
        knowledge_base=knowledge_base,
    )
    return self._inner_api_calls['create_knowledge_base'](
        request, retry=retry, timeout=timeout, metadata=metadata)
def remove_network(self, action, n_name, **kwargs):
    """Remove a network and forget its cached name.

    :param action: Action configuration.
    :param n_name: Network name or id.
    :param kwargs: Additional keyword arguments for the client call.
    :return: whatever the client's ``remove_network`` returns
    """
    remove_kwargs = self.get_network_remove_kwargs(action, n_name, **kwargs)
    result = action.client.remove_network(**remove_kwargs)
    names_for_client = self._policy.network_names[action.client_name]
    del names_for_client[n_name]
    return result
def update_state_from_api(self):
    """Pull and update the current state from the API (at most every 300 s)."""
    if self.last_api_call is not None:
        # NOTE(review): uses .seconds (the component), not total_seconds();
        # intervals spanning whole days would be misjudged.
        difference = (datetime.datetime.now() - self.last_api_call).seconds
    else:
        difference = 301
    if difference >= 300:
        url = BASE_URL + "/rest/item"
        payload = {'usertoken': self.token}
        arequest = requests.get(url, params=payload)
        status = str(arequest.status_code)
        if status == '401':
            _LOGGER.info("Token expired? Trying to get a new one.")
            self.authenticate(True)
            # NOTE(review): status becomes an int here and the retried
            # response is never stored into self.state — confirm intended.
            arequest = requests.get(url, params=payload)
            status = arequest.status_code
        elif status == '404':
            _LOGGER.error("No devices associated with this account.")
        elif status != '200':
            _LOGGER.error("API error not updating state. " + status)
        else:
            self.state = arequest.json()
            self.last_api_call = datetime.datetime.now()
            _LOGGER.info("Pulled latest state from API.")
def create(cls, entry):
    """Factory that creates a bot config from an ``INSTALLED_APPS``-style entry.

    ``entry`` may be a module path (its ``default_bot`` attribute or
    ``<entry>.Bot`` is used) or a dotted path to a Bot subclass.
    """
    try:
        module = import_module(entry)
    except ImportError:
        # Not importable as a module: treat the entry as a dotted class path.
        module = None
        mod_path, _, cls_name = entry.rpartition('.')
        if not mod_path:
            # No dot at all — nothing else to try, re-raise the ImportError.
            raise
    else:
        try:
            entry = module.default_bot
        except AttributeError:
            # Module without an explicit default: fall back to <entry>.Bot.
            return cls(f'{entry}.Bot', module)
        else:
            mod_path, _, cls_name = entry.rpartition('.')
    mod = import_module(mod_path)
    try:
        bot_cls = getattr(mod, cls_name)
    except AttributeError:
        if module is None:
            # Re-import to surface the original import error message.
            import_module(entry)
        raise
    if not issubclass(bot_cls, Bot):
        raise ImproperlyConfigured(
            "'%s' isn't a subclass of Bot." % entry)
    return cls(entry, mod, bot_cls.label)
def from_bundle(cls, b, feature):
    """Initialize a Pulsation feature from the bundle.

    Reads the pulsation parameters from the feature's parameter set and
    derives the tangential amplitude from the component's mass, polar
    radius and pulsation frequency.
    """
    feature_ps = b.get_feature(feature)
    freq = feature_ps.get_value('freq', unit=u.d**-1)
    radamp = feature_ps.get_value('radamp', unit=u.dimensionless_unscaled)
    l = feature_ps.get_value('l', unit=u.dimensionless_unscaled)
    m = feature_ps.get_value('m', unit=u.dimensionless_unscaled)
    teffext = feature_ps.get_value('teffext')
    # GM in solRad^3 / d^2 so tanamp below is dimensionless.
    GM = c.G.to('solRad3 / (solMass d2)').value * b.get_value(
        qualifier='mass', component=feature_ps.component,
        context='component', unit=u.solMass)
    # NOTE(review): this lookup uses section='component' while the one
    # above uses context='component' — confirm the asymmetry is intended.
    R = b.get_value(qualifier='rpole', component=feature_ps.component,
                    section='component', unit=u.solRad)
    tanamp = GM/R**3/freq**2
    return cls(radamp, freq, l, m, tanamp, teffext)
def graph_from_edges(edges):
    """Construct an undirected multigraph from weighted-edge data.

    Parameters
    ----------
    edges : list
        Tuples of (first node, second node, weight, key).

    Returns
    -------
    networkx.classes.multigraph.MultiGraph
    """
    graph = nx.MultiGraph()
    for first, second, weight, key in edges:
        graph.add_edge(first, second, weight=weight, key=key)
    return graph
def as_dataframe(self, pattern='*', max_rows=None):
    """Return the groups matching ``pattern`` as a pandas DataFrame.

    Args:
        pattern: optional Unix shell-style wildcard filter.
        max_rows: maximum number of groups to include; None means all.

    Returns:
        A pandas DataFrame with one row per matching group.
    """
    rows = []
    for i, group in enumerate(self.list(pattern)):
        if max_rows is not None and i >= max_rows:
            break
        parent = self._group_dict.get(group.parent_id)
        parent_display_name = parent.display_name if parent is not None else ''
        rows.append([
            group.id, group.display_name, group.parent_id,
            parent_display_name, group.is_cluster, group.filter])
    return pandas.DataFrame(rows, columns=self._DISPLAY_HEADERS)
def keyPressEvent(self, event):
    """Reimplemented Qt handler: Up/Down arrows move the selection."""
    key = event.key()
    if key == Qt.Key_Down:
        self.select_row(1)
    elif key == Qt.Key_Up:
        self.select_row(-1)
def set_cache_value(self, name, value):
    """Set a variable in the local state dictionary (device unchanged).

    Useful when the local state should reflect a new value that has not
    yet been reported back by Vera.  Unknown keys are logged and ignored.
    """
    dev_info = self.json_state.get('deviceInfo')
    key = name.lower()
    if dev_info.get(key) is None:
        logger.error("Could not set %s for %s (key does not exist).",
                     name, self.name)
        logger.error("- dictionary %s", dev_info)
        return
    dev_info[key] = str(value)
def get_all_database_accessions(chebi_ids):
    """Return the database accessions of all given ChEBI ids, flattened."""
    flattened = []
    for chebi_id in chebi_ids:
        flattened.extend(get_database_accessions(chebi_id))
    return flattened
def remove_obsolete_folders(states, path):
    """Remove state-machine folders under ``path`` that belong to no state.

    :param list states: the states that should reside in this folder
    :param str path: file system path to be checked for valid folders
    """
    elements_in_folder = os.listdir(path)
    state_folders_in_file_system = []
    # Only folders containing core-data files count as state folders;
    # anything else in the directory is left alone.
    for folder_name in elements_in_folder:
        if os.path.exists(os.path.join(path, folder_name, FILE_NAME_CORE_DATA)) or \
                os.path.exists(os.path.join(path, folder_name, FILE_NAME_CORE_DATA_OLD)):
            state_folders_in_file_system.append(folder_name)
    # Drop the folders that belong to a known state; the remainder is obsolete.
    for state in states:
        storage_folder_for_state = get_storage_id_for_state(state)
        if storage_folder_for_state in state_folders_in_file_system:
            state_folders_in_file_system.remove(storage_folder_for_state)
    for folder_name in state_folders_in_file_system:
        shutil.rmtree(os.path.join(path, folder_name))
def thumbUrl(self):
    """Return the URL of the thumbnail image, or None when unavailable.

    Falls back from the item's own thumb to the parent/grandparent thumb.
    """
    key = self.firstAttr('thumb', 'parentThumb', 'granparentThumb')
    if not key:
        return None
    return self._server.url(key, includeToken=True)
def cancel(self):
    """Cancel the future if possible.

    Returns True if the future was cancelled, False otherwise.  A future
    cannot be cancelled if it is running or has already completed.
    """
    with self._condition:
        if self._state in [RUNNING, FINISHED]:
            return False
        if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
            # Already cancelled — idempotent success.
            return True
        self._state = CANCELLED
        self._condition.notify_all()
    # Callbacks run outside the lock to avoid deadlocks with user code.
    self._invoke_callbacks()
    return True
def addMonitor(self, monitorFriendlyName, monitorURL, monitorAlertContacts=""):
    """Add a new HTTP monitor via the UptimeRobot API.

    :param monitorFriendlyName: display name for the monitor
    :param monitorURL: URL to monitor (monitorType is fixed to 1 / HTTP)
    :param monitorAlertContacts: alert-contact spec; new optional parameter
        (the original referenced an unbound name and raised NameError)
    :return: True if the monitor was added, otherwise False
    """
    url = self.baseUrl
    url += "newMonitor?apiKey=%s" % self.apiKey
    url += "&monitorFriendlyName=%s" % monitorFriendlyName
    url += "&monitorURL=%s&monitorType=1" % monitorURL
    url += "&monitorAlertContacts=%s" % monitorAlertContacts
    url += "&noJsonCallback=1&format=json"
    success, response = self.requestApi(url)
    return bool(success)
def get_annotations(self) -> Dict:
    """Return the current annotation data as a new dict.

    Citation and annotations are copied so callers cannot mutate state.
    """
    annotations = {EVIDENCE: self.evidence}
    annotations[CITATION] = self.citation.copy()
    annotations[ANNOTATIONS] = self.annotations.copy()
    return annotations
def edit(filename, connection=None):
    """Check out a file into the default changelist.

    :param filename: file to check out
    :param connection: optional Connection; a default one is created when
        omitted
    """
    conn = connection if connection else connect()
    revisions = conn.ls(filename)
    if revisions:
        revisions[0].edit()
def mesh_stable_pose(mesh, T_obj_table,
                     T_table_world=RigidTransform(from_frame='table', to_frame='world'),
                     style='wireframe', smooth=False, color=(0.5,0.5,0.5),
                     dim=0.15, plot_table=True, plot_com=False, name=None):
    """Visualize a mesh in a stable pose.

    :param mesh: trimesh.Trimesh to visualize
    :param T_obj_table: pose of object relative to table
    :param T_table_world: pose of table relative to world.
        NOTE(review): this default RigidTransform is created once at import
        time and shared across calls — fine if never mutated; confirm.
    :param style: 'surface' or 'wireframe'
    :param smooth: smooth the mesh before rendering
    :param color: RGB tuple
    :param dim: side length of the rendered table
    :param plot_table: also draw the table
    :param plot_com: draw a ball at the object's center of mass
    :param name: name for the added object
    :return: pose of the mesh in the world frame
    """
    T_obj_table = T_obj_table.as_frames('obj', 'table')
    T_obj_world = T_table_world * T_obj_table
    Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth,
                      color=color, name=name)
    if plot_table:
        Visualizer3D.table(T_table_world, dim=dim)
    if plot_com:
        Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'),
                            T_obj_world, scale=0.01)
    return T_obj_world
def change_state(self, key, value):
    """Change the instance data under ``key`` to ``value``.

    :param key: str key to update; must be one of ``VALID_KEYS``
    :param value: value to store
    :raises InvalidState: when ``key`` is not a valid state key
    """
    valid = key in VALID_KEYS
    if not valid:
        raise InvalidState
    self._data[key] = value
def stop(self, sig=signal.SIGINT):
    """Stop all worker processes, then wait for them to exit.

    :param sig: signal sent to each sandboxed child (default SIGINT)
    """
    # First pass: signal every child.
    for cpid in self.sandboxes:
        logger.warn('Stopping %i...' % cpid)
        try:
            os.kill(cpid, sig)
        except OSError:
            logger.exception('Error stopping %s...' % cpid)
    # Second pass: reap each child; iterate a copy because entries are
    # popped from self.sandboxes as they are collected.
    for cpid in list(self.sandboxes):
        try:
            logger.info('Waiting for %i...' % cpid)
            pid, status = os.waitpid(cpid, 0)
            # High byte of the waitpid status is the exit code.
            logger.warn('%i stopped with status %i' % (pid, status >> 8))
        except OSError:
            logger.exception('Error waiting for %i...' % cpid)
        finally:
            self.sandboxes.pop(cpid, None)
def get_params(self):
    """Return serial parameters as a tuple.

    :return: (timeout, xonxoff, rtscts, baudrate)
    """
    params = (self.timeout, self.xonxoff, self.rtscts, self.baudrate)
    return params
def integer_binning(data=None, **kwargs) -> StaticBinning:
    """Construct fixed-width binning with bins centered around integers.

    Parameters
    ----------
    range: Optional[Tuple[int]]
        min (included) and max integer (excluded) bin
    bin_width: Optional[int]
        group ``bin_width`` integers into one bin (not recommended)
    """
    if "range" in kwargs:
        # Shift the integer range by half a bin so bins are centred on ints.
        kwargs["range"] = tuple(r - 0.5 for r in kwargs["range"])
    return fixed_width_binning(data=data, bin_width=kwargs.pop("bin_width", 1),
                               align=True, bin_shift=0.5, **kwargs)
def havdalah(self):
    """Return the havdalah time, or None if not applicable.

    During a multi-day Yom Tov stretch (tomorrow is also Shabbat/Yom Tov)
    the value is None so the user is not misled that melacha is permitted.
    The actual offset/three-stars choice lives in ``_havdalah_datetime``.
    """
    today = HDate(gdate=self.date, diaspora=self.location.diaspora)
    tomorrow = HDate(gdate=self.date + dt.timedelta(days=1),
                     diaspora=self.location.diaspora)
    if today.is_shabbat or today.is_yom_tov:
        if tomorrow.is_shabbat or tomorrow.is_yom_tov:
            # The holy stretch continues past today — no havdalah yet.
            return None
        return self._havdalah_datetime
    # Plain weekday — havdalah does not apply.
    return None
def space_out_camel_case(stringAsCamelCase):
    """Add spaces to a camel-case string; None passes through unchanged.

    >>> space_out_camel_case('DMLSServicesOtherBSTextLLC')
    'DMLS Services Other BS Text LLC'
    """
    if stringAsCamelCase is None:
        return None

    def _insert_space(match):
        text = match.group()
        return text[:1] + " " + text[1:]

    # Split at lower->upper boundaries and before the last capital of an
    # acronym followed by a lowercase word.
    return re.sub(r'([A-Z][A-Z][a-z])|([a-z][A-Z])', _insert_space,
                  stringAsCamelCase)
def monthly(self):
    """Access the monthly usage records (lazily constructed).

    :rtype: twilio.rest.api.v2010.account.usage.record.monthly.MonthlyList
    """
    cached = self._monthly
    if cached is None:
        cached = MonthlyList(self._version,
                             account_sid=self._solution['account_sid'])
        self._monthly = cached
    return cached
def get_tensors_by_names(names):
    """Look up tensors in the default graph by a list of names.

    Args:
        names (list): tensor/op names to resolve
    """
    graph = tfv1.get_default_graph()
    tensors = []
    for name in names:
        _, tensor_name = get_op_tensor_name(name)
        tensors.append(graph.get_tensor_by_name(tensor_name))
    return tensors
def dfbool2intervals(df, colbool):
    """Annotate ``df`` with interval metadata derived from a boolean column.

    ``df[colbool]`` contains bool values; each contiguous run of True is an
    interval.  Adds per-interval id/start/stop/length/within-index columns.
    """
    # Normalise to a positional index so .loc slicing below is positional.
    df.index = range(len(df))
    intervals = bools2intervals(df[colbool])
    for intervali, interval in enumerate(intervals):
        # interval is (start, stop), both inclusive for .loc slicing.
        df.loc[interval[0]:interval[1], f'{colbool} interval id'] = intervali
        df.loc[interval[0]:interval[1], f'{colbool} interval start'] = interval[0]
        df.loc[interval[0]:interval[1], f'{colbool} interval stop'] = interval[1]
        df.loc[interval[0]:interval[1], f'{colbool} interval length'] = interval[1]-interval[0]+1
        df.loc[interval[0]:interval[1], f'{colbool} interval within index'] = range(interval[1]-interval[0]+1)
    df[f'{colbool} interval index'] = df.index
    return df
def send_note(self, to, subject="", body="", noetid=""):
    """Send a note.

    :param to: the username(s) the note is addressed to
    :param subject: subject of the note
    :param body: body of the note
    :param noetid: UUID of the note being responded to (parameter name
        kept as-is for backward compatibility)
    :return: list of dicts with 'success' and 'user' entries
    """
    # FIX: the original compared with ``is not "authorization_code"`` —
    # identity comparison against a string literal only works by interning
    # accident; use inequality.
    if self.standard_grant_type != "authorization_code":
        raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.")
    response = self._req('/notes/send', post_data={
        'to[]': to,
        'subject': subject,
        'body': body,
        'noetid': noetid
    })
    sent_notes = []
    for item in response['results']:
        n = {}
        n['success'] = item['success']
        n['user'] = User()
        n['user'].from_dict(item['user'])
        sent_notes.append(n)
    return sent_notes
def convert_from_rosetta(self, residue_id, to_scheme):
    """Convert a Rosetta residue number to another numbering scheme without
    requiring the chain identifier.

    :param residue_id: Rosetta residue number (int)
    :param to_scheme: target numbering scheme
    :return: converted residue, or None when the residue is not found
    """
    # FIX: replaced Python 2-only ``types.IntType`` and ``iteritems`` with
    # their Python 3 equivalents (the file uses f-strings elsewhere).
    assert isinstance(residue_id, int)
    # Find the chain that contains this Rosetta residue number.
    chain_id = None
    for c, sequence in self.rosetta_sequences.items():
        for id, r in sequence:
            if r.ResidueID == residue_id:
                # Rosetta numbering must be unique across chains.
                assert chain_id is None
                chain_id = c
    if chain_id:
        return self.convert(chain_id, residue_id, 'rosetta', to_scheme)
    return None
def json_pretty_print(s):
    """Pretty-print a JSON string with sorted keys and 4-space indent."""
    parsed = json.loads(s)
    return json.dumps(parsed, sort_keys=True, indent=4, separators=(',', ': '))
def getPageType(name, number=False):
    """Return the type of the page with that name (None if it doesn't exist).

    Args:
        name (str): name of the page
        number (bool): if True return the numeric type as a string
            (e.g. a graph is "3"); if False return a word ("graph")

    Returns:
        string describing the page type, or None
    """
    if name not in pageNames():
        return None
    pageType = PyOrigin.Pages(name).GetType()
    if number:
        return str(pageType)
    typeNames = {1: "matrix", 2: "book", 3: "graph", 4: "layout", 5: "notes"}
    return typeNames.get(pageType)
def SummaryMetadata(self, run, tag):
    """Return the summary metadata for ``tag`` in ``run``.

    Raises:
        KeyError: if the run is not found, or the tag is not available
            for the given run.
    """
    return self.GetAccumulator(run).SummaryMetadata(tag)
def callback(cfunc):
    """Turn a ctypes CFUNCTYPE instance into a value passable into PyROOT.

    Casts the function pointer to ``c_voidp`` and then reads a ``c_voidp``
    located at that address.  NOTE(review): ``from_address`` dereferences
    the pointer *value* as a memory address — confirm this double
    indirection is what PyROOT expects before modifying.
    """
    return C.c_voidp.from_address(C.cast(cfunc, C.c_voidp).value)
def _remove_debug_handlers(self): remove = list() for handler in self.config[self.HANDLERS]: if self.config[self.HANDLERS][handler].get('debug_only'): remove.append(handler) for handler in remove: del self.config[self.HANDLERS][handler] for logger in self.config[self.LOGGERS].keys(): logger = self.config[self.LOGGERS][logger] if handler in logger[self.HANDLERS]: logger[self.HANDLERS].remove(handler) self._remove_debug_only()
Remove any handlers with an attribute of debug_only that is True and remove the references to said handlers from any loggers that are referencing them.
def get_or_create(cls, issue, header, text=None):
    """Get or create the dashboard comment in this issue.

    A dashboard comment is recognised by its first line equalling
    ``header``.  When none exists, a new comment is created.  ``text``,
    when given, replaces the comment body.
    """
    for comment in get_comments(issue):
        try:
            if comment.body.splitlines()[0] == header:
                obj = cls(comment, header)
                break
        except IndexError:
            # Empty comment body — there is no first line to compare.
            pass
    else:
        # Loop completed without a break: no dashboard comment yet.
        comment = create_comment(issue, header)
        obj = cls(comment, header)
    if text:
        obj.edit(text)
    return obj
def _compile(self, dirpath, makename, compiler, debug, profile):
    """Compile the makefile at ``dirpath`` with ``compiler``.

    :arg dirpath: full path to the directory containing the makefile
    :arg makename: name of the makefile to compile
    :arg compiler: one of ['ifort', 'gfortran']
    :arg debug: pass DEBUG=true to make
    :arg profile: pass GPROF=true to make
    :return: the exit code (forced to 1 when the log shows errors)
    """
    from os import path
    options = ""
    if debug:
        options += " DEBUG=true"
    if profile:
        options += " GPROF=true"
    from os import system
    # NOTE(review): os.system with interpolated paths is shell-injection
    # prone if dirpath/makename are ever user-controlled — confirm.
    codestr = "cd {}; make -f '{}' F90={} FAM={}" + options
    # FAM is the compiler family, taken as the first letter of the compiler.
    code = system(codestr.format(dirpath, makename, compiler, compiler[0]))
    lcount = 0
    errors = []
    log = path.join(dirpath, "compile.log")
    with open(log) as f:
        for line in f:
            lcount += 1
            # Only lines 22-31 of the log are captured; presumably the
            # window where make prints warnings/errors — confirm.
            if lcount > 21 and lcount < 32:
                errors.append(line)
            elif lcount > 21:
                break
    if len(errors) > 0:
        code = 1
        msg.warn("compile generated some errors or warnings:")
        msg.blank()
        msg.info(''.join(errors))
    return code
def _rgetattr(obj, key): for k in key.split("."): obj = getattr(obj, k) return obj
Recursive getattr for handling dots in keys.
def decrypt_filedata(data, keys):
    """Decrypt a file from Send (AES-GCM) into a temp file.

    The last 16 bytes of ``data`` are the GCM auth tag; the ciphertext is
    decrypted chunk-wise with a one-chunk lag so the final chunk can be
    passed to ``decrypt_and_verify`` together with the tag.

    :param data: seekable ciphertext file object (closed on return)
    :param keys: object carrying ``encryptKey`` and ``encryptIV``
    :return: open plaintext file handle, positioned at 0
    """
    # Split off the trailing 16-byte GCM tag and truncate it from the data.
    data.seek(-16, 2)
    tag = data.read()
    data.seek(-16, 2)
    data.truncate()
    data.seek(0)
    plain = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
    pbar = progbar(fileSize(data))
    obj = Cryptodome.Cipher.AES.new(keys.encryptKey,
                                    Cryptodome.Cipher.AES.MODE_GCM,
                                    keys.encryptIV)
    # Lag by one chunk: decrypt the previous chunk while reading the next,
    # so the final chunk is available for decrypt_and_verify below.
    prev_chunk = b''
    for chunk in iter(lambda: data.read(CHUNK_SIZE), b''):
        plain.write(obj.decrypt(prev_chunk))
        pbar.update(len(chunk))
        prev_chunk = chunk
    plain.write(obj.decrypt_and_verify(prev_chunk, tag))
    data.close()
    pbar.close()
    plain.seek(0)
    return plain
def _spawn(self): self.queue = Queue(maxsize=self.num_threads * 10) for i in range(self.num_threads): t = Thread(target=self._consume) t.daemon = True t.start()
Initialize the queue and the threads.
def make_context(self, docker_file=None):
    """Yield a build context with the Dockerfile added to its tarfile.

    :param docker_file: Dockerfile to embed; defaults to
        ``self.docker_file`` when omitted
    """
    kwargs = {"silent_build": self.harpoon.silent_build,
              "extra_context": self.commands.extra_context}
    if docker_file is None:
        docker_file = self.docker_file
    with ContextBuilder().make_context(self.context, **kwargs) as ctxt:
        # ctxt.t is presumably the context's underlying tarfile — confirm.
        self.add_docker_file_to_tarfile(docker_file, ctxt.t)
        yield ctxt
def unit_doomed(unit=None):
    """Determine whether the unit is being removed from the model.

    Requires Juju 2.4.1.

    :param unit: unit name; defaults to the local unit
    :return: True if the unit is being removed, already gone, or never
        existed
    """
    if not has_juju_version("2.4.1"):
        raise NotImplementedError("is_doomed")
    target = unit if unit is not None else local_unit()
    units = goal_state().get('units', {})
    if target not in units:
        return True
    return units[target]['status'] in ('dying', 'dead')
def poke(library, session, address, width, data):
    """Write an 8-, 16- or 32-bit value to the specified address.

    Corresponds to the viPoke* functions of the VISA library.

    :param library: the visa library wrapped by ctypes
    :param session: unique logical identifier to a session
    :param address: destination address for the write
    :param width: number of bits to write (8, 16 or 32)
    :param data: data to be written to the bus
    :return: return value of the library call
    """
    if width == 8:
        return poke_8(library, session, address, data)
    if width == 16:
        return poke_16(library, session, address, data)
    if width == 32:
        return poke_32(library, session, address, data)
    raise ValueError('%s is not a valid size. Valid values are 8, 16 or 32' % width)
def print_yielded(func):
    """Convert a generator into a function printing every yielded element.

    >>> @print_yielded
    ... def x():
    ...     yield 3; yield None
    >>> x()
    3
    None
    """
    @functools.wraps(func)
    def print_results(*args, **kwargs):
        for item in func(*args, **kwargs):
            print(item)
    return print_results
def copy_shell__(self, new_i):
    """Copy every attribute listed in ``ONLY_COPY_PROP`` onto ``new_i``.

    :param new_i: target object receiving the copied attributes
    :return: ``new_i`` with the attributes set
    """
    for prop_name in ONLY_COPY_PROP:
        value = getattr(self, prop_name)
        setattr(new_i, prop_name, value)
    return new_i
def remove_spawned_gates(self, spawn_gate=None):
    """Remove one spawned gate, or all of them when none is given."""
    if spawn_gate is not None:
        spawn_gate.remove()
        self.spawn_list.remove(spawn_gate)
        return
    # Iterate a copy since entries are removed during the loop.
    for gate in list(self.spawn_list):
        self.spawn_list.remove(gate)
        gate.remove()
def update_resource(self, resource, underlined=None):
    """Update the cache of global names defined in ``resource``."""
    try:
        module = self.project.get_pymodule(resource)
        name = self._module_name(resource)
        self._add_names(module, name, underlined)
    except exceptions.ModuleSyntaxError:
        # Unparsable modules simply keep their previous cache entry.
        pass
def blank_object(obj: T, fieldlist: Sequence[str]) -> None:
    """Set every attribute named in ``fieldlist`` on ``obj`` to None."""
    for field_name in fieldlist:
        setattr(obj, field_name, None)
def refresh(self):
    """Refresh the completer's contents from the current search text."""
    table = self.tableType()
    search = nativestring(self._pywidget.text())
    # Nothing changed since last time — avoid redundant queries.
    if search == self._lastSearch:
        return
    self._lastSearch = search
    if not search:
        return
    if search in self._cache:
        records = self._cache[search]
    else:
        records = table.select(where = self.baseQuery(), order = self.order())
        records = list(records.search(search, limit=self.limit()))
        self._cache[search] = records
    self._records = records
    # NOTE(review): under Python 3, map() returns an iterator; confirm
    # setStringList accepts that (it did a list under Python 2).
    self.model().setStringList(map(str, self._records))
    self.complete()
def check_outputs(self):
    """Decide whether the task must run by inspecting its output files.

    Expands ``self.outputs`` in place, then returns True when the task
    should run: the output files are missing, the dependencies are newer
    than the outputs, or ``self.force`` is set.  Progress is reported via
    ``print``.

    :return: True when the task should run, False otherwise.
    """
    self.outputs = self.expand_filenames(self.outputs)
    result = False
    if self.files_exist(self.outputs):
        if self.dependencies_are_newer(self.outputs, self.inputs):
            result = True
            print("Dependencies are newer than outputs.")
            print("Running task.")
        elif self.force:
            print("Dependencies are older than inputs, but 'force' option present.")
            print("Running task.")
            result = True
        else:
            print("Dependencies are older than inputs.")
    else:
        # Fixed typo in user-facing message: "ouput" -> "output".
        print("No output file(s).")
        print("Running task.")
        result = True
    return result
Check for the existence of output files
def _get_cookies_as_dict():
    """Read the ``[cookies]`` section of the config file into a dict.

    Option names are kept as written, except ``jsessionid`` which is
    upper-cased to ``JSESSIONID``.

    :return: dict of cookie name -> value; empty dict when the section is
        absent.  (Previously the function fell off the end and returned
        ``None`` in that case; ``{}`` is equally falsy and safer for
        callers that iterate the result.)
    """
    config = ConfigParser.SafeConfigParser()
    config.read(_config)
    cookie_dict = {}
    if config.has_section('cookies'):
        for option in config.options('cookies'):
            option_key = option.upper() if option == 'jsessionid' else option
            cookie_dict[option_key] = config.get('cookies', option)
    return cookie_dict
Get cookies as a dict
def tuple_len(self):
    """Length of tuples produced by this generator.

    Raises NotImplementedError when the subclass never defined the
    ``_tuple_len`` attribute.
    """
    if hasattr(self, '_tuple_len'):
        return self._tuple_len
    raise NotImplementedError("Class {} does not implement attribute 'tuple_len'.".format(self.__class__.__name__))
Length of tuples produced by this generator.
def _k_value_square_reduction(ent_pipe_id, exit_pipe_id, re, f): if re < 2500: return (1.2 + (160 / re)) * ((ent_pipe_id / exit_pipe_id) ** 4) else: return (0.6 + 0.48 * f) * (ent_pipe_id / exit_pipe_id) ** 2\ * ((ent_pipe_id / exit_pipe_id) ** 2 - 1)
Returns the minor loss coefficient for a square reducer. Parameters: ent_pipe_id: Entrance pipe's inner diameter. exit_pipe_id: Exit pipe's inner diameter. re: Reynold's number. f: Darcy friction factor.
def remove_not_requested_analyses_view(portal):
    """Drop the 'analyses_not_requested' action from the AnalysisRequest type."""
    logger.info("Removing 'Analyses not requested' view ...")
    ar_ptype = portal.portal_types.AnalysisRequest
    # Keep every action except the one being removed.
    ar_ptype._actions = [act for act in ar_ptype.listActions()
                         if act.id != "analyses_not_requested"]
Remove the view 'Analyses not requested' from inside AR
def count_unique(table, field=-1):
    """Use the Django ORM or collections.Counter to count unique values of a field.

    `table` is one of:
      1. A Django queryset / iterable of model instances
      2. An iterable of dicts or lists with elements accessed by row[field]
      3. An iterable of objects or namedtuples with elements accessed by row.field

    `field` is the key, index, or attribute name whose values are counted.

    :return: mapping of value -> count, or None if no access strategy worked.
    """
    from collections import Counter
    # 1) Django queryset: let the database do the distinct counting.
    try:
        ans = {}
        rows = table.distinct().values(field).annotate(
            field_value_count=models.Count(field))
        for row in rows:
            ans[row[field]] = row['field_value_count']
        return ans
    except Exception:
        pass
    # 2) Rows indexable by key or position (dicts, lists, tuples).
    try:
        return Counter(row[field] for row in table)
    except Exception:
        pass
    # 3) Mapping rows via .get().
    try:
        return Counter(row.get(field, None) for row in table)
    except Exception:
        pass
    # 4) Attribute access.  BUG FIX: the original called
    # row.getattr(field, None) -- objects have no .getattr method, so this
    # branch always raised and the function silently returned None.
    try:
        return Counter(getattr(row, field, None) for row in table)
    except Exception:
        return None
Use the Django ORM or collections.Counter to count unique values of a field in a table `table` is one of: 1. An iterable of Django model instances for a database table (e.g. a Django queryset) 2. An iterable of dicts or lists with elements accessed by row[field] where field can be an integer or string 3. An iterable of objects or namedtuples with elements accessed by `row.field` `field` can be any immutable object (the key or index in a row of the table that access the value to be counted)
def node2geoff(node_name, properties, encoder):
    """Convert a NetworkX node into a Geoff string.

    Parameters
    ----------
    node_name : str or int
        the ID of a NetworkX node
    properties : dict
        a dictionary of node attributes
    encoder : json.JSONEncoder
        JSON encoder used to serialize the properties

    Returns
    -------
    str
        the node rendered as a Geoff string
    """
    if not properties:
        return '({0})'.format(node_name)
    encoded = encoder.encode(properties)
    return '({0} {1})'.format(node_name, encoded)
converts a NetworkX node into a Geoff string. Parameters ---------- node_name : str or int the ID of a NetworkX node properties : dict a dictionary of node attributes encoder : json.JSONEncoder an instance of a JSON encoder (e.g. `json.JSONEncoder`) Returns ------- geoff : str a Geoff string
def get_model(self, ids):
    """Get the model for the specified motors, caching results per id."""
    # Only query ids we have not resolved before.
    missing = [motor_id for motor_id in ids
               if motor_id not in self._known_models]
    raw_models = self._get_model(missing, convert=False)
    self._known_models.update(
        (motor_id, dxl_to_model(raw))
        for motor_id, raw in zip(missing, raw_models))
    return tuple(self._known_models[motor_id] for motor_id in ids)
Gets the model for the specified motors.
def backupIds(self) -> Sequence[int]:
    """Return the ids of replicas other than the master instance (id 0)."""
    return [instance_id for instance_id in self.started if instance_id != 0]
Return the list of replicas that don't belong to the master protocol instance
def read_pcap_from_source(self):
    """Return a FileStream of the Pcap from the compute node.

    Returns None when no capture node is set.
    """
    if not self._capture_node:
        return None
    compute = self._capture_node["node"].compute
    capture_path = "tmp/captures/" + self._capture_file_name
    return compute.stream_file(self._project, capture_path)
Return a FileStream of the Pcap from the compute node
def _compute_length(nodes): r _, num_nodes = np.shape(nodes) first_deriv = (num_nodes - 1) * (nodes[:, 1:] - nodes[:, :-1]) if num_nodes == 2: return np.linalg.norm(first_deriv[:, 0], ord=2) if _scipy_int is None: raise OSError("This function requires SciPy for quadrature.") size_func = functools.partial(vec_size, first_deriv) length, _ = _scipy_int.quad(size_func, 0.0, 1.0) return length
r"""Approximately compute the length of a curve. .. _QUADPACK: https://en.wikipedia.org/wiki/QUADPACK If ``degree`` is :math:`n`, then the Hodograph curve :math:`B'(s)` is degree :math:`d = n - 1`. Using this curve, we approximate the integral: .. math:: \int_{B\left(\left[0, 1\right]\right)} 1 \, d\mathbf{x} = \int_0^1 \left\lVert B'(s) \right\rVert_2 \, ds using `QUADPACK`_ (via SciPy). .. note:: There is also a Fortran implementation of this function, which will be used if it can be built. Args: nodes (numpy.ndarray): The nodes defining a curve. Returns: float: The length of the curve. Raises: OSError: If SciPy is not installed.
def config_get(self, pattern='*'):
    """Return the configuration parameters whose names match ``pattern``.

    Values that parse as integers are returned as int; everything else is
    returned unchanged.
    """
    matched = {}
    for name, value in self.redis_config.items():
        if not fnmatch.fnmatch(name, pattern):
            continue
        try:
            matched[name] = int(value)
        except ValueError:
            matched[name] = value
    return matched
Get one or more configuration parameters.
def _replace(self, **kwds): 'Return a new NamedTuple object replacing specified fields with new values' result = self._make(map(kwds.pop, self._fields, self)) if kwds: raise ValueError('Got unexpected field names: %r' % kwds.keys()) return result
Return a new NamedTuple object replacing specified fields with new values
def handle(self, *args, **options):
    """Classify comments that are unclassified or previously marked 'unsure'.

    Comments are fetched in random order so count-limited runs do not
    rehash the same previously-unsure comments.
    """
    # Unclassified (no related record) OR classified as 'unsure';
    # order_by('?') randomizes the queryset.
    comments = Comment.objects.filter(
        Q(classifiedcomment__isnull=True) |
        Q(classifiedcomment__cls='unsure')).order_by('?')
    # Optional cap on how many comments to process this run.
    if options['count']:
        comments = comments[:options['count']]
    comment_count = comments.count()
    self.stdout.write('Classifying %s comments, please wait...' % comment_count)
    self.stdout.flush()
    for comment in comments:
        classified_comment = utils.classify_comment(comment)
        # Emit the first letter of the class as a progress marker.
        self.stdout.write('%s,' % classified_comment.cls[0])
        self.stdout.flush()
    self.stdout.write('\nDone!\n')
Collect all comments that hasn't already been classified or are classified as unsure. Order randomly so we don't rehash previously unsure classifieds when count limiting.
def find_local_id(self, name_id):
    """Only find persistent IDs.

    :param name_id: the name id whose ``text`` is used as lookup key
    :return: the stored local id, or None when unknown
    """
    key = name_id.text
    if key in self.db:
        return self.db[key]
    logger.debug("name: %s", key)
    return None
Only find persistent IDs :param name_id: :return:
async def auth(self):
    """Perform AirPlay device authentication.

    Generates and loads fresh credentials, then runs the PIN-based
    authentication flow.  Returns 0 on success, 1 on authentication
    failure (e.g. wrong PIN).
    """
    credentials = await self.atv.airplay.generate_credentials()
    await self.atv.airplay.load_credentials(credentials)
    try:
        await self.atv.airplay.start_authentication()
        # The device displays a PIN on screen; the user types it in here.
        pin = await _read_input(self.loop, 'Enter PIN on screen: ')
        await self.atv.airplay.finish_authentication(pin)
        print('You may now use these credentials:')
        print(credentials)
        return 0
    except exceptions.DeviceAuthenticationError:
        logging.exception('Failed to authenticate - invalid PIN?')
        return 1
Perform AirPlay device authentication.
def repr_args(args):
    """Format a list of function arguments prettily but as working code.

    Keyword arguments are given as ``(argname, argvalue)`` tuples.
    """
    rendered = []
    for arg in args:
        if isinstance(arg, tuple) and len(arg) == 2:
            name, value = arg
            rendered.append("%s=%s" % (name, repr_arg(value)))
        else:
            rendered.append(repr_arg(arg))
    return ', '.join(rendered)
formats a list of function arguments prettily but as working code (kwargs are tuples (argname, argvalue)
def facetintervallookupone(table, key, start='start', stop='stop',
                           value=None, include_stop=False, strict=True):
    """Construct a faceted interval lookup returning at most one result per query.

    With ``strict=True`` a query matching more than one interval raises
    `DuplicateKeyError`; with ``strict=False`` the first match is returned.
    """
    trees = facettupletrees(table, key, start=start, stop=stop, value=value)
    return dict(
        (facet_key, IntervalTreeLookupOne(tree,
                                          include_stop=include_stop,
                                          strict=strict))
        for facet_key, tree in trees.items())
Construct a faceted interval lookup for the given table, returning at most one result for each query. If ``strict=True``, queries returning more than one result will raise a `DuplicateKeyError`. If ``strict=False`` and there is more than one result, the first result is returned.
def filter_whitespace(mode: str, text: str) -> str:
    """Transform whitespace in ``text`` according to ``mode``.

    Available modes are:

    * ``all``: Return all whitespace unmodified.
    * ``single``: Collapse consecutive whitespace with a single whitespace
      character, preserving newlines.
    * ``oneline``: Collapse all runs of whitespace into a single space
      character, removing all newlines in the process.
    """
    if mode == "oneline":
        return re.sub(r"(\s+)", " ", text)
    if mode == "all":
        return text
    if mode == "single":
        collapsed = re.sub(r"([\t ]+)", " ", text)
        return re.sub(r"(\s*\n\s*)", "\n", collapsed)
    raise Exception("invalid whitespace mode %s" % mode)
Transform whitespace in ``text`` according to ``mode``. Available modes are: * ``all``: Return all whitespace unmodified. * ``single``: Collapse consecutive whitespace with a single whitespace character, preserving newlines. * ``oneline``: Collapse all runs of whitespace into a single space character, removing all newlines in the process. .. versionadded:: 4.3
def tail(self, n=None, **kwargs):
    """Return the last n rows. Execute at once.

    :param n: number of rows (defaults to the display option)
    :return: result frame
    :rtype: :class:`odps.df.backends.frame.ResultFrame`
    """
    row_count = options.display.max_rows if n is None else n
    return self._handle_delay_call('execute', self, tail=row_count, **kwargs)
Return the last n rows. Execute at once. :param n: :return: result frame :rtype: :class:`odps.df.backends.frame.ResultFrame`
def build_insert(table_name, attributes):
    """Build an INSERT statement for ``table_name`` from ``attributes``.

    NOTE(review): values are interpolated via value_to_sql_str rather than
    bound parameters -- confirm inputs are trusted before use.
    """
    columns = u",".join(attributes.keys())
    values = u",".join(value_to_sql_str(value)
                       for value in attributes.values())
    return "INSERT INTO %s(%s) VALUES(%s)" % (table_name, columns, values)
Given the table_name and the data, return the sql to insert the data
def highpass(timeseries, frequency, filter_order=8, attenuation=0.1):
    """Return a new time series that is highpassed above `frequency`.

    Parameters
    ----------
    timeseries : TimeSeries
        The time series to be high-passed.
    frequency : float
        The frequency below which is suppressed.
    filter_order : {8, int}, optional
        The order of the filter to use when high-passing the time series.
    attenuation : {0.1, float}, optional
        The attenuation of the filter.

    Returns
    -------
    TimeSeries
        A new TimeSeries that has been high-passed.

    Raises
    ------
    TypeError
        If timeseries is not an instance of TimeSeries, or is not real valued.
    """
    if not isinstance(timeseries, TimeSeries):
        raise TypeError("Can only resample time series")
    # BUG FIX: original used "is not 'real'", which tests object identity
    # with a string literal (a SyntaxWarning on CPython >= 3.8) and can
    # wrongly pass/fail depending on interning; use equality instead.
    if timeseries.kind != 'real':
        raise TypeError("Time series must be real")
    lal_data = timeseries.lal()
    # The LAL highpass routine mutates lal_data in place.
    _highpass_func[timeseries.dtype](lal_data, frequency,
                                     1 - attenuation, filter_order)
    return TimeSeries(lal_data.data.data, delta_t=lal_data.deltaT,
                      dtype=timeseries.dtype, epoch=timeseries._epoch)
Return a new timeseries that is highpassed. Return a new time series that is highpassed above the `frequency`. Parameters ---------- Time Series: TimeSeries The time series to be high-passed. frequency: float The frequency below which is suppressed. filter_order: {8, int}, optional The order of the filter to use when high-passing the time series. attenuation: {0.1, float}, optional The attenuation of the filter. Returns ------- Time Series: TimeSeries A new TimeSeries that has been high-passed. Raises ------ TypeError: time_series is not an instance of TimeSeries. TypeError: time_series is not real valued
def expose(*methods):
    """A decorator for exposing the methods of a class.

    Parameters
    ----------
    *methods : str
        Names of the methods that should be exposed to callbacks.

    Returns
    -------
    function
        A decorator that passes the class through :func:`expose_as`,
        using the class's own name.
    """
    def decorate(base):
        return expose_as(base.__name__, base, *methods)
    return decorate
A decorator for exposing the methods of a class. Parameters ---------- *methods : str A str representation of the methods that should be exposed to callbacks. Returns ------- decorator : function A function accepting one argument - the class whose methods will be exposed - and which returns a new :class:`Watchable` that will notify a :class:`Spectator` when those methods are called. Notes ----- This is essentially a decorator version of :func:`expose_as`
def _next_file(self):
    """Find next filename.

    self._filenames may need to be expanded via listbucket.

    Returns:
      None if no more file is left. Filename otherwise.
    """
    while True:
        # Drain any bucket listing currently in progress first.
        if self._bucket_iter:
            try:
                return self._bucket_iter.next().filename
            except StopIteration:
                self._bucket_iter = None
                self._bucket = None
        # All explicit filenames consumed -> no more files (returns None).
        if self._index >= len(self._filenames):
            return
        filename = self._filenames[self._index]
        self._index += 1
        # A name without the trailing delimiter is a plain file; return it.
        # Otherwise it denotes a "directory" that must be expanded via a
        # bucket listing, consumed on the next loop iteration.
        if self._delimiter is None or not filename.endswith(self._delimiter):
            return filename
        self._bucket = cloudstorage.listbucket(filename,
                                               delimiter=self._delimiter)
        self._bucket_iter = iter(self._bucket)
Find next filename. self._filenames may need to be expanded via listbucket. Returns: None if no more file is left. Filename otherwise.
def get_level():
    """Get the logging level of the root handler.

    :returns: The logging level of the root handler (an integer) or
        :data:`DEFAULT_LOG_LEVEL` (if no root handler exists).
    """
    handler, _logger = find_handler(logging.getLogger(), match_stream_handler)
    if handler:
        return handler.level
    return DEFAULT_LOG_LEVEL
Get the logging level of the root handler. :returns: The logging level of the root handler (an integer) or :data:`DEFAULT_LOG_LEVEL` (if no root handler exists).
def list_vnets(access_token, subscription_id):
    """List the VNETs in a subscription.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.

    Returns:
        HTTP response. JSON body of VNets list with properties.
    """
    # BUG FIX: the original joined '/providers/Microsoft.Network/' with
    # '/virtualNetworks', producing a double slash in the request URL.
    endpoint = ''.join([get_rm_endpoint(),
                        '/subscriptions/', subscription_id,
                        '/providers/Microsoft.Network',
                        '/virtualNetworks?api-version=', NETWORK_API])
    return do_get(endpoint, access_token)
List the VNETs in a subscription . Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of VNets list with properties.
def tvdb_refresh_token(token):
    """Refresh a TVDb JWT token.

    Online docs: api.thetvdb.com/swagger#!/Authentication/get_refresh_token
    """
    url = "https://api.thetvdb.com/refresh_token"
    headers = {"Authorization": "Bearer %s" % token}
    status, content = _request_json(url, headers=headers, cache=False)
    new_token = content.get("token")
    if status == 200 and new_token:
        return new_token
    if status == 401:
        raise MapiProviderException("invalid token")
    raise MapiNetworkException("TVDb down or unavailable?")
Refreshes a JWT token Online docs: api.thetvdb.com/swagger#!/Authentication/get_refresh_token
def _callback(self, ch, method, properties, body):
    """Dispatch an incoming message to every registered listener."""
    get_logger().info("Message received! Calling listeners...")
    topic = method.routing_key
    payload = json.loads(body.decode('utf-8'))
    for notify in self.listeners:
        notify(self, topic, payload)
Internal method that will be called when receiving message.
def __load(self, redirect=True, preload=False):
    """Load the basic page information (page id, title, canonical URL).

    Delegates to helper methods to raise errors for missing or
    disambiguation pages and to follow redirects when requested.
    """
    query_params = {
        "prop": "info|pageprops",    # basic info plus page properties
        "inprop": "url",             # include the canonical URL
        "ppprop": "disambiguation",  # detect disambiguation pages
        "redirects": "",
    }
    # NOTE(review): presumably supplies either a 'titles' or 'pageids'
    # parameter depending on how the page was requested -- confirm.
    query_params.update(self.__title_query_param())
    request = self.mediawiki.wiki_request(query_params)
    query = request["query"]
    # The response contains a single page keyed by its page id.
    pageid = list(query["pages"].keys())[0]
    page = query["pages"][pageid]
    if "missing" in page:
        self._raise_page_error()
    elif "redirects" in query:
        self._handle_redirect(redirect, preload, query, page)
    elif "pageprops" in page:
        self._raise_disambiguation_error(page, pageid)
    else:
        self.pageid = pageid
        self.title = page["title"]
        self.url = page["fullurl"]
load the basic page information
def name(self):
    """Return the symbolic name of this tag from the TIFF.TAGS registry,
    falling back to the numeric code as a string for unknown tags.
    """
    code = self.code
    try:
        return TIFF.TAGS[code]
    except KeyError:
        # Unknown tag: report the raw numeric code instead.
        return str(code)
Return name of tag from TIFF.TAGS registry.
def total_number(slug, kind='1'):
    """Return the number of posts of kind ``kind`` tagged with ``slug``.

    Counts TabPost rows joined to TabPost2Tag where the tag id matches
    the given slug.
    """
    return TabPost.select().join(
        TabPost2Tag,
        # Join posts to their tag relations by post uid.
        on=(TabPost.uid == TabPost2Tag.post_id)
    ).where(
        (TabPost2Tag.tag_id == slug) &
        (TabPost.kind == kind)
    ).count()
Return the number of posts of the given kind tagged with a certain slug.
def child(self, **kwargs):
    """Return a new device object whose selector has a child selector applied."""
    child_selector = self.selector.clone().child(**kwargs)
    return AutomatorDeviceObject(self.device, child_selector)
set childSelector.