code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def decode_body(cls, header, f):
    """Decode the body of a ``pingresp`` packet.

    Asserts that ``header.packet_type`` is ``pingresp``. The stream ``f``
    is unused because a PINGRESP carries no payload.

    Parameters
    ----------
    header: MqttFixedHeader
    f: file
        Object with a read method (unused for this packet type).

    Raises
    ------
    DecodeError
        If the fixed header reports a non-zero remaining length.

    Returns
    -------
    tuple
        (number of bytes consumed, MqttPingresp instance).
    """
    assert header.packet_type == MqttControlPacketType.pingresp
    if header.remaining_len:
        raise DecodeError('Extra bytes at end of packet.')
    return 0, MqttPingresp()
Generates a `MqttPingresp` packet given a `MqttFixedHeader`. This method asserts that header.packet_type is `pingresp`. Parameters ---------- header: MqttFixedHeader f: file Object with a read method. Raises ------ DecodeError ...
def start_depth_socket(self, symbol, callback, depth=None):
    """Start a websocket stream for the market depth of ``symbol``.

    Without ``depth`` (or with depth ``'1'``) the diff stream is used;
    any other depth value selects the partial book stream of that depth.

    :param symbol: required symbol, case-insensitive
    :type symbol: str
    :param callback: callback function to receive messages
    :param depth: optional book depth (e.g. '5', '10', '20')
    """
    stream = '{}@depth'.format(symbol.lower())
    if depth and depth != '1':
        stream = '{}{}'.format(stream, depth)
    return self._start_socket(stream, callback)
Start a websocket for symbol market depth returning either a diff or a partial book https://github.com/binance-exchange/binance-official-api-docs/blob/master/web-socket-streams.md#partial-book-depth-streams :param symbol: required :type symbol: str :param callback: callback function to...
def indentLine(self, block, autoIndent): indent = None if indent is None: indent = self.tryMatchedAnchor(block, autoIndent) if indent is None: indent = self.tryCComment(block) if indent is None and not autoIndent: indent = self.tryCppComment(block) ...
Indent line. Return filler or null.
def users_list(self, *args):
    """Log the list of connected users, or a notice when none are connected."""
    if self._users:
        self.log(self._users, pretty=True)
    else:
        self.log('No users connected')
Display a list of connected users
def stop(self, key):
    """Stop a concurrent operation for ``key``.

    Fetches (creating if necessary) the concurrency limiter for ``key``,
    stops one operation on it, then deletes the limiter if it is empty.
    """
    limiter = self._get_limiter(key)
    limiter.stop()
    self._cleanup_limiter(key)
Stop a concurrent operation. This gets the concurrency limiter for the given key (creating it if necessary) and stops a concurrent operation on it. If the concurrency limiter is empty, it is deleted.
def addNode(self, node): LOGGER.info('Adding node {}({})'.format(node.name, node.address)) message = { 'addnode': { 'nodes': [{ 'address': node.address, 'name': node.name, 'node_def_id': node.id, ...
Add a node to the NodeServer :param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
def rowgroupmap(table, key, mapper, header=None, presorted=False,
                buffersize=None, tempdir=None, cache=True):
    """Group rows under ``key``, then apply ``mapper`` to yield zero or
    more output rows for each input group of rows.
    """
    view_options = dict(header=header, presorted=presorted,
                        buffersize=buffersize, tempdir=tempdir, cache=cache)
    return RowGroupMapView(table, key, mapper, **view_options)
Group rows under the given key then apply `mapper` to yield zero or more output rows for each input group of rows.
def use_partial_data(self, sample_pct:float=0.01, seed:int=None)->'ItemList': "Use only a sample of `sample_pct`of the full dataset and an optional `seed`." if seed is not None: np.random.seed(seed) rand_idx = np.random.permutation(range_of(self)) cut = int(sample_pct * len(self)) ...
Use only a sample of `sample_pct` of the full dataset and an optional `seed`.
def get_git_postversion(addon_dir): addon_dir = os.path.realpath(addon_dir) last_version = read_manifest(addon_dir).get('version', '0.0.0') last_version_parsed = parse_version(last_version) if not is_git_controlled(addon_dir): return last_version if get_git_uncommitted(addon_dir): un...
return the addon version number, with a developmental version increment if there were git commits in the addon_dir after the last version change. If the last change to the addon correspond to the version number in the manifest it is used as is for the python package version. Otherwise a counter is incr...
def merge_sims(oldsims, newsims, clip=None):
    """Merge two precomputed similarity lists, truncating the result to the
    ``clip`` most similar items.

    Either list may be ``None``; the merged list is ordered by descending
    similarity score (the second element of each pair).
    """
    if oldsims is None:
        merged = newsims or []
    elif newsims is None:
        merged = oldsims
    else:
        merged = sorted(oldsims + newsims, key=lambda pair: pair[1], reverse=True)
    return merged if clip is None else merged[:clip]
Merge two precomputed similarity lists, truncating the result to `clip` most similar items.
def call_template_str(self, template): high = compile_template_str(template, self.rend, self.opts['renderer'], self.opts['renderer_blacklist'], self.opts['renderer_whit...
Enforce the states in a template, pass the template as a string
def apply(self, config, raise_on_unknown_key=True):
    """Apply additional configuration from a dictionary.

    Recursively merges ``config`` into this object's internal data.

    :param config: dict of configuration values to merge in.
    :param raise_on_unknown_key: if True, keys not present in the base
        configuration raise an error (behaviour delegated to
        ``_recursive_merge``).
    """
    _recursive_merge(self._data, config, raise_on_unknown_key)
Apply additional configuration from a dictionary This will look for dictionary items that exist in the base_config and apply their values on the current configuration object
def event_list_tabs(counts, current_kind, page_number=1):
    """Build the template context for the event_list page tabs.

    ``counts`` maps event kind to the number of events of that kind,
    e.g. ``{'all': 30, 'gig': 12}``; ``current_kind`` is the active kind
    (e.g. 'gig'); ``page_number`` is the current page.
    """
    context = {
        'counts': counts,
        'current_kind': current_kind,
        'page_number': page_number,
    }
    context['event_kinds'] = Event.get_kinds()
    context['event_kinds_data'] = Event.get_kinds_data()
    return context
Displays the tabs to different event_list pages. `counts` is a dict of number of events for each kind, like: {'all': 30, 'gig': 12, 'movie': 18,} `current_kind` is the event kind that's active, if any. e.g. 'gig', 'movie', etc. `page_number` is the current page of this kind of events we'r...
def generate(env): global PDFAction if PDFAction is None: PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR') global DVIPDFAction if DVIPDFAction is None: DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction) from . import pdf pdf.generate...
Add Builders and construction variables for dvipdf to an Environment.
def regroup(self, group_by=None):
    """Rebuild the grouping, optionally under a new ``group_by`` key.

    All items are removed from the current groups and re-added via
    ``self.add`` so they land in the correct (new) groups.
    """
    key = group_by or self.group_by
    old_groups = self.groups
    self.groups = {}
    for members in old_groups.values():
        for item in members:
            self.add(item, key)
Regroup items.
def direct_horizontal_irradiance(self): analysis_period = AnalysisPeriod(timestep=self.timestep, is_leap_year=self.is_leap_year) header_dhr = Header(data_type=DirectHorizontalIrradiance(), unit='W/m2', analy...
Returns the direct irradiance on a horizontal surface at each timestep. Note that this is different from the direct_normal_irradiance needed to construct a Wea, which is NORMAL and not HORIZONTAL.
def _createValueObjects(self, valueList, varList, mapTable, indexMap, contaminant, replaceParamFile): def assign_values_to_table(value_list, layer_id): for i, value in enumerate(value_list): value = vrp(value, replaceParamFile) mtValue = MTValue(variable=varList[i], v...
Populate GSSHAPY MTValue and MTIndex Objects Method
def revoke_user_access( self, access_id ): path = "/api/v3/publisher/user/access/revoke" data = { 'api_token': self.api_token, 'access_id': access_id, } r = requests.get( self.base_url + path, data=data ) if r.status_code != 200: raise ValueEr...
Takes an access_id, probably obtained from the get_access_list structure, and revokes that access. No return value, but may raise ValueError.
def _write_subset_index_file(options, core_results): f_path = os.path.join(options['run_dir'], '_subset_index.csv') subset_strs = zip(*core_results)[0] index = np.arange(len(subset_strs)) + 1 df = pd.DataFrame({'subsets': subset_strs}, index=index) df.to_csv(f_path)
Write table giving index of subsets, giving number and subset string
def manipulate(self, stored_instance, component_instance):
    """Inject a logger into the freshly instantiated component.

    Called by iPOPO right after instantiation, before other handlers run.

    :param stored_instance: The iPOPO component StoredInstance (unused here)
    :param component_instance: The component instance to receive the logger
    """
    logger = logging.getLogger(self._name)
    self._logger = logger
    setattr(component_instance, self._field, logger)
Called by iPOPO right after the instantiation of the component. This is the last chance to manipulate the component before the other handlers start. :param stored_instance: The iPOPO component StoredInstance :param component_instance: The component instance
def initLogging(): logging.basicConfig( level=logging.INFO, format='%(asctime)s.%(msecs)03d %(levelname)s %(name)s - %(message)s', datefmt='%H:%M:%S') logging.getLogger('').setLevel(logging.INFO) logging.getLogger('PIL').setLevel(logging.INFO) CONFIG_PATHS = [ os.path.cur...
Sets logging defaults
def best_item_from_list(item, options, fuzzy=90, fname_match=True,
                        fuzzy_fragment=None, guess=False):
    """Return only the best-matching item from ``options``, or ``None``
    when nothing matches.
    """
    match = best_match_from_list(item, options, fuzzy, fname_match,
                                 fuzzy_fragment, guess)
    return match[0] if match else None
Returns just the best item, or ``None``
def find(self, instance_ids=None, filters=None):
    """Flatten the list of EC2 reservations into a list of instances.

    :param instance_ids: list of instance ids to filter by
    :type instance_ids: list
    :param filters: dict of Filter.N values
    :type filters: dict
    :return: flattened list of filtered instances
    """
    reservations = self.retry_on_ec2_error(
        self.ec2.get_all_instances,
        instance_ids=instance_ids,
        filters=filters)
    return [instance
            for reservation in reservations
            for instance in reservation.instances]
Flatten list of reservations to a list of instances. :param instance_ids: A list of instance ids to filter by :type instance_ids: list :param filters: A dict of Filter.N values defined in http://goo.gl/jYNej9 :type filters: dict :return: A flattened list of filtered instances. ...
def setEntry(self, entry=None): busy = Purr.BusyIndicator() self.entry = entry self.setEntryTitle(entry.title) self.setEntryComment(entry.comment.replace("\n", "\n\n").replace("<BR>", "\n")) self.wdplv.clear() self.wdplv.fillDataProducts(entry.dps) self.setTimesta...
Populates the dialog with contents of an existing entry.
def combine_types(types):
    """Return a combined and simplified type.

    For example, given 'int' and 'List[int]' return Union[int, List[int]];
    given 'int' and 'int', return just 'int'.
    """
    items = simplify_types(types)
    return items[0] if len(items) == 1 else UnionType(items)
Given some types, return a combined and simplified type. For example, if given 'int' and 'List[int]', return Union[int, List[int]]. If given 'int' and 'int', return just 'int'.
def update_check(self, existing, new): old_state = existing.state if 'NowPlayingItem' in existing.session_raw: try: old_theme = existing.session_raw['NowPlayingItem']['IsThemeMedia'] except KeyError: old_theme = False else: old_...
Check device state to see if we need to fire the callback. True if either state is 'Playing' False if both states are: 'Paused', 'Idle', or 'Off' True on any state transition.
def query( self, url: Union[str, methods], data: Optional[MutableMapping] = None, headers: Optional[MutableMapping] = None, as_json: Optional[bool] = None, ) -> dict: url, body, headers = sansio.prepare_request( url=url, data=data, ...
Query the slack API When using :class:`slack.methods` the request is made `as_json` if available Args: url: :class:`slack.methods` or url string data: JSON encodable MutableMapping headers: Custom headers as_json: Post JSON to the slack API Retur...
def off(self):
    """Turn off every output device in this collection."""
    output_types = (OutputDevice, CompositeOutputDevice)
    for dev in self:
        if isinstance(dev, output_types):
            dev.off()
Turn all the output devices off.
def get_option(option_name, section_name="main", default=_sentinel, cfg_file=cfg_file): defaults = get_defaults() if default != _sentinel: my_defaults = {option_name: default} else: my_defaults = defaults.get('section_name', {}) parser = get_parser(cfg_file) return parser.get(section...
Returns a specific option specified in a config file Arguments: option_name -- Name of the option (example host_name) section_name -- Which section of the config (default: main) examples: >>> get_option("some option", default="default result") 'default result'
def post(node_name, key, **kwargs):
    """Store information about a node on the server.

    Arguments:
        node_name -- node name or token identifying the node the data
            belongs to
        key -- identifiable key used later to retrieve the data
        kwargs -- the data to store under ``key``

    Raises:
        ValueError: if no node matches ``node_name``.
    """
    node = nago.core.get_node(node_name)
    if not node:
        raise ValueError("Node named %s not found" % node_name)
    token = node.token
    # .get() avoids a KeyError the first time a node posts data
    # (plain subscription would raise if the token is not yet present).
    node_data[token] = node_data.get(token) or {}
    node_data[token][key] = kwargs
    return "thanks!"
Give the server information about this node Arguments: node -- node_name or token for the node this data belongs to key -- identifiable key, that you use later to retrieve that piece of data kwargs -- the data you need to store
def appendPoint(self, position=None, type="line", smooth=False, name=None, identifier=None, point=None): if point is not None: if position is None: position = point.position type = point.type smooth = point.smooth if name is None: n...
Append a point to the contour.
def report_saved(report_stats): if Settings.verbose: report = '' truncated_filename = truncate_cwd(report_stats.final_filename) report += '{}: '.format(truncated_filename) total = new_percent_saved(report_stats) if total: report += total else: ...
Record the percent saved & print it.
def main():
    """Watch for an event from a specific RF device and idle forever.

    Useful for internal RF sensors, which do not emit a FAULT when tripped
    while the panel is armed STAY. Any failure is printed, not raised.
    """
    try:
        serial = SerialDevice(interface=SERIAL_DEVICE)
        device = AlarmDecoder(serial)
        device.on_rfx_message += handle_rfx
        with device.open(baudrate=BAUDRATE):
            while True:
                time.sleep(1)
    except Exception as ex:
        print('Exception:', ex)
Example application that watches for an event from a specific RF device. This feature allows you to watch for events from RF devices if you have an RF receiver. This is useful in the case of internal sensors, which don't emit a FAULT if the sensor is tripped and the panel is armed STAY. It also will m...
def get_action_handler(self, controller_name, action_name): try_actions = [ controller_name + '/' + action_name, controller_name + '/not_found', 'index/not_found' ] for path in try_actions: if path in self._controllers: return self....
Return action of controller as callable. If requested controller isn't found - return 'not_found' action of requested controller or Index controller.
def get_expected_bindings(self): sg_bindings = db_lib.get_baremetal_sg_bindings() all_expected_bindings = collections.defaultdict(set) for sg_binding, port_binding in sg_bindings: sg_id = sg_binding['security_group_id'] try: binding_profile = json.loads(po...
Query the neutron DB for SG->switch interface bindings Bindings are returned as a dict of bindings for each switch: {<switch1>: set([(intf1, acl_name, direction), (intf2, acl_name, direction)]), <switch2>: set([(intf1, acl_name, direction)]), ..., }
def buttons(self, master): box = tk.Frame(master) ttk.Button( box, text="Next", width=10, command=self.next_day ).pack(side=tk.LEFT, padx=5, pady=5) ttk.Button( box, text="OK", width=10, comma...
Add a standard button box. Override if you do not want the standard buttons
def rm_missing_values_table(d):
    """Remove the ``missingValue`` key from every column of a metadata table.

    Best-effort: tables without a valid "columns" mapping are returned
    untouched.

    :param dict d: Metadata (table)
    :return dict d: Metadata (table)
    """
    try:
        columns = d["columns"]
        for name, col in columns.items():
            columns[name] = rm_keys_from_dict(col, ["missingValue"])
    except Exception:
        # Deliberately swallow errors: malformed tables pass through as-is.
        pass
    return d
Loop for each table column and remove the missingValue key & data :param dict d: Metadata (table) :return dict d: Metadata (table)
def set_name_email(configurator, question, answer):
    """Build the '"Full Name" <email@eg.com>' author string and store it
    in the configurator's variables.
    """
    full_name = configurator.variables['author.name']
    author = '"{0}" <{1}>'.format(full_name, answer)
    configurator.variables['author.name_email'] = author
    return answer
Prepare the '"Full Name" <email@eg.com>' string.
def register_custom_actions(parser: argparse.ArgumentParser) -> None:
    """Register the custom range-aware argument action types on ``parser``."""
    for action_name, action_cls in ((None, _StoreRangeAction),
                                    ('store', _StoreRangeAction),
                                    ('append', _AppendRangeAction)):
        parser.register('action', action_name, action_cls)
Register custom argument action types
def start_container(self, image, container_name: str, repo_path: Path): command = "bash -i" if self.inherit_image: command = "sh -i" container = self.client.containers.run(image, command=command, detach=True, tty=True, name=container_name, ...
Starts a container with the image and name ``container_name`` and copies the repository into the container. :type image: docker.models.images.Image :rtype: docker.models.container.Container
def request_token(self):
    """Fetch an OAuth request token.

    Returns:
        tuple: (authorize_url, request_token, request_secret).
    """
    logging.debug("Getting request token from %s:%d", self.server, self.port)
    token, secret = self._token("/oauth/requestToken")
    authorize_url = "{}/oauth/authorize?oauth_token={}".format(self.host, token)
    return authorize_url, token, secret
Returns url, request_token, request_secret
def roots(expr, types=(ops.PhysicalTable,)): stack = [ arg.to_expr() for arg in reversed(expr.op().root_tables()) if isinstance(arg, types) ] def extender(op): return reversed( list( itertools.chain.from_iterable( arg.op().root_...
Yield every node of a particular type on which an expression depends. Parameters ---------- expr : Expr The expression to analyze types : tuple(type), optional, default (:mod:`ibis.expr.operations.PhysicalTable`,) The node types to traverse Yields ------ table :...
def get_checksum32(oqparam, hazard=False): checksum = 0 for fname in get_input_files(oqparam, hazard): checksum = _checksum(fname, checksum) if hazard: hazard_params = [] for key, val in vars(oqparam).items(): if key in ('rupture_mesh_spacing', 'complex_fault_mesh_spacing...
Build an unsigned 32 bit integer from the input files of a calculation. :param oqparam: an OqParam instance :param hazard: if True, consider only the hazard files :returns: the checksum
def menu_weekly(self, building_id): din = DiningV2(self.bearer, self.token) response = {'result_data': {'Document': {}}} days = [] for i in range(7): date = str(datetime.date.today() + datetime.timedelta(days=i)) v2_response = din.menu(building_id, date) ...
Get an array of menu objects corresponding to the weekly menu for the venue with building_id. :param building_id: A string representing the id of a building, e.g. "abc". >>> commons_week = din.menu_weekly("593")
def _get_source(link): if link.startswith("http://") or link.startswith("https://"): down = httpkie.Downloader() return down.download(link) if os.path.exists(link): with open(link) as f: return f.read() raise UserWarning("html: '%s' is neither URL or data!" % link)
Return source of the `link` whether it is filename or url. Args: link (str): Filename or URL. Returns: str: Content. Raises: UserWarning: When the `link` couldn't be resolved.
def _ReadSequenceDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): if is_member: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE) else: supported_definition_values = ( self...
Reads a sequence data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data ty...
def _list_element_starts_with(items, needle): for item in items: if item.startswith(needle): return True return False
True if any of the list elements starts with needle
def _create(self): if not os.path.exists(settings.SALMON_WHISPER_DB_PATH): os.makedirs(settings.SALMON_WHISPER_DB_PATH) archives = [whisper.parseRetentionDef(retentionDef) for retentionDef in settings.ARCHIVES.split(",")] whisper.create(self.path, archives, ...
Create the Whisper file on disk
def field2type_and_format(self, field): for field_class in type(field).__mro__: if field_class in self.field_mapping: type_, fmt = self.field_mapping[field_class] break else: warnings.warn( "Field of type {} does not inherit from ma...
Return the dictionary of OpenAPI type and format based on the field type :param Field field: A marshmallow field. :rtype: dict
def contains_entry(self, key, value): check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") key_data = self._to_data(key) value_data = self._to_data(value) return self._encode_invoke_on_key(multi_map_contains_entry_codec, key_data, key=key_data, ...
Returns whether the multimap contains an entry with the value. :param key: (object), the specified key. :param value: (object), the specified value. :return: (bool), ``true`` if this multimap contains the key-value tuple.
def cylinder_inertia(mass, radius, height, transform=None): h2, r2 = height ** 2, radius ** 2 diagonal = np.array([((mass * h2) / 12) + ((mass * r2) / 4), ((mass * h2) / 12) + ((mass * r2) / 4), (mass * r2) / 2]) inertia = diagonal * np.eye(3) if transfo...
Return the inertia tensor of a cylinder. Parameters ------------ mass : float Mass of cylinder radius : float Radius of cylinder height : float Height of cylinder transform : (4,4) float Transformation of cylinder Returns ------------ inertia : (3,3) float ...
def blk_coverage_1d(blk, size):
    """Return the part of a 1d array covered by a block.

    :param blk: size of the 1d block
    :param size: size of the 1d image
    :return: a tuple of (size covered, remaining size)

    Example:

        >>> blk_coverage_1d(7, 100)
        (98, 2)
    """
    nblocks, rem = divmod(size, blk)
    return blk * nblocks, rem
Return the part of a 1d array covered by a block. :param blk: size of the 1d block :param size: size of the 1d a image :return: a tuple of size covered and remaining size Example: >>> blk_coverage_1d(7, 100) (98, 2)
def load(self):
    """Load the shop's name and inventory from its page."""
    url = "http://www.neopets.com/objects.phtml?type=shop&obj_type=" + self.id
    page = self.usr.getPage(url)
    self.name = page.find("td", "contentModuleHeader").text.strip()
    self.inventory = MainShopInventory(self.usr, self.id)
Loads the shop name and inventory
def as_binning(obj, copy: bool = False) -> BinningBase:
    """Ensure that an object is a binning.

    Parameters
    ----------
    obj : BinningBase or array_like
        Can be a binning, numpy-like bins or full physt bins
    copy : If true, ensure that the returned object is independent
    """
    if isinstance(obj, BinningBase):
        return obj.copy() if copy else obj
    return StaticBinning(make_bin_array(obj))
Ensure that an object is a binning Parameters --------- obj : BinningBase or array_like Can be a binning, numpy-like bins or full physt bins copy : If true, ensure that the returned object is independent
def pprint(self):
    """Return tag key=value pairs, one per line, sorted by key."""
    lines = [u"%s=%s" % (key, value.pprint())
             for key, value in sorted(self.items())]
    return u"\n".join(lines)
Return tag key=value pairs in a human-readable format.
def add_filter(self, ftype, func):
    """Register a new output filter.

    Whenever a handler's output matches `ftype`, `func` is applied to it.
    Any existing filter for the same type is replaced.
    """
    if not isinstance(ftype, type):
        raise TypeError("Expected type object, got %s" % type(ftype))
    kept = [pair for pair in self.castfilter if pair[0] != ftype]
    kept.append((ftype, func))
    kept.sort()
    self.castfilter = kept
Register a new output filter. Whenever bottle hits a handler output matching `ftype`, `func` is applyed to it.
def command_max_burst_count(self, event=None): try: max_burst_count = self.max_burst_count_var.get() except ValueError: max_burst_count = self.runtime_cfg.max_burst_count if max_burst_count < 1: max_burst_count = self.runtime_cfg.max_burst_count self.r...
max CPU burst op count - self.runtime_cfg.max_burst_count
def _separate_hdxobjects(self, hdxobjects, hdxobjects_name, id_field, hdxobjectclass): new_hdxobjects = self.data.get(hdxobjects_name, list()) if new_hdxobjects: hdxobject_names = set() for hdxobject in hdxobjects: hdxobject_name = hdxobject[id_field] ...
Helper function to take a list of HDX objects contained in the internal dictionary and add them to a supplied list of HDX objects or update existing metadata if any objects already exist in the list. The list in the internal dictionary is then deleted. Args: hdxobjects (List[T <= HD...
def get_trial_info(current_trial): if current_trial.end_time and ("_" in current_trial.end_time): time_obj = datetime.datetime.strptime(current_trial.end_time, "%Y-%m-%d_%H-%M-%S") end_time = time_obj.strftime("%Y-%m-%d %H:%M:%S") else: end_t...
Get job information for current trial.
def send(self, data, flags=0):
    """Send data to the socket; the socket must be connected.

    Returns a boolean: a false value typically indicates that the socket
    or connection was closed.
    """
    sent_ok = self.llc.send(self._tco, data, flags)
    return sent_ok
Send data to the socket. The socket must be connected to a remote socket. Returns a boolean value that indicates success or failure. A false value is typically an indication that the socket or connection was closed.
def l2_norm(params):
    """Compute the l2 norm of params by flattening them into a vector.

    NOTE(review): this returns dot(v, v), i.e. the *squared* l2 norm,
    not its square root — confirm callers expect the squared value.
    """
    vec, _ = flatten(params)
    return np.dot(vec, vec)
Computes l2 norm of params by flattening them into a vector.
async def get_user_groups(request): acl_callback = request.get(GROUPS_KEY) if acl_callback is None: raise RuntimeError('acl_middleware not installed') user_id = await get_auth(request) groups = await acl_callback(user_id) if groups is None: return None user_groups = (Group.Authen...
Returns the groups that the user in this request has access to. This function gets the user id from the auth.get_auth function, and passes it to the ACL callback function to get the groups. Args: request: aiohttp Request object Returns: If the ACL callback function returns None, this ...
def loadFile(self, fileName): self.fileName = fileName self.file = QtCore.QFile(fileName) if self.file.exists(): self.qteScintilla.setText(open(fileName).read()) self.qteScintilla.qteUndoStack.reset() else: msg = "File <b>{}</b> does not exist".format(...
Display the file ``fileName``.
def move_file( src_fs, src_path, dst_fs, dst_path, ): with manage_fs(src_fs) as _src_fs: with manage_fs(dst_fs, create=True) as _dst_fs: if _src_fs is _dst_fs: _src_fs.move(src_path, dst_path, overwrite=True) else: with _src_fs.lock(), ...
Move a file from one filesystem to another. Arguments: src_fs (FS or str): Source filesystem (instance or URL). src_path (str): Path to a file on ``src_fs``. dst_fs (FS or str); Destination filesystem (instance or URL). dst_path (str): Path to a file on ``dst_fs``.
def stop_experiment(args): experiment_id_list = parse_ids(args) if experiment_id_list: experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() for experiment_id in experiment_id_list: print_normal('Stoping experiment %s' % experiment_id) ...
Stop the experiment which is running
def export_process_to_csv(bpmn_diagram, directory, filename): nodes = copy.deepcopy(bpmn_diagram.get_nodes()) start_nodes = [] export_elements = [] for node in nodes: incoming_list = node[1].get(consts.Consts.incoming_flow) if len(incoming_list) == 0: ...
Root method of CSV export functionality. :param bpmn_diagram: an instance of BpmnDiagramGraph class, :param directory: a string object, which is a path of output directory, :param filename: a string object, which is a name of output file.
def cover(self, pageid): r = requests.get(self.api, params={'action': 'query', 'prop': 'pageimages', 'pageids': pageid, 'format': 'json'}, headers=self.header) jsd = r.json() image = "File:" + jsd['query']['pages'][str(pageid)]['pageimage'] ...
Get a cover image given a page id. :param str pageid: The pageid for the light novel you want a cover image for :return str: the image url
def multimask_images(images: Iterable[SpatialImage],
                     masks: Sequence[np.ndarray],
                     image_type: type = None
                     ) -> Iterable[Sequence[np.ndarray]]:
    """Mask images with multiple masks.

    Yields, for each input image, a list with one masked copy per mask.

    Parameters
    ----------
    images: Images to mask.
    masks: Masks to apply.
    image_type: Type to cast images to.
    """
    for img in images:
        masked = []
        for mask in masks:
            masked.append(mask_image(img, mask, image_type))
        yield masked
Mask images with multiple masks. Parameters ---------- images: Images to mask. masks: Masks to apply. image_type: Type to cast images to. Yields ------ Sequence[np.ndarray] For each mask, a masked image.
def load_results_from_table_definition(table_definition, table_definition_file, options): default_columns = extract_columns_from_table_definition_file(table_definition, table_definition_file) columns_relevant_for_diff = _get_columns_relevant_for_diff(default_columns) results = [] for tag in table_defini...
Load all results in files that are listed in the given table-definition file. @return: a list of RunSetResult objects
def validate( schema: GraphQLSchema, document_ast: DocumentNode, rules: Sequence[RuleType] = None, type_info: TypeInfo = None, ) -> List[GraphQLError]: if not document_ast or not isinstance(document_ast, DocumentNode): raise TypeError("You must provide a document node.") assert_valid_sch...
Implements the "Validation" section of the spec. Validation runs synchronously, returning a list of encountered errors, or an empty list if no errors were encountered and the document is valid. A list of specific validation rules may be provided. If not provided, the default list of rules defined by t...
def hide(self, bAsync=True):
    """Make the window invisible.

    @see: L{show}
    @type bAsync: bool
    @param bAsync: Perform the request asynchronously.
    @raise WindowsError: An error occured while processing this request.
    """
    show_window = win32.ShowWindowAsync if bAsync else win32.ShowWindow
    show_window(self.get_handle(), win32.SW_HIDE)
Make the window invisible. @see: L{show} @type bAsync: bool @param bAsync: Perform the request asynchronously. @raise WindowsError: An error occured while processing this request.
def intersection(line1, line2): x1, y1, x2, y2 = line1 u1, v1, u2, v2 = line2 (a, b), (c, d) = (x2 - x1, u1 - u2), (y2 - y1, v1 - v2) e, f = u1 - x1, v1 - y1 denom = float(a * d - b * c) if _near(denom, 0): if b == 0 or d == 0: return None if _near(e / b, f / d): ...
Return the coordinates of a point of intersection given two lines. Return None if the lines are parallel, but non-collinear. Return an arbitrary point of intersection if the lines are collinear. Parameters: line1 and line2: lines given by 4 points (x0,y0,x1,y1).
def read_csv_to_html_table(csvFile, hasHeader='N'): txt = '<table class="as-table as-table-zebra as-table-horizontal">' with open(csvFile, "r") as f: numRows = 1 for row in f: if hasHeader == 'Y': if numRows == 1: td_begin = '<TH>' ...
reads a CSV file and converts it to HTML
def get_preparation_data(name): d = dict( name=name, sys_path=sys.path, sys_argv=sys.argv, log_to_stderr=_log_to_stderr, orig_dir=process.ORIGINAL_DIR, authkey=process.current_process().authkey, ) if _logger is not None: d['log_level'] = _log...
Return info about parent needed by child to unpickle process object. Monkey-patch from
def get_description(self, description_type='Abstract'): if 'descriptions' in self.xml: if isinstance(self.xml['descriptions']['description'], list): for description in self.xml['descriptions']['description']: if description_type in description: ...
Get DataCite description.
def __sort_analyses(sentence): for word in sentence: if ANALYSIS not in word: raise Exception( '(!) Error: no analysis found from word: '+str(word) ) else: word[ANALYSIS] = sorted(word[ANALYSIS], \ key=lambda x : "_".join( [x[ROOT],x[POSTAG],x[FORM],x[CLITIC]]...
Sorts analysis of all the words in the sentence. This is required for consistency, because by default, analyses are listed in arbitrary order;
def copy(self, name=None):
    r"""Create a deep copy of the current project.

    New, unique versions of all contained objects are created with
    identical data, and the copy is registered in the workspace.

    Parameters
    ----------
    name : string
        The name to give to the new project. If not supplied, a name is
        generated by the workspace.
    """
    new_name = ws._gen_name() if name is None else name
    proj = deepcopy(self)
    ws[new_name] = proj
    return proj
r""" Creates a deep copy of the current project A deep copy means that new, unique versions of all the objects are created but with identical data and properties. Parameters ---------- name : string The name to give to the new project. If not supplied, a na...
def interval_lengths(bits):
    """Yield the lengths of all contiguous runs of set bits in ``bits``."""
    end = 0
    while True:
        start = bits.next_set(end)
        if start == bits.size:
            return
        end = bits.next_clear(start)
        yield end - start
Get the length distribution of all contiguous runs of set bits from
def remove_multi(self, kvs, quiet=None):
    """Remove multiple items from the cluster.

    :param kvs: Iterable of keys to delete from the cluster. To specify a
        CAS for each item, pass a dictionary mapping key to CAS, like
        ``remove_multi({k1: cas1, k2: cas2})``.
    :param quiet: Whether an exception should be raised on missing keys.
    """
    return _Base.remove_multi(self, kvs, quiet=quiet)
Remove multiple items from the cluster :param kvs: Iterable of keys to delete from the cluster. If you wish to specify a CAS for each item, then you may pass a dictionary of keys mapping to cas, like `remove_multi({k1:cas1, k2:cas2}`) :param quiet: Whether an exception should be...
def clip(self, lower=None, upper=None):
    """Trim values at the given thresholds using pandas ``DataFrame.clip``."""
    clipped = self.export_df().clip(lower=lower, upper=upper)
    self.load_df(clipped)
Trim values at input thresholds using pandas function
def solve_mbar_for_all_states(u_kn, N_k, f_k, solver_protocol): states_with_samples = np.where(N_k > 0)[0] if len(states_with_samples) == 1: f_k_nonzero = np.array([0.0]) else: f_k_nonzero, all_results = solve_mbar(u_kn[states_with_samples], N_k[states_with_samples], ...
Solve for free energies of states with samples, then calculate for empty states. Parameters ---------- u_kn : np.ndarray, shape=(n_states, n_samples), dtype='float' The reduced potential energies, i.e. -log unnormalized probabilities N_k : np.ndarray, shape=(n_states), dtype='int' T...
def get(cls, **kwargs):
    """Get a copy of the type from the cache and reconstruct it.

    On a cache miss, a fresh instance is created and populated via
    ``from_miss`` instead of being deserialized.
    """
    data = cls._get(**kwargs)
    if data is not None:
        return cls.deserialize(data)
    instance = cls()
    instance.from_miss(**kwargs)
    return instance
Get a copy of the type from the cache and reconstruct it.
def set_style(network_id, ndex_cred=None, template_id=None): if not template_id: template_id = "ea4ea3b7-6903-11e7-961c-0ac135e8bacf" server = 'http://public.ndexbio.org' username, password = get_default_ndex_cred(ndex_cred) source_network = ndex2.create_nice_cx_from_server(username=username, ...
Set the style of the network to a given template network's style Parameters ---------- network_id : str The UUID of the NDEx network whose style is to be changed. ndex_cred : dict A dictionary of NDEx credentials. template_id : Optional[str] The UUID of the NDEx network whos...
def dependents_of_addresses(self, addresses):
    """Given an iterable of addresses, return all of those addresses'
    dependents (direct and implicit), including the addresses themselves.
    """
    dependents = OrderedSet(addresses)
    for address in addresses:
        dependents.update(self._dependent_address_map[address])
        dependents.update(self._implicit_dependent_address_map[address])
    return dependents
Given an iterable of addresses, yield all of those addresses dependents.
def _add_intermol_molecule_type(intermol_system, parent): from intermol.moleculetype import MoleculeType from intermol.forces.bond import Bond as InterMolBond molecule_type = MoleculeType(name=parent.name) intermol_system.add_molecule_type(molecule_type) for index, parent_atom in...
Create a molecule type for the parent and add bonds.
def get_package(self): package_data = self._get_data() package_data = package_schema.validate(package_data) if "requires_rez_version" in package_data: ver = package_data.pop("requires_rez_version") if _rez_Version < ver: raise PackageMetadataError( ...
Create the analogous package. Returns: `Package` object.
def raise_right_error(response): if response.status_code == 200: return if response.status_code == 500: raise ServerError('Clef servers are down.') if response.status_code == 403: message = response.json().get('error') error_class = MESSAGE_TO_ERROR_MAP[message] if er...
Raise appropriate error when bad response received.
def resources(): ind_id = request.form['ind_id'] upload_dir = os.path.abspath(app.config['UPLOAD_DIR']) req_file = request.files['file'] filename = secure_filename(req_file.filename) file_path = os.path.join(upload_dir, filename) name = request.form['name'] or filename req_file.save(file_pat...
Upload a new resource for an individual.
def generate_getter(value):
    """Generate a property getter that tests the instance against ``value``
    via ``is_``, preserving ``is_``'s metadata on the getter.
    """
    def getter(self):
        return self.is_(value)
    return property(wraps(is_)(getter))
Generate getter for given value.
def get_service_module(service_path): paths = [ os.path.dirname(__file__), os.path.realpath(os.path.join(service_path, "..")), os.path.realpath(os.path.join(service_path)), os.path.realpath(os.path.join(service_path, DEPS_DIR)), ] for path in paths: path = os.path.rea...
Add custom paths to sys and import service module. :param service_path: Path to service folder
def postURL(self, url, headers=None, body=None):
    """Request a URL using the HTTP method POST.

    :param url: target URL.
    :param headers: optional mapping of HTTP headers. A fresh empty dict is
        created per call when omitted, avoiding the shared mutable-default
        pitfall of the previous ``headers={}`` signature.
    :param body: optional request body.
    """
    if headers is None:
        headers = {}
    return self._load_resource("POST", url, headers, body)
Request a URL using the HTTP method POST.
def add_service_subnet(self, context_id, subnet_id):
    """Add a service subnet to a tunnel context.

    :param int context_id: The id-value representing the context instance.
    :param int subnet_id: The id-value representing the service subnet.
    :return bool: True if the service subnet addition was successful.
    """
    result = self.context.addServiceSubnetToNetworkTunnel(subnet_id,
                                                          id=context_id)
    return result
Adds a service subnet to a tunnel context. :param int context_id: The id-value representing the context instance. :param int subnet_id: The id-value representing the service subnet. :return bool: True if service subnet addition was successful.
def setdefault(self, key, *args):
    """Set the value under the lowercased key and return it.

    NOTE(review): ``basestring`` is Python-2-only — confirm this module
    still targets Python 2 before porting.
    """
    assert isinstance(key, basestring)
    lowered = key.lower()
    return dict.setdefault(self, lowered, *args)
Set lowercase key value and return.
def with_fields(self, *fields):
    """Return a query of characters that have data for all given fields.

    Parameters
    ----------
    *fields : list of str
        fields for which information should be available

    Returns
    -------
    :class:`sqlalchemy.orm.query.Query` : list of matches
    """
    Unihan = self.sql.base.classes.Unihan
    query = self.sql.session.query(Unihan)
    for field_name in fields:
        query = query.filter(Column(field_name).isnot(None))
    return query
Returns list of characters with information for certain fields. Parameters ---------- *fields : list of str fields for which information should be available Returns ------- :class:`sqlalchemy.orm.query.Query` : list of matches
def hide_tool(self, context_name, tool_name):
    """Hide a tool so that it is not exposed in the suite.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to hide.
    """
    hidden = self._context(context_name)["hidden_tools"]
    if tool_name in hidden:
        return  # already hidden; nothing to validate or flush
    self._validate_tool(context_name, tool_name)
    hidden.add(tool_name)
    self._flush_tools()
Hide a tool so that it is not exposed in the suite. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to hide.
def lerp(vec1, vec2, time): if isinstance(vec1, Vector2) \ and isinstance(vec2, Vector2): if time < 0: time = 0 elif time > 1: time = 1 x_lerp = vec1[0] + time * (vec2[0] - vec1[0]) y_lerp = vec1[1] + time * (vec2[1]...
Lerp between vec1 to vec2 based on time. Time is clamped between 0 and 1.
def plot(self, format='segments', bits=None, **kwargs): if format == 'timeseries': return super(StateVector, self).plot(**kwargs) if format == 'segments': from ..plot import Plot kwargs.setdefault('xscale', 'auto-gps') return Plot(*self.to_dqflags(bits=bit...
Plot the data for this `StateVector` Parameters ---------- format : `str`, optional, default: ``'segments'`` The type of plot to make, either 'segments' to plot the SegmentList for each bit, or 'timeseries' to plot the raw data for this `StateVector` ...
def get(self):
    """Return the stored value, or None once the deadline has expired.

    Expiry also permanently clears the stored value.
    """
    expired = self.timer() > self.deadline
    if expired:
        self.value = None
    return self.value
Returns existing value, or None if deadline has expired.
def patch_stackless(): global _application_set_schedule_callback _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback) def set_schedule_callback(callable): global _application_set_schedule_callback old = _application_set_schedule_callback _applicati...
This function should be called to patch the stackless module so that new tasklets are properly tracked in the debugger.
def __step1(self):
    """Step 1 of the Munkres assignment algorithm.

    For each row of the cost matrix, find the smallest element and
    subtract it from every element in that row.

    Returns:
        int: 2, the number of the next step to execute.
    """
    # Removed the unused local alias ``C = self.C``; the loop already
    # operates on self.C directly.
    n = self.n
    for i in range(n):
        minval = min(self.C[i])
        for j in range(n):
            self.C[i][j] -= minval
    return 2
For each row of the matrix, find the smallest element and subtract it from every element in its row. Go to Step 2.